Diffstat (limited to 'spec')
-rw-r--r--  spec/frontend/runner/components/runner_type_badge_spec.js  40
-rw-r--r--  spec/frontend/runner/runner_detail/runner_detail_app_spec.js  29
-rw-r--r--  spec/frontend/runner/runner_detail/runner_details_app_spec.js  71
-rw-r--r--  spec/frontend/sidebar/mock_data.js  77
-rw-r--r--  spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb  24
-rw-r--r--  spec/lib/gitlab/github_import/client_spec.rb  5
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb  13
-rw-r--r--  spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb  18
-rw-r--r--  spec/models/release_spec.rb  12
-rw-r--r--  spec/support/helpers/snowplow_helpers.rb  8
-rw-r--r--  spec/workers/concerns/limited_capacity/job_tracker_spec.rb  48
-rw-r--r--  spec/workers/concerns/limited_capacity/worker_spec.rb  137
12 files changed, 236 insertions, 246 deletions
diff --git a/spec/frontend/runner/components/runner_type_badge_spec.js b/spec/frontend/runner/components/runner_type_badge_spec.js
new file mode 100644
index 00000000000..8e52d3398bd
--- /dev/null
+++ b/spec/frontend/runner/components/runner_type_badge_spec.js
@@ -0,0 +1,40 @@
+import { GlBadge } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import RunnerTypeBadge from '~/runner/components/runner_type_badge.vue';
+import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/runner/constants';
+
+describe('RunnerTypeBadge', () => {
+ let wrapper;
+
+ const findBadge = () => wrapper.findComponent(GlBadge);
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = shallowMount(RunnerTypeBadge, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it.each`
+ type | text | variant
+ ${INSTANCE_TYPE} | ${'shared'} | ${'success'}
+ ${GROUP_TYPE} | ${'group'} | ${'success'}
+ ${PROJECT_TYPE} | ${'specific'} | ${'info'}
+  `('displays $type runner as "$text" with a $variant variant', ({ type, text, variant }) => {
+ createComponent({ props: { type } });
+
+ expect(findBadge().text()).toBe(text);
+ expect(findBadge().props('variant')).toBe(variant);
+ });
+
+ it('does not display a badge when type is unknown', () => {
+ createComponent({ props: { type: 'AN_UNKNOWN_VALUE' } });
+
+ expect(findBadge().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/runner/runner_detail/runner_detail_app_spec.js b/spec/frontend/runner/runner_detail/runner_detail_app_spec.js
deleted file mode 100644
index 5caa37c8cb3..00000000000
--- a/spec/frontend/runner/runner_detail/runner_detail_app_spec.js
+++ /dev/null
@@ -1,29 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import RunnerDetailsApp from '~/runner/runner_details/runner_details_app.vue';
-
-const mockRunnerId = '55';
-
-describe('RunnerDetailsApp', () => {
- let wrapper;
-
- const createComponent = (props) => {
- wrapper = shallowMount(RunnerDetailsApp, {
- propsData: {
- runnerId: mockRunnerId,
- ...props,
- },
- });
- };
-
- beforeEach(() => {
- createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('displays the runner id', () => {
- expect(wrapper.text()).toContain('Runner #55');
- });
-});
diff --git a/spec/frontend/runner/runner_detail/runner_details_app_spec.js b/spec/frontend/runner/runner_detail/runner_details_app_spec.js
new file mode 100644
index 00000000000..c61cb647ae6
--- /dev/null
+++ b/spec/frontend/runner/runner_detail/runner_details_app_spec.js
@@ -0,0 +1,71 @@
+import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import RunnerTypeBadge from '~/runner/components/runner_type_badge.vue';
+import { INSTANCE_TYPE } from '~/runner/constants';
+import getRunnerQuery from '~/runner/graphql/get_runner.query.graphql';
+import RunnerDetailsApp from '~/runner/runner_details/runner_details_app.vue';
+
+const mockRunnerId = '55';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('RunnerDetailsApp', () => {
+ let wrapper;
+ let mockRunnerQuery;
+
+ const findRunnerTypeBadge = () => wrapper.findComponent(RunnerTypeBadge);
+
+ const createComponentWithApollo = ({ props = {}, mountFn = shallowMount } = {}) => {
+ const handlers = [[getRunnerQuery, mockRunnerQuery]];
+
+ wrapper = mountFn(RunnerDetailsApp, {
+ localVue,
+ apolloProvider: createMockApollo(handlers),
+ propsData: {
+ runnerId: mockRunnerId,
+ ...props,
+ },
+ });
+
+ return waitForPromises();
+ };
+
+ beforeEach(async () => {
+ mockRunnerQuery = jest.fn().mockResolvedValue({
+ data: {
+ runner: {
+ id: `gid://gitlab/Ci::Runner/${mockRunnerId}`,
+ runnerType: INSTANCE_TYPE,
+ __typename: 'CiRunner',
+ },
+ },
+ });
+ });
+
+ afterEach(() => {
+ mockRunnerQuery.mockReset();
+ wrapper.destroy();
+ });
+
+  it('requests the runner with the GraphQL ID', async () => {
+ await createComponentWithApollo();
+
+ expect(mockRunnerQuery).toHaveBeenCalledWith({ id: `gid://gitlab/Ci::Runner/${mockRunnerId}` });
+ });
+
+ it('displays the runner id', async () => {
+ await createComponentWithApollo();
+
+ expect(wrapper.text()).toContain('Runner #55');
+ });
+
+ it('displays the runner type', async () => {
+ await createComponentWithApollo({ mountFn: mount });
+
+ expect(findRunnerTypeBadge().text()).toBe('shared');
+ });
+});
diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js
index f51d2f3d459..8a969d64467 100644
--- a/spec/frontend/sidebar/mock_data.js
+++ b/spec/frontend/sidebar/mock_data.js
@@ -380,6 +380,25 @@ export const subscriptionNullResponse = {
},
};
+const mockUser1 = {
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: '/root',
+ status: null,
+};
+
+const mockUser2 = {
+ id: 'gid://gitlab/User/4',
+ avatarUrl: '/avatar2',
+ name: 'rookie',
+ username: 'rookie',
+ webUrl: 'rookie',
+ status: null,
+};
+
export const searchResponse = {
data: {
workspace: {
@@ -387,24 +406,10 @@ export const searchResponse = {
users: {
nodes: [
{
- user: {
- id: '1',
- avatarUrl: '/avatar',
- name: 'root',
- username: 'root',
- webUrl: 'root',
- status: null,
- },
+ user: mockUser1,
},
{
- user: {
- id: '2',
- avatarUrl: '/avatar2',
- name: 'rookie',
- username: 'rookie',
- webUrl: 'rookie',
- status: null,
- },
+ user: mockUser2,
},
],
},
@@ -418,27 +423,13 @@ export const projectMembersResponse = {
__typename: 'Project',
users: {
nodes: [
- {
- user: {
- id: 'gid://gitlab/User/1',
- avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
- name: 'Administrator',
- username: 'root',
- webUrl: '/root',
- status: null,
- },
- },
- {
- user: {
- id: '2',
- avatarUrl: '/avatar2',
- name: 'rookie',
- username: 'rookie',
- webUrl: 'rookie',
- status: null,
- },
- },
+          // Remove these nulls once https://gitlab.com/gitlab-org/gitlab/-/issues/329750 is resolved
+ null,
+ null,
+          // Remove the duplicated entry once https://gitlab.com/gitlab-org/gitlab/-/issues/327822 is resolved
+ mockUser1,
+ mockUser1,
+ mockUser2,
{
user: {
id: 'gid://gitlab/User/2',
@@ -468,15 +459,9 @@ export const participantsQueryResponse = {
iid: '1',
participants: {
nodes: [
- {
- id: 'gid://gitlab/User/1',
- avatarUrl:
- 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
- name: 'Administrator',
- username: 'root',
- webUrl: '/root',
- status: null,
- },
+        // Remove the duplicated entry once https://gitlab.com/gitlab-org/gitlab/-/issues/327822 is resolved
+ mockUser1,
+ mockUser1,
{
id: 'gid://gitlab/User/2',
avatarUrl:
diff --git a/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb b/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb
index 4f7c44e5d4e..25c4001a192 100644
--- a/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb
+++ b/spec/lib/generators/gitlab/snowplow_event_definition_generator_spec.rb
@@ -32,6 +32,30 @@ RSpec.describe Gitlab::SnowplowEventDefinitionGenerator do
expect(::Gitlab::Config::Loader::Yaml.new(File.read(event_definition_path)).load_raw!).to eq(sample_event)
end
+  context 'when event definition already exists' do
+ before do
+ stub_const('Gitlab::VERSION', '12.11.0-pre')
+ described_class.new([], generator_options).invoke_all
+ end
+
+    it 'overwrites the event definition when the --force flag is set to true' do
+ sample_event = ::Gitlab::Config::Loader::Yaml.new(fixture_file(File.join(sample_event_dir, 'sample_event.yml'))).load_raw!
+
+ stub_const('Gitlab::VERSION', '13.11.0-pre')
+ described_class.new([], generator_options.merge('force' => true)).invoke_all
+
+ event_definition_path = File.join(ce_temp_dir, 'groups__email_campaigns_controller_click.yml')
+ event_data = ::Gitlab::Config::Loader::Yaml.new(File.read(event_definition_path)).load_raw!
+
+ expect(event_data).to eq(sample_event)
+ end
+
+    it 'raises an error when the --force flag is set to false' do
+ expect { described_class.new([], generator_options.merge('force' => false)).invoke_all }
+ .to raise_error(StandardError, /Event definition already exists at/)
+ end
+ end
+
it 'creates EE event definition file using the template' do
sample_event = ::Gitlab::Config::Loader::Yaml.new(fixture_file(File.join(sample_event_dir, 'sample_event_ee.yml'))).load_raw!
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 4000e0b2611..194dfb228ee 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -32,8 +32,9 @@ RSpec.describe Gitlab::GithubImport::Client do
it 'returns the pull request reviews' do
client = described_class.new('foo')
- expect(client.octokit).to receive(:pull_request_reviews).with('foo/bar', 999)
- expect(client).to receive(:with_rate_limit).and_yield
+ expect(client)
+ .to receive(:each_object)
+ .with(:pull_request_reviews, 'foo/bar', 999)
client.pull_request_reviews('foo/bar', 999)
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb
index b859cc727a6..4a47d103cde 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb
@@ -23,12 +23,11 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsMergedByImporter do
end
describe '#id_for_already_imported_cache' do
- it { expect(subject.id_for_already_imported_cache(double(number: 1))).to eq(1) }
+ it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
end
- describe '#each_object_to_import' do
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
it 'fetches the merged pull requests data' do
- pull_request = double
create(
:merged_merge_request,
iid: 999,
@@ -36,12 +35,18 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsMergedByImporter do
target_project: project
)
+ pull_request = double
+
allow(client)
.to receive(:pull_request)
+        .exactly(:once) # the second call should be served from the cache
.with('http://somegithub.com', 999)
.and_return(pull_request)
- expect { |b| subject.each_object_to_import(&b) }.to yield_with_args(pull_request)
+ expect { |b| subject.each_object_to_import(&b) }
+ .to yield_with_args(pull_request)
+
+ subject.each_object_to_import {}
end
end
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
index 5e2302f9662..f18064f10aa 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_reviews_importer_spec.rb
@@ -23,12 +23,18 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
end
describe '#id_for_already_imported_cache' do
- it { expect(subject.id_for_already_imported_cache(double(github_id: 1))).to eq(1) }
+ it { expect(subject.id_for_already_imported_cache(double(id: 1))).to eq(1) }
end
- describe '#each_object_to_import' do
+ describe '#each_object_to_import', :clean_gitlab_redis_cache do
it 'fetches the merged pull requests data' do
- merge_request = create(:merge_request, source_project: project)
+ merge_request = create(
+ :merged_merge_request,
+ iid: 999,
+ source_project: project,
+ target_project: project
+ )
+
review = double
expect(review)
@@ -37,10 +43,14 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsReviewsImporter do
allow(client)
.to receive(:pull_request_reviews)
+        .exactly(:once) # the second call should be served from the cache
.with('github/repo', merge_request.iid)
.and_return([review])
- expect { |b| subject.each_object_to_import(&b) }.to yield_with_args(review)
+ expect { |b| subject.each_object_to_import(&b) }
+ .to yield_with_args(review)
+
+ subject.each_object_to_import {}
end
end
end
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 836ffadd7f7..b88813b3328 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -48,18 +48,6 @@ RSpec.describe Release do
expect(release.errors.full_messages)
.to include("Description is too long (maximum is #{Gitlab::Database::MAX_TEXT_SIZE_LIMIT} characters)")
end
-
- context 'when validate_release_description_length feature flag is disabled' do
- before do
- stub_feature_flags(validate_release_description_length: false)
- end
-
- it 'does not create a validation error' do
- release.validate
-
- expect(release.errors.full_messages).to be_empty
- end
- end
end
context 'when a release is tied to a milestone for another project' do
diff --git a/spec/support/helpers/snowplow_helpers.rb b/spec/support/helpers/snowplow_helpers.rb
index 70a4eadd8de..daff0bc8e14 100644
--- a/spec/support/helpers/snowplow_helpers.rb
+++ b/spec/support/helpers/snowplow_helpers.rb
@@ -71,7 +71,11 @@ module SnowplowHelpers
# expect_no_snowplow_event
# end
# end
- def expect_no_snowplow_event
- expect(Gitlab::Tracking).not_to have_received(:event) # rubocop:disable RSpec/ExpectGitlabTracking
+ def expect_no_snowplow_event(category: nil, action: nil, **kwargs)
+ if category && action
+ expect(Gitlab::Tracking).not_to have_received(:event).with(category, action, **kwargs) # rubocop:disable RSpec/ExpectGitlabTracking
+ else
+ expect(Gitlab::Tracking).not_to have_received(:event) # rubocop:disable RSpec/ExpectGitlabTracking
+ end
end
end
diff --git a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
index 2c79f347903..f141a1ad7ad 100644
--- a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
+++ b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
@@ -7,30 +7,30 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
described_class.new('namespace')
end
+ let(:max_jids) { 10 }
+
describe '#register' do
it 'adds jid to the set' do
- job_tracker.register('a-job-id')
-
+      expect(job_tracker.register('a-job-id', max_jids)).to be true
expect(job_tracker.running_jids).to contain_exactly('a-job-id')
end
- it 'updates the counter' do
- expect { job_tracker.register('a-job-id') }
- .to change { job_tracker.count }
- .from(0)
- .to(1)
- end
-
- it 'does it in only one Redis call' do
- expect(job_tracker).to receive(:with_redis).once.and_call_original
+ it 'returns false if the jid was not added' do
+ max_jids = 2
+ %w[jid1 jid2].each do |jid|
+ expect(job_tracker.register(jid, max_jids)).to be true
+ end
- job_tracker.register('a-job-id')
+ expect(job_tracker.register('jid3', max_jids)).to be false
+ expect(job_tracker.running_jids).to contain_exactly(*%w[jid1 jid2])
end
end
describe '#remove' do
before do
- job_tracker.register(%w[a-job-id other-job-id])
+ %w[a-job-id other-job-id].each do |jid|
+ job_tracker.register(jid, max_jids)
+ end
end
it 'removes jid from the set' do
@@ -38,24 +38,11 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
expect(job_tracker.running_jids).to contain_exactly('a-job-id')
end
-
- it 'updates the counter' do
- expect { job_tracker.remove('other-job-id') }
- .to change { job_tracker.count }
- .from(2)
- .to(1)
- end
-
- it 'does it in only one Redis call' do
- expect(job_tracker).to receive(:with_redis).once.and_call_original
-
- job_tracker.remove('other-job-id')
- end
end
describe '#clean_up' do
before do
- job_tracker.register('a-job-id')
+ job_tracker.register('a-job-id', max_jids)
end
context 'with running jobs' do
@@ -83,13 +70,6 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_queues do
.to change { job_tracker.running_jids.include?('a-job-id') }
end
- it 'updates the counter' do
- expect { job_tracker.clean_up }
- .to change { job_tracker.count }
- .from(1)
- .to(0)
- end
-
it 'gets the job ids, removes them, and updates the counter with only two Redis calls' do
expect(job_tracker).to receive(:with_redis).twice.and_call_original
diff --git a/spec/workers/concerns/limited_capacity/worker_spec.rb b/spec/workers/concerns/limited_capacity/worker_spec.rb
index 2c33c8666ec..790b5c3544d 100644
--- a/spec/workers/concerns/limited_capacity/worker_spec.rb
+++ b/spec/workers/concerns/limited_capacity/worker_spec.rb
@@ -44,40 +44,22 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
describe '.perform_with_capacity' do
subject(:perform_with_capacity) { worker_class.perform_with_capacity(:arg) }
+ let(:max_running_jobs) { 3 }
+
before do
expect_next_instance_of(worker_class) do |instance|
expect(instance).to receive(:remove_failed_jobs)
- expect(instance).to receive(:report_prometheus_metrics)
-
- allow(instance).to receive(:remaining_work_count).and_return(remaining_work_count)
- allow(instance).to receive(:remaining_capacity).and_return(remaining_capacity)
- end
- end
-
- context 'when capacity is larger than work' do
- let(:remaining_work_count) { 2 }
- let(:remaining_capacity) { 3 }
- it 'enqueues jobs for remaining work' do
- expect(worker_class)
- .to receive(:bulk_perform_async)
- .with([[:arg], [:arg]])
-
- perform_with_capacity
+ allow(instance).to receive(:max_running_jobs).and_return(max_running_jobs)
end
end
- context 'when capacity is lower than work' do
- let(:remaining_work_count) { 5 }
- let(:remaining_capacity) { 3 }
-
- it 'enqueues jobs for remaining work' do
- expect(worker_class)
- .to receive(:bulk_perform_async)
- .with([[:arg], [:arg], [:arg]])
+ it 'enqueues jobs' do
+ expect(worker_class)
+ .to receive(:bulk_perform_async)
+ .with([[:arg], [:arg], [:arg]])
- perform_with_capacity
- end
+ perform_with_capacity
end
end
@@ -104,34 +86,27 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
perform
end
- it 'registers itself in the running set' do
+ it 'reports prometheus metrics' do
allow(worker).to receive(:perform_work)
- expect(job_tracker).to receive(:register).with('my-jid')
+ expect(worker).to receive(:report_prometheus_metrics).once.and_call_original
+ expect(worker).to receive(:report_running_jobs_metrics).twice.and_call_original
perform
end
- it 'removes itself from the running set' do
- expect(job_tracker).to receive(:remove).with('my-jid')
-
+ it 'updates the running set' do
+ expect(job_tracker.running_jids).to be_empty
allow(worker).to receive(:perform_work)
perform
- end
- it 'reports prometheus metrics' do
- allow(worker).to receive(:perform_work)
- expect(worker).to receive(:report_prometheus_metrics).once.and_call_original
- expect(worker).to receive(:report_running_jobs_metrics).twice.and_call_original
-
- perform
+ expect(job_tracker.running_jids).to be_empty
end
end
context 'with capacity and without work' do
before do
allow(worker).to receive(:max_running_jobs).and_return(10)
- allow(worker).to receive(:running_jobs_count).and_return(0)
allow(worker).to receive(:remaining_work_count).and_return(0)
allow(worker).to receive(:perform_work)
end
@@ -146,7 +121,7 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
context 'without capacity' do
before do
allow(worker).to receive(:max_running_jobs).and_return(10)
- allow(worker).to receive(:running_jobs_count).and_return(15)
+ allow(job_tracker).to receive(:register).and_return(false)
allow(worker).to receive(:remaining_work_count).and_return(10)
end
@@ -161,27 +136,14 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
perform
end
-
- it 'does not register in the running set' do
- expect(job_tracker).not_to receive(:register)
-
- perform
- end
-
- it 'removes itself from the running set' do
- expect(job_tracker).to receive(:remove).with('my-jid')
-
- perform
- end
-
- it 'reports prometheus metrics' do
- expect(worker).to receive(:report_prometheus_metrics)
-
- perform
- end
end
context 'when perform_work fails' do
+ before do
+ allow(worker).to receive(:max_running_jobs).and_return(10)
+ allow(job_tracker).to receive(:register).and_return(true)
+ end
+
it 'does not re-enqueue itself' do
expect(worker).not_to receive(:re_enqueue)
@@ -189,7 +151,7 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
end
it 'removes itself from the running set' do
- expect(job_tracker).to receive(:remove)
+ expect(job_tracker).to receive(:remove).with('my-jid')
expect { perform }.to raise_error(NotImplementedError)
end
@@ -202,65 +164,14 @@ RSpec.describe LimitedCapacity::Worker, :clean_gitlab_redis_queues, :aggregate_f
end
end
- describe '#remaining_capacity' do
- subject(:remaining_capacity) { worker.remaining_capacity }
-
- before do
- expect(worker).to receive(:max_running_jobs).and_return(max_capacity)
- end
-
- context 'when changing the capacity to a lower value' do
- let(:max_capacity) { -1 }
-
- it { expect(remaining_capacity).to eq(0) }
- end
-
- context 'when registering new jobs' do
- let(:max_capacity) { 2 }
-
- before do
- job_tracker.register('a-job-id')
- end
-
- it { expect(remaining_capacity).to eq(1) }
- end
-
- context 'with jobs in the queue' do
- let(:max_capacity) { 2 }
-
- before do
- expect(worker_class).to receive(:queue_size).and_return(1)
- end
-
- it { expect(remaining_capacity).to eq(1) }
- end
-
- context 'with both running jobs and queued jobs' do
- let(:max_capacity) { 10 }
-
- before do
- expect(worker_class).to receive(:queue_size).and_return(5)
- expect(worker).to receive(:running_jobs_count).and_return(3)
- end
-
- it { expect(remaining_capacity).to eq(2) }
- end
- end
-
describe '#remove_failed_jobs' do
subject(:remove_failed_jobs) { worker.remove_failed_jobs }
- before do
- job_tracker.register('a-job-id')
- allow(worker).to receive(:max_running_jobs).and_return(2)
+ it 'removes failed jobs' do
+ job_tracker.register('a-job-id', 10)
expect(job_tracker).to receive(:clean_up).and_call_original
- end
-
- context 'with failed jobs' do
- it 'update the available capacity' do
- expect { remove_failed_jobs }.to change { worker.remaining_capacity }.by(1)
- end
+ expect { remove_failed_jobs }.to change { job_tracker.running_jids.size }.by(-1)
end
end