author     GitLab Bot <gitlab-bot@gitlab.com>  2020-04-15 18:09:36 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-04-15 18:09:36 +0000
commit     09093c1278f2b3347465bf5939fda215592f0d1f (patch)
tree       07976e435f6c73de8faf7a4cc891ebcfacee62d1 /spec
parent     0d83264a7a24b33de933437355c77a4fe47e5419 (diff)
download   gitlab-ce-09093c1278f2b3347465bf5939fda215592f0d1f.tar.gz
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--  spec/factories/ci/job_artifacts.rb | 2
-rw-r--r--  spec/fixtures/lib/elasticsearch/pods_query.json | 28
-rw-r--r--  spec/fixtures/lib/elasticsearch/pods_response.json | 75
-rw-r--r--  spec/frontend/jira_import/components/jira_import_form_spec.js | 6
-rw-r--r--  spec/frontend/snippets/components/snippet_header_spec.js | 10
-rw-r--r--  spec/frontend/static_site_editor/components/invalid_content_message_spec.js | 23
-rw-r--r--  spec/frontend/static_site_editor/components/publish_toolbar_spec.js | 4
-rw-r--r--  spec/frontend/static_site_editor/components/static_site_editor_spec.js | 14
-rw-r--r--  spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap | 8
-rw-r--r--  spec/javascripts/filtered_search/filtered_search_manager_spec.js | 102
-rw-r--r--  spec/lib/gitlab/application_context_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/data_builder/pipeline_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/elasticsearch/logs/lines_spec.rb (renamed from spec/lib/gitlab/elasticsearch/logs_spec.rb) | 2
-rw-r--r--  spec/lib/gitlab/elasticsearch/logs/pods_spec.rb | 35
-rw-r--r--  spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb | 35
-rw-r--r--  spec/models/ci/job_artifact_spec.rb | 21
-rw-r--r--  spec/models/cycle_analytics/group_level_spec.rb | 2
-rw-r--r--  spec/models/merge_request_spec.rb | 66
-rw-r--r--  spec/requests/api/graphql/project/merge_request_spec.rb | 11
-rw-r--r--  spec/requests/api/merge_requests_spec.rb | 8
-rw-r--r--  spec/serializers/discussion_entity_spec.rb | 10
-rw-r--r--  spec/serializers/merge_request_basic_entity_spec.rb | 17
-rw-r--r--  spec/serializers/merge_request_poll_cached_widget_entity_spec.rb | 6
-rw-r--r--  spec/services/pod_logs/base_service_spec.rb | 27
-rw-r--r--  spec/services/pod_logs/elasticsearch_service_spec.rb | 63
-rw-r--r--  spec/services/pod_logs/kubernetes_service_spec.rb | 32
-rw-r--r--  spec/uploaders/records_uploads_spec.rb | 6
-rw-r--r--  spec/workers/concerns/cronjob_queue_spec.rb | 22
30 files changed, 651 insertions, 76 deletions
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index a259c5142fc..82383cfa2b0 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -13,7 +13,7 @@ FactoryBot.define do
end
trait :remote_store do
- file_store { JobArtifactUploader::Store::REMOTE }
+ file_store { JobArtifactUploader::Store::REMOTE }
end
after :build do |artifact|
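For context on how this trait is consumed: FactoryBot traits compose at the call site, so a spec can layer :remote_store onto the :archive trait used throughout the specs below. A usage sketch:

# Usage sketch: traits compose when building a record in a spec.
artifact = create(:ci_job_artifact, :archive, :remote_store)
artifact.file_store # => JobArtifactUploader::Store::REMOTE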
diff --git a/spec/fixtures/lib/elasticsearch/pods_query.json b/spec/fixtures/lib/elasticsearch/pods_query.json
new file mode 100644
index 00000000000..90d162b871a
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/pods_query.json
@@ -0,0 +1,28 @@
+{
+ "aggs": {
+ "pods": {
+ "aggs": {
+ "containers": {
+ "terms": {
+ "field": "kubernetes.container.name",
+ "size": 500
+ }
+ }
+ },
+ "terms": {
+ "field": "kubernetes.pod.name",
+ "size": 500
+ }
+ }
+ },
+ "query": {
+ "bool": {
+ "must": {
+ "match_phrase": {
+ "kubernetes.namespace": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ },
+ "size": 0
+}
diff --git a/spec/fixtures/lib/elasticsearch/pods_response.json b/spec/fixtures/lib/elasticsearch/pods_response.json
new file mode 100644
index 00000000000..d923f914d7c
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/pods_response.json
@@ -0,0 +1,75 @@
+{
+ "took": 8540,
+ "timed_out": false,
+ "_shards": {
+ "total": 153,
+ "successful": 153,
+ "skipped": 0,
+ "failed": 0
+ },
+ "hits": {
+ "total": 62143,
+ "max_score": 0.0,
+ "hits": [
+
+ ]
+ },
+ "aggregations": {
+ "pods": {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": 0,
+ "buckets": [
+ {
+ "key": "runner-gitlab-runner-7bbfb5dcb5-p6smb",
+ "doc_count": 19795,
+ "containers": {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": 0,
+ "buckets": [
+ {
+ "key": "runner-gitlab-runner",
+ "doc_count": 19795
+ }
+ ]
+ }
+ },
+ {
+ "key": "elastic-stack-elasticsearch-master-1",
+ "doc_count": 13185,
+ "containers": {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": 0,
+ "buckets": [
+ {
+ "key": "elasticsearch",
+ "doc_count": 13158
+ },
+ {
+ "key": "chown",
+ "doc_count": 24
+ },
+ {
+ "key": "sysctl",
+ "doc_count": 3
+ }
+ ]
+ }
+ },
+ {
+ "key": "ingress-nginx-ingress-controller-76449bcc8d-8qgl6",
+ "doc_count": 3437,
+ "containers": {
+ "doc_count_error_upper_bound": 0,
+ "sum_other_doc_count": 0,
+ "buckets": [
+ {
+ "key": "nginx-ingress-controller",
+ "doc_count": 3437
+ }
+ ]
+ }
+ }
+ ]
+ }
+ }
+}
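This fixture pairs with pods_query.json above and with the Gitlab::Elasticsearch::Logs::Pods spec further down, which expects the aggregation buckets to be reduced to name/container_names pairs. A minimal sketch of that reduction, assuming a plain parsed-JSON hash as input (not the shipped implementation):

# Minimal sketch: reduce the `pods` aggregation buckets to
# [{ name: ..., container_names: [...] }, ...] as the spec below expects.
def parse_pods(response)
  buckets = response.dig('aggregations', 'pods', 'buckets') || []
  buckets.map do |pod|
    {
      name: pod['key'],
      container_names: (pod.dig('containers', 'buckets') || []).map { |c| c['key'] }
    }
  end
end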
diff --git a/spec/frontend/jira_import/components/jira_import_form_spec.js b/spec/frontend/jira_import/components/jira_import_form_spec.js
index 315ccccd991..3215ff26bdd 100644
--- a/spec/frontend/jira_import/components/jira_import_form_spec.js
+++ b/spec/frontend/jira_import/components/jira_import_form_spec.js
@@ -1,4 +1,4 @@
-import { GlAvatar, GlNewButton, GlFormSelect, GlLabel } from '@gitlab/ui';
+import { GlAvatar, GlButton, GlFormSelect, GlLabel } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import JiraImportForm from '~/jira_import/components/jira_import_form.vue';
@@ -44,7 +44,7 @@ describe('JiraImportForm', () => {
it('shows a Next button', () => {
const nextButton = wrapper
- .findAll(GlNewButton)
+ .findAll(GlButton)
.at(0)
.text();
@@ -53,7 +53,7 @@ describe('JiraImportForm', () => {
it('shows a Cancel button', () => {
const cancelButton = wrapper
- .findAll(GlNewButton)
+ .findAll(GlButton)
.at(1)
.text();
diff --git a/spec/frontend/snippets/components/snippet_header_spec.js b/spec/frontend/snippets/components/snippet_header_spec.js
index 1b67c08e5a4..16a66c70d6a 100644
--- a/spec/frontend/snippets/components/snippet_header_spec.js
+++ b/spec/frontend/snippets/components/snippet_header_spec.js
@@ -1,7 +1,7 @@
import SnippetHeader from '~/snippets/components/snippet_header.vue';
import DeleteSnippetMutation from '~/snippets/mutations/deleteSnippet.mutation.graphql';
import { ApolloMutation } from 'vue-apollo';
-import { GlNewButton, GlModal } from '@gitlab/ui';
+import { GlButton, GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
describe('Snippet header component', () => {
@@ -89,7 +89,7 @@ describe('Snippet header component', () => {
updateSnippet: false,
},
});
- expect(wrapper.findAll(GlNewButton).length).toEqual(0);
+ expect(wrapper.findAll(GlButton).length).toEqual(0);
createComponent({
permissions: {
@@ -97,7 +97,7 @@ describe('Snippet header component', () => {
updateSnippet: false,
},
});
- expect(wrapper.findAll(GlNewButton).length).toEqual(1);
+ expect(wrapper.findAll(GlButton).length).toEqual(1);
createComponent({
permissions: {
@@ -105,7 +105,7 @@ describe('Snippet header component', () => {
updateSnippet: true,
},
});
- expect(wrapper.findAll(GlNewButton).length).toEqual(2);
+ expect(wrapper.findAll(GlButton).length).toEqual(2);
createComponent({
permissions: {
@@ -117,7 +117,7 @@ describe('Snippet header component', () => {
canCreateSnippet: true,
});
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.findAll(GlNewButton).length).toEqual(3);
+ expect(wrapper.findAll(GlButton).length).toEqual(3);
});
});
diff --git a/spec/frontend/static_site_editor/components/invalid_content_message_spec.js b/spec/frontend/static_site_editor/components/invalid_content_message_spec.js
new file mode 100644
index 00000000000..7e699e9451c
--- /dev/null
+++ b/spec/frontend/static_site_editor/components/invalid_content_message_spec.js
@@ -0,0 +1,23 @@
+import { shallowMount } from '@vue/test-utils';
+
+import InvalidContentMessage from '~/static_site_editor/components/invalid_content_message.vue';
+
+describe('~/static_site_editor/components/invalid_content_message.vue', () => {
+ let wrapper;
+ const findDocumentationButton = () => wrapper.find({ ref: 'documentationButton' });
+ const documentationUrl =
+ 'https://gitlab.com/gitlab-org/project-templates/static-site-editor-middleman';
+
+ beforeEach(() => {
+ wrapper = shallowMount(InvalidContentMessage);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the configuration button link', () => {
+ expect(findDocumentationButton().exists()).toBe(true);
+ expect(findDocumentationButton().attributes('href')).toBe(documentationUrl);
+ });
+});
diff --git a/spec/frontend/static_site_editor/components/publish_toolbar_spec.js b/spec/frontend/static_site_editor/components/publish_toolbar_spec.js
index f00fc38430f..82eb12d4c4d 100644
--- a/spec/frontend/static_site_editor/components/publish_toolbar_spec.js
+++ b/spec/frontend/static_site_editor/components/publish_toolbar_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { GlNewButton, GlLoadingIcon } from '@gitlab/ui';
+import { GlButton, GlLoadingIcon } from '@gitlab/ui';
import PublishToolbar from '~/static_site_editor/components/publish_toolbar.vue';
@@ -18,7 +18,7 @@ describe('Static Site Editor Toolbar', () => {
};
const findReturnUrlLink = () => wrapper.find({ ref: 'returnUrlLink' });
- const findSaveChangesButton = () => wrapper.find(GlNewButton);
+ const findSaveChangesButton = () => wrapper.find(GlButton);
const findLoadingIndicator = () => wrapper.find(GlLoadingIcon);
beforeEach(() => {
diff --git a/spec/frontend/static_site_editor/components/static_site_editor_spec.js b/spec/frontend/static_site_editor/components/static_site_editor_spec.js
index d427df9bd4b..19a86de9cd8 100644
--- a/spec/frontend/static_site_editor/components/static_site_editor_spec.js
+++ b/spec/frontend/static_site_editor/components/static_site_editor_spec.js
@@ -8,6 +8,7 @@ import createState from '~/static_site_editor/store/state';
import StaticSiteEditor from '~/static_site_editor/components/static_site_editor.vue';
import EditArea from '~/static_site_editor/components/edit_area.vue';
import EditHeader from '~/static_site_editor/components/edit_header.vue';
+import InvalidContentMessage from '~/static_site_editor/components/invalid_content_message.vue';
import PublishToolbar from '~/static_site_editor/components/publish_toolbar.vue';
import { sourceContent, sourceContentTitle } from '../mock_data';
@@ -29,7 +30,10 @@ describe('StaticSiteEditor', () => {
submitChangesActionMock = jest.fn();
store = new Vuex.Store({
- state: createState(initialState),
+ state: createState({
+ isSupportedContent: true,
+ ...initialState,
+ }),
getters: {
contentChanged: () => false,
...getters,
@@ -62,6 +66,7 @@ describe('StaticSiteEditor', () => {
const findEditArea = () => wrapper.find(EditArea);
const findEditHeader = () => wrapper.find(EditHeader);
+ const findInvalidContentMessage = () => wrapper.find(InvalidContentMessage);
const findPublishToolbar = () => wrapper.find(PublishToolbar);
const findSkeletonLoader = () => wrapper.find(GlSkeletonLoader);
@@ -151,6 +156,13 @@ describe('StaticSiteEditor', () => {
expect(findPublishToolbar().props('savingChanges')).toBe(true);
});
+ it('displays invalid content message when content is not supported', () => {
+ buildStore({ initialState: { isSupportedContent: false } });
+ buildWrapper();
+
+ expect(findInvalidContentMessage().exists()).toBe(true);
+ });
+
it('dispatches load content action', () => {
expect(loadContentActionMock).toHaveBeenCalled();
});
diff --git a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
index d837c793784..4cd03a690e9 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
@@ -42,7 +42,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<b-input-group-append-stub
tag="div"
>
- <gl-new-button-stub
+ <gl-button-stub
category="tertiary"
data-clipboard-text="ssh://foo.bar"
icon=""
@@ -55,7 +55,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
size="16"
title="Copy URL"
/>
- </gl-new-button-stub>
+ </gl-button-stub>
</b-input-group-append-stub>
</b-input-group-stub>
</div>
@@ -92,7 +92,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
<b-input-group-append-stub
tag="div"
>
- <gl-new-button-stub
+ <gl-button-stub
category="tertiary"
data-clipboard-text="http://foo.bar"
icon=""
@@ -105,7 +105,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
size="16"
title="Copy URL"
/>
- </gl-new-button-stub>
+ </gl-button-stub>
</b-input-group-append-stub>
</b-input-group-stub>
</div>
diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
index e5d1d1d690e..d0b54a16747 100644
--- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
@@ -8,6 +8,7 @@ import FilteredSearchVisualTokens from '~/filtered_search/filtered_search_visual
import FilteredSearchDropdownManager from '~/filtered_search/filtered_search_dropdown_manager';
import FilteredSearchManager from '~/filtered_search/filtered_search_manager';
import FilteredSearchSpecHelper from '../helpers/filtered_search_spec_helper';
+import { BACKSPACE_KEY_CODE, DELETE_KEY_CODE } from '~/lib/utils/keycodes';
describe('Filtered Search Manager', function() {
let input;
@@ -17,16 +18,35 @@ describe('Filtered Search Manager', function() {
const placeholder = 'Search or filter results...';
function dispatchBackspaceEvent(element, eventType) {
- const backspaceKey = 8;
const event = new Event(eventType);
- event.keyCode = backspaceKey;
+ event.keyCode = BACKSPACE_KEY_CODE;
element.dispatchEvent(event);
}
function dispatchDeleteEvent(element, eventType) {
- const deleteKey = 46;
const event = new Event(eventType);
- event.keyCode = deleteKey;
+ event.keyCode = DELETE_KEY_CODE;
+ element.dispatchEvent(event);
+ }
+
+ function dispatchAltBackspaceEvent(element, eventType) {
+ const event = new Event(eventType);
+ event.altKey = true;
+ event.keyCode = BACKSPACE_KEY_CODE;
+ element.dispatchEvent(event);
+ }
+
+ function dispatchCtrlBackspaceEvent(element, eventType) {
+ const event = new Event(eventType);
+ event.ctrlKey = true;
+ event.keyCode = BACKSPACE_KEY_CODE;
+ element.dispatchEvent(event);
+ }
+
+ function dispatchMetaBackspaceEvent(element, eventType) {
+ const event = new Event(eventType);
+ event.metaKey = true;
+ event.keyCode = BACKSPACE_KEY_CODE;
element.dispatchEvent(event);
}
@@ -299,6 +319,80 @@ describe('Filtered Search Manager', function() {
});
});
+ describe('checkForAltOrCtrlBackspace', () => {
+ beforeEach(() => {
+ initializeManager();
+ spyOn(FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough();
+ });
+
+ describe('tokens and no input', () => {
+ beforeEach(() => {
+ tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug'),
+ );
+ });
+
+ it('removes last token via alt-backspace', () => {
+ dispatchAltBackspaceEvent(input, 'keydown');
+
+ expect(FilteredSearchVisualTokens.removeLastTokenPartial).toHaveBeenCalled();
+ });
+
+ it('removes last token via ctrl-backspace', () => {
+ dispatchCtrlBackspaceEvent(input, 'keydown');
+
+ expect(FilteredSearchVisualTokens.removeLastTokenPartial).toHaveBeenCalled();
+ });
+ });
+
+ describe('tokens and input', () => {
+ beforeEach(() => {
+ tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug'),
+ );
+ });
+
+ it('does not remove token or change input via alt-backspace when there is existing input', () => {
+ input = manager.filteredSearchInput;
+ input.value = 'text';
+ dispatchAltBackspaceEvent(input, 'keydown');
+
+ expect(FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled();
+ expect(input.value).toEqual('text');
+ });
+
+ it('does not remove token or change input via ctrl-backspace when there is existing input', () => {
+ input = manager.filteredSearchInput;
+ input.value = 'text';
+ dispatchCtrlBackspaceEvent(input, 'keydown');
+
+ expect(FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled();
+ expect(input.value).toEqual('text');
+ });
+ });
+ });
+
+ describe('checkForMetaBackspace', () => {
+ beforeEach(() => {
+ initializeManager();
+ });
+
+ beforeEach(() => {
+ tokensContainer.innerHTML = FilteredSearchSpecHelper.createTokensContainerHTML(
+ FilteredSearchSpecHelper.createFilterVisualTokenHTML('label', '=', '~bug'),
+ );
+ });
+
+ it('removes all tokens and input', () => {
+ spyOn(FilteredSearchManager.prototype, 'clearSearch').and.callThrough();
+ dispatchMetaBackspaceEvent(input, 'keydown');
+
+ expect(manager.clearSearch).toHaveBeenCalled();
+ expect(manager.filteredSearchInput.value).toEqual('');
+ expect(DropdownUtils.getSearchQuery()).toEqual('');
+ });
+ });
+
describe('removeToken', () => {
beforeEach(() => {
initializeManager();
diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb
index 0903ca6f9e8..6674ea059a0 100644
--- a/spec/lib/gitlab/application_context_spec.rb
+++ b/spec/lib/gitlab/application_context_spec.rb
@@ -42,6 +42,18 @@ describe Gitlab::ApplicationContext do
end
end
+ describe '.current_context_include?' do
+ it 'returns true if the key was present in the context' do
+ described_class.with_context(caller_id: "Hello") do
+ expect(described_class.current_context_include?(:caller_id)).to be(true)
+ end
+ end
+
+ it 'returns false if the key was not present in the current context' do
+ expect(described_class.current_context_include?(:caller_id)).to be(false)
+ end
+ end
+
describe '#to_lazy_hash' do
let(:user) { build(:user) }
let(:project) { build(:project) }
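A plausible shape for the predicate exercised above, assuming Labkit stores context attributes under "meta."-prefixed keys (the cronjob specs at the end of this diff assert on 'meta.caller_id'); hypothetical, not necessarily the shipped code:

# Hypothetical sketch: report whether the current Labkit context
# already carries the given attribute (keys look like "meta.caller_id").
def self.current_context_include?(attribute_name)
  Labkit::Context.current.to_h.key?("meta.#{attribute_name}")
end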
diff --git a/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb b/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb
new file mode 100644
index 00000000000..f150ed4bd2e
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::BackfillPushRulesIdInProjects, :migration, schema: 2020_03_25_162730 do
+ let(:push_rules) { table(:push_rules) }
+ let(:projects) { table(:projects) }
+ let(:project_settings) { table(:project_settings) }
+ let(:namespace) { table(:namespaces).create(name: 'user', path: 'user') }
+
+ subject { described_class.new }
+
+ describe '#perform' do
+ it 'creates new project push_rules for all push rules in the range' do
+ project_1 = projects.create(id: 1, namespace_id: namespace.id)
+ project_2 = projects.create(id: 2, namespace_id: namespace.id)
+ project_3 = projects.create(id: 3, namespace_id: namespace.id)
+ project_settings_1 = project_settings.create(project_id: project_1.id)
+ project_settings_2 = project_settings.create(project_id: project_2.id)
+ project_settings_3 = project_settings.create(project_id: project_3.id)
+ push_rule_1 = push_rules.create(id: 5, is_sample: false, project_id: project_1.id)
+ push_rule_2 = push_rules.create(id: 6, is_sample: false, project_id: project_2.id)
+ push_rules.create(id: 8, is_sample: false, project_id: 3)
+
+ subject.perform(5, 7)
+
+ expect(project_settings_1.reload.push_rule_id).to eq(push_rule_1.id)
+ expect(project_settings_2.reload.push_rule_id).to eq(push_rule_2.id)
+ expect(project_settings_3.reload.push_rule_id).to be_nil
+ end
+ end
+end
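For orientation, a hedged sketch of a #perform(start_id, stop_id) consistent with this spec: one UPDATE joining push rules to their projects' settings over the scheduled ID batch (assumed SQL, not the shipped migration):

# Hedged sketch: backfill project_settings.push_rule_id from push_rules
# whose IDs fall in the scheduled batch range.
def perform(start_id, stop_id)
  ActiveRecord::Base.connection.execute(<<~SQL)
    UPDATE project_settings
    SET push_rule_id = push_rules.id
    FROM push_rules
    WHERE push_rules.project_id = project_settings.project_id
      AND push_rules.id BETWEEN #{Integer(start_id)} AND #{Integer(stop_id)}
  SQL
end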
diff --git a/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
index 664009f140f..3de65529e99 100644
--- a/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/group_stage_summary_spec.rb
@@ -127,4 +127,50 @@ describe Gitlab::CycleAnalytics::GroupStageSummary do
end
end
end
+
+ describe '#deployment_frequency' do
+ let(:from) { 6.days.ago }
+ let(:to) { nil }
+
+ subject do
+ described_class.new(group, options: {
+ from: from,
+ to: to,
+ current_user: user
+ }).data.third
+ end
+
+ it 'includes the unit: `per day`' do
+ expect(subject[:unit]).to eq(_('per day'))
+ end
+
+ before do
+ Timecop.freeze(5.days.ago) do
+ create(:deployment, :success, project: project)
+ end
+ end
+
+ context 'when `to` is nil' do
+ it 'includes range until now' do
+ # 1 deployment over 7 days
+ expect(subject[:value]).to eq(0.1)
+ end
+ end
+
+ context 'when `to` is given' do
+ let(:from) { 10.days.ago }
+ let(:to) { 10.days.from_now }
+
+ before do
+ Timecop.freeze(5.days.from_now) do
+ create(:deployment, :success, project: project)
+ end
+ end
+
+ it 'returns deployment frequency within `from` and `to` range' do
+ # 2 deployments over 20 days
+ expect(subject[:value]).to eq(0.1)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index da22da8de0f..519f5873d75 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -83,7 +83,7 @@ describe Gitlab::DataBuilder::Pipeline do
expect(merge_request_attrs[:target_branch]).to eq(merge_request.target_branch)
expect(merge_request_attrs[:target_project_id]).to eq(merge_request.target_project_id)
expect(merge_request_attrs[:state]).to eq(merge_request.state)
- expect(merge_request_attrs[:merge_status]).to eq(merge_request.merge_status)
+ expect(merge_request_attrs[:merge_status]).to eq(merge_request.public_merge_status)
expect(merge_request_attrs[:url]).to eq("http://localhost/#{merge_request.target_project.full_path}/-/merge_requests/#{merge_request.iid}")
end
end
diff --git a/spec/lib/gitlab/elasticsearch/logs_spec.rb b/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb
index 6b9d1dbef99..8b6a19fa2c5 100644
--- a/spec/lib/gitlab/elasticsearch/logs_spec.rb
+++ b/spec/lib/gitlab/elasticsearch/logs/lines_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe Gitlab::Elasticsearch::Logs do
+describe Gitlab::Elasticsearch::Logs::Lines do
let(:client) { Elasticsearch::Transport::Client }
let(:es_message_1) { { timestamp: "2019-12-13T14:35:34.034Z", pod: "production-6866bc8974-m4sk4", message: "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" } }
diff --git a/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb b/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb
new file mode 100644
index 00000000000..0a4ab0780c5
--- /dev/null
+++ b/spec/lib/gitlab/elasticsearch/logs/pods_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Elasticsearch::Logs::Pods do
+ let(:client) { Elasticsearch::Transport::Client }
+
+ let(:es_query) { JSON.parse(fixture_file('lib/elasticsearch/pods_query.json'), symbolize_names: true) }
+ let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/pods_response.json')) }
+ let(:namespace) { "autodevops-deploy-9-production" }
+
+ subject { described_class.new(client) }
+
+ describe '#pods' do
+ it 'returns the pods' do
+ expect(client).to receive(:search).with(body: es_query).and_return(es_response)
+
+ result = subject.pods(namespace)
+ expect(result).to eq([
+ {
+ name: "runner-gitlab-runner-7bbfb5dcb5-p6smb",
+ container_names: %w[runner-gitlab-runner]
+ },
+ {
+ name: "elastic-stack-elasticsearch-master-1",
+ container_names: %w[elasticsearch chown sysctl]
+ },
+ {
+ name: "ingress-nginx-ingress-controller-76449bcc8d-8qgl6",
+ container_names: %w[nginx-ingress-controller]
+ }
+ ])
+ end
+ end
+end
diff --git a/spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb b/spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb
new file mode 100644
index 00000000000..77648f5c64a
--- /dev/null
+++ b/spec/migrations/schedule_backfill_push_rules_id_in_projects_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'post_migrate', '20200325162730_schedule_backfill_push_rules_id_in_projects.rb')
+
+describe ScheduleBackfillPushRulesIdInProjects do
+ let(:push_rules) { table(:push_rules) }
+
+ it 'adds global rule association to application settings' do
+ application_settings = table(:application_settings)
+ setting = application_settings.create!
+ sample_rule = push_rules.create!(is_sample: true)
+
+ Sidekiq::Testing.fake! do
+ disable_migrations_output { migrate! }
+ end
+
+ setting.reload
+ expect(setting.push_rule_id).to eq(sample_rule.id)
+ end
+
+ it 'schedules worker to migrate project push rules' do
+ rule_1 = push_rules.create!
+ rule_2 = push_rules.create!
+
+ Sidekiq::Testing.fake! do
+ disable_migrations_output { migrate! }
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(5.minutes, rule_1.id, rule_2.id)
+ end
+ end
+end
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 80b619ed2b1..6f6ff3704b4 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -349,13 +349,16 @@ describe Ci::JobArtifact do
end
describe 'file is being stored' do
- context 'when object has nil store' do
- it 'is stored locally' do
- subject = build(:ci_job_artifact, :archive, file_store: nil)
+ subject { create(:ci_job_artifact, :archive) }
- subject.save
+ context 'when object has nil store' do
+ before do
+ subject.update_column(:file_store, nil)
+ subject.reload
+ end
- expect(subject.file_store).to be(ObjectStorage::Store::LOCAL)
+ it 'is stored locally' do
+ expect(subject.file_store).to be(nil)
expect(subject.file).to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL)
end
@@ -363,10 +366,6 @@ describe Ci::JobArtifact do
context 'when existing object has local store' do
it 'is stored locally' do
- subject = build(:ci_job_artifact, :archive)
-
- subject.save
-
expect(subject.file_store).to be(ObjectStorage::Store::LOCAL)
expect(subject.file).to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL)
@@ -380,10 +379,6 @@ describe Ci::JobArtifact do
context 'when file is stored' do
it 'is stored remotely' do
- subject = build(:ci_job_artifact, :archive)
-
- subject.save
-
expect(subject.file_store).to eq(ObjectStorage::Store::REMOTE)
expect(subject.file).not_to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::REMOTE)
diff --git a/spec/models/cycle_analytics/group_level_spec.rb b/spec/models/cycle_analytics/group_level_spec.rb
index 1f410a7c539..5ba0f078df1 100644
--- a/spec/models/cycle_analytics/group_level_spec.rb
+++ b/spec/models/cycle_analytics/group_level_spec.rb
@@ -38,7 +38,7 @@ describe CycleAnalytics::GroupLevel do
end
it 'returns medians for each stage for a specific group' do
- expect(subject.summary.map { |summary| summary[:value] }).to contain_exactly(1, 1)
+ expect(subject.summary.map { |summary| summary[:value] }).to contain_exactly(0.1, 1, 1)
end
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 50bb194ef71..52cd31ee65f 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -2335,6 +2335,21 @@ describe MergeRequest do
end
end
+ describe "#public_merge_status" do
+ using RSpec::Parameterized::TableSyntax
+ subject { build(:merge_request, merge_status: status) }
+
+ where(:status, :public_status) do
+ 'cannot_be_merged_rechecking' | 'checking'
+ 'checking' | 'checking'
+ 'cannot_be_merged' | 'cannot_be_merged'
+ end
+
+ with_them do
+ it { expect(subject.public_merge_status).to eq(public_status) }
+ end
+ end
+
describe "#head_pipeline_active? " do
it do
is_expected
@@ -3226,20 +3241,51 @@ describe MergeRequest do
expect(notification_service).to receive(:merge_request_unmergeable).with(subject).once
expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).once
- subject.mark_as_unmergeable
- subject.mark_as_unchecked
- subject.mark_as_unmergeable
+ subject.mark_as_unmergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_unmergeable!
+ end
+
+ it 'notifies conflict, but does not notify again if rechecking still results in cannot_be_merged with async mergeability check' do
+ expect(notification_service).to receive(:merge_request_unmergeable).with(subject).once
+ expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).once
+
+ subject.mark_as_checking!
+ subject.mark_as_unmergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_checking!
+ subject.mark_as_unmergeable!
end
it 'notifies conflict, whenever newly unmergeable' do
expect(notification_service).to receive(:merge_request_unmergeable).with(subject).twice
expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).twice
- subject.mark_as_unmergeable
- subject.mark_as_unchecked
- subject.mark_as_mergeable
- subject.mark_as_unchecked
- subject.mark_as_unmergeable
+ subject.mark_as_unmergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_mergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_unmergeable!
+ end
+
+ it 'notifies conflict, whenever newly unmergeable with async mergeability check' do
+ expect(notification_service).to receive(:merge_request_unmergeable).with(subject).twice
+ expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).twice
+
+ subject.mark_as_checking!
+ subject.mark_as_unmergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_checking!
+ subject.mark_as_mergeable!
+
+ subject.mark_as_unchecked!
+ subject.mark_as_checking!
+ subject.mark_as_unmergeable!
end
it 'does not notify whenever merge request is newly unmergeable due to other reasons' do
@@ -3248,7 +3294,7 @@ describe MergeRequest do
expect(notification_service).not_to receive(:merge_request_unmergeable)
expect(todo_service).not_to receive(:merge_request_became_unmergeable)
- subject.mark_as_unmergeable
+ subject.mark_as_unmergeable!
end
end
end
@@ -3261,7 +3307,7 @@ describe MergeRequest do
expect(notification_service).not_to receive(:merge_request_unmergeable)
expect(todo_service).not_to receive(:merge_request_became_unmergeable)
- subject.mark_as_unmergeable
+ subject.mark_as_unmergeable!
end
end
end
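The table-syntax examples added above fully specify the remapping #public_merge_status performs; a minimal sketch consistent with them (hedged, not necessarily the shipped method):

# Minimal sketch: expose the internal cannot_be_merged_rechecking state
# to API consumers as plain "checking"; pass every other status through.
def public_merge_status
  merge_status == 'cannot_be_merged_rechecking' ? 'checking' : merge_status
end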
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index a1b3111ff71..8d8c31c335d 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -130,4 +130,15 @@ describe 'getting merge request information nested in a project' do
expect(merge_requests_graphql_data.size).to eq 2
end
end
+
+ context 'when merge request is cannot_be_merged_rechecking' do
+ before do
+ merge_request.update!(merge_status: 'cannot_be_merged_rechecking')
+ end
+
+ it 'returns checking' do
+ post_graphql(query, current_user: current_user)
+ expect(merge_request_graphql_data['mergeStatus']).to eq('checking')
+ end
+ end
end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index a8543c8e282..40d6f171116 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -1060,6 +1060,14 @@ describe API::MergeRequests do
expect(json_response['user']['can_merge']).to be_falsy
end
+ it 'returns `checking` as its merge_status instead of `cannot_be_merged_rechecking`' do
+ merge_request.update!(merge_status: 'cannot_be_merged_rechecking')
+
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user)
+
+ expect(json_response['merge_status']).to eq 'checking'
+ end
+
context 'when merge request is unchecked' do
before do
merge_request.mark_as_unchecked!
diff --git a/spec/serializers/discussion_entity_spec.rb b/spec/serializers/discussion_entity_spec.rb
index 4adf1dc5994..b441fd08b98 100644
--- a/spec/serializers/discussion_entity_spec.rb
+++ b/spec/serializers/discussion_entity_spec.rb
@@ -73,9 +73,19 @@ describe DiscussionEntity do
:diff_file,
:truncated_diff_lines,
:position,
+ :positions,
+ :line_codes,
:line_code,
:active
)
end
+
+ context 'diff_head_compare feature is disabled' do
+ it 'does not expose positions and line_codes attributes' do
+ stub_feature_flags(merge_ref_head_comments: false)
+
+ expect(subject.keys).not_to include(:positions, :line_codes)
+ end
+ end
end
end
diff --git a/spec/serializers/merge_request_basic_entity_spec.rb b/spec/serializers/merge_request_basic_entity_spec.rb
new file mode 100644
index 00000000000..53ba66a79ac
--- /dev/null
+++ b/spec/serializers/merge_request_basic_entity_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe MergeRequestBasicEntity do
+ let(:resource) { build(:merge_request) }
+
+ subject do
+ described_class.new(resource).as_json
+ end
+
+ it 'has public_merge_status as merge_status' do
+ expect(resource).to receive(:public_merge_status).and_return('checking')
+
+ expect(subject[:merge_status]).to eq 'checking'
+ end
+end
diff --git a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
index 685abbf7e6c..9f96e5711a4 100644
--- a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
@@ -19,6 +19,12 @@ describe MergeRequestPollCachedWidgetEntity do
is_expected.to include(:target_branch_sha)
end
+ it 'has public_merge_status as merge_status' do
+ expect(resource).to receive(:public_merge_status).and_return('checking')
+
+ expect(subject[:merge_status]).to eq 'checking'
+ end
+
describe 'diverged_commits_count' do
context 'when MR open and its diverging' do
it 'returns diverged commits count' do
diff --git a/spec/services/pod_logs/base_service_spec.rb b/spec/services/pod_logs/base_service_spec.rb
index fb53321352b..3ec5dc68c60 100644
--- a/spec/services/pod_logs/base_service_spec.rb
+++ b/spec/services/pod_logs/base_service_spec.rb
@@ -13,10 +13,16 @@ describe ::PodLogs::BaseService do
let(:container_name) { 'container-0' }
let(:params) { {} }
let(:raw_pods) do
- JSON.parse([
- kube_pod(name: pod_name),
- kube_pod(name: pod_name_2)
- ].to_json, object_class: OpenStruct)
+ [
+ {
+ name: pod_name,
+ container_names: %w(container-0-0 container-0-1)
+ },
+ {
+ name: pod_name_2,
+ container_names: %w(container-1-0 container-1-1)
+ }
+ ]
end
subject { described_class.new(cluster, namespace, params: params) }
@@ -99,19 +105,6 @@ describe ::PodLogs::BaseService do
end
end
- describe '#get_raw_pods' do
- let(:service) { create(:cluster_platform_kubernetes, :configured) }
-
- it 'returns success with passthrough k8s response' do
- stub_kubeclient_pods(namespace)
-
- result = subject.send(:get_raw_pods, {})
-
- expect(result[:status]).to eq(:success)
- expect(result[:raw_pods].first).to be_a(Kubeclient::Resource)
- end
- end
-
describe '#get_pod_names' do
it 'returns success with a list of pods' do
result = subject.send(:get_pod_names, raw_pods: raw_pods)
diff --git a/spec/services/pod_logs/elasticsearch_service_spec.rb b/spec/services/pod_logs/elasticsearch_service_spec.rb
index 39aa910d878..e3efce1134b 100644
--- a/spec/services/pod_logs/elasticsearch_service_spec.rb
+++ b/spec/services/pod_logs/elasticsearch_service_spec.rb
@@ -21,8 +21,63 @@ describe ::PodLogs::ElasticsearchService do
]
end
+ let(:raw_pods) do
+ [
+ {
+ name: pod_name,
+ container_names: [container_name, "#{container_name}-1"]
+ }
+ ]
+ end
+
subject { described_class.new(cluster, namespace, params: params) }
+ describe '#get_raw_pods' do
+ before do
+ create(:clusters_applications_elastic_stack, :installed, cluster: cluster)
+ end
+
+ it 'returns success with elasticsearch response' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Pods)
+ .to receive(:pods)
+ .with(namespace)
+ .and_return(raw_pods)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:raw_pods]).to eq(raw_pods)
+ end
+
+ it 'returns an error when ES is unreachable' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(nil)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to connect to Elasticsearch')
+ end
+
+ it 'handles server errors from elasticsearch' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Pods)
+ .to receive(:pods)
+ .and_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable.new)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Elasticsearch returned status code: ServiceUnavailable')
+ end
+ end
+
describe '#check_times' do
context 'with start and end provided and valid' do
let(:params) do
@@ -168,7 +223,7 @@ describe ::PodLogs::ElasticsearchService do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
.to receive(:pod_logs)
.with(namespace, pod_name: pod_name, container_name: container_name, search: search, start_time: start_time, end_time: end_time, cursor: cursor)
.and_return({ logs: expected_logs, cursor: expected_cursor })
@@ -195,7 +250,7 @@ describe ::PodLogs::ElasticsearchService do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
.to receive(:pod_logs)
.and_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable.new)
@@ -209,9 +264,9 @@ describe ::PodLogs::ElasticsearchService do
allow_any_instance_of(::Clusters::Applications::ElasticStack)
.to receive(:elasticsearch_client)
.and_return(Elasticsearch::Transport::Client.new)
- allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs::Lines)
.to receive(:pod_logs)
- .and_raise(::Gitlab::Elasticsearch::Logs::InvalidCursor.new)
+ .and_raise(::Gitlab::Elasticsearch::Logs::Lines::InvalidCursor.new)
result = subject.send(:pod_logs, result_arg)
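The three #get_raw_pods examples above pin down its contract: a nil client yields a connection error, a transport error yields a status-code message, and otherwise pods come from Gitlab::Elasticsearch::Logs::Pods. A hedged sketch satisfying them (the success/error result helpers are assumptions):

# Hedged sketch of get_raw_pods; `success`/`error` helpers assumed.
def get_raw_pods(result)
  client = cluster.application_elastic_stack&.elasticsearch_client
  return error(_('Unable to connect to Elasticsearch')) unless client

  result[:raw_pods] = ::Gitlab::Elasticsearch::Logs::Pods.new(client).pods(namespace)
  success(result)
rescue Elasticsearch::Transport::Transport::Errors::ServiceUnavailable => e
  error(_('Elasticsearch returned status code: %{code}') % { code: e.class.name.demodulize })
end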
diff --git a/spec/services/pod_logs/kubernetes_service_spec.rb b/spec/services/pod_logs/kubernetes_service_spec.rb
index ff0554bbe5c..da89c7ee117 100644
--- a/spec/services/pod_logs/kubernetes_service_spec.rb
+++ b/spec/services/pod_logs/kubernetes_service_spec.rb
@@ -20,14 +20,36 @@ describe ::PodLogs::KubernetesService do
end
let(:raw_pods) do
- JSON.parse([
- kube_pod(name: pod_name),
- kube_pod(name: pod_name_2, container_name: container_name_2)
- ].to_json, object_class: OpenStruct)
+ [
+ {
+ name: pod_name,
+ container_names: [container_name, "#{container_name}-1"]
+ },
+ {
+ name: pod_name_2,
+ container_names: [container_name_2, "#{container_name_2}-1"]
+ }
+ ]
end
subject { described_class.new(cluster, namespace, params: params) }
+ describe '#get_raw_pods' do
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+
+ it 'returns success with passthrough k8s response' do
+ stub_kubeclient_pods(namespace)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:raw_pods]).to eq([{
+ name: 'kube-pod',
+ container_names: %w(container-0 container-0-1)
+ }])
+ end
+ end
+
describe '#pod_logs' do
let(:result_arg) do
{
@@ -233,7 +255,7 @@ describe ::PodLogs::KubernetesService do
end
it 'returns error if container_name was not specified and there are no containers on the pod' do
- raw_pods.first.spec.containers = []
+ raw_pods.first[:container_names] = []
result = subject.send(:check_container_name,
pod_name: pod_name,
diff --git a/spec/uploaders/records_uploads_spec.rb b/spec/uploaders/records_uploads_spec.rb
index 140595e58ad..71eff23c77c 100644
--- a/spec/uploaders/records_uploads_spec.rb
+++ b/spec/uploaders/records_uploads_spec.rb
@@ -78,8 +78,7 @@ describe RecordsUploads do
path: File.join('uploads', 'rails_sample.jpg'),
size: 512.kilobytes,
model: build_stubbed(:user),
- uploader: uploader.class.to_s,
- store: ::ObjectStorage::Store::LOCAL
+ uploader: uploader.class.to_s
)
uploader.upload = existing
@@ -99,8 +98,7 @@ describe RecordsUploads do
path: File.join('uploads', 'rails_sample.jpg'),
size: 512.kilobytes,
model: project,
- uploader: uploader.class.to_s,
- store: ::ObjectStorage::Store::LOCAL
+ uploader: uploader.class.to_s
)
uploader.store!(upload_fixture('rails_sample.jpg'))
diff --git a/spec/workers/concerns/cronjob_queue_spec.rb b/spec/workers/concerns/cronjob_queue_spec.rb
index ea3b7bad2e1..0cea67bf116 100644
--- a/spec/workers/concerns/cronjob_queue_spec.rb
+++ b/spec/workers/concerns/cronjob_queue_spec.rb
@@ -14,6 +14,10 @@ describe CronjobQueue do
end
end
+ before do
+ stub_const("DummyWorker", worker)
+ end
+
it 'sets the queue name of a worker' do
expect(worker.sidekiq_options['queue'].to_s).to eq('cronjob:dummy')
end
@@ -29,4 +33,22 @@ describe CronjobQueue do
expect(worker_context[:root_namespace]).to be_nil
expect(worker_context[:project]).to be_nil
end
+
+ it 'gets scheduled with caller_id set to Cronjob' do
+ worker.perform_async
+
+ job = worker.jobs.last
+
+ expect(job).to include('meta.caller_id' => 'Cronjob')
+ end
+
+ it 'does not set the caller_id if there was already one in the context' do
+ Gitlab::ApplicationContext.with_context(caller_id: 'already set') do
+ worker.perform_async
+ end
+
+ job = worker.jobs.last
+
+ expect(job).to include('meta.caller_id' => 'already set')
+ end
end
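These last two examples imply the cron context is applied only when no caller_id is already set; a sketch of that guard, reusing the predicate this commit adds to Gitlab::ApplicationContext (hook name hypothetical):

# Sketch: tag scheduled cron jobs with caller_id "Cronjob" unless the
# surrounding context already provided one.
def with_cronjob_context(&block)
  return yield if Gitlab::ApplicationContext.current_context_include?(:caller_id)

  Gitlab::ApplicationContext.with_context(caller_id: 'Cronjob', &block)
end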