author     Lin Jen-Shin <godfat@godfat.org>  2016-11-22 18:46:35 +0800
committer  Lin Jen-Shin <godfat@godfat.org>  2016-11-22 18:46:35 +0800
commit     17388eb034de3808c5c7f80c19726575dd073ce1 (patch)
tree       2f39deac4753e2a796210a42ba477ed27c6bbe13 /spec
parent     c7c4850d0b9d6751a5f6fdaa1f9c34aee6728676 (diff)
parent     acaa6d733b37b18eb44a5d6e3b79f69ee821f62a (diff)
download   gitlab-ce-17388eb034de3808c5c7f80c19726575dd073ce1.tar.gz
Merge remote-tracking branch 'upstream/master' into fix-cancelling-pipelines
* upstream/master: (133 commits)
  Restructure steps for MM slash commands service
  Add Changelog entry for CI linter validation fix
  Fix entry lookup in CI config inheritance rules
  Extend specs for global ci configuration entry
  Remove unnecessary require_relative calls from service classes
  Use single quote for strings
  Ue svg from SVGs object
  Dont trigger CI builds [ci skip]
  Revert "Test only migrations"
  Add custom copy for each empty stage
  Refactor Mattermost slash commands docs
  Fetch only one revision
  Highlight nav item on hover
  Test only migrations
  Fix migration paths tests
  Scroll CA stage panel on mobile
  Fix CSS declaration administer to administrator
  Move SVGs to JS objects for easy reuse
  Improve deploy command message
  ...
Diffstat (limited to 'spec')
-rw-r--r--  spec/controllers/projects/cycle_analytics_controller_spec.rb |  43
-rw-r--r--  spec/factories/ci/builds.rb                                   |   6
-rw-r--r--  spec/features/issues/filter_issues_spec.rb                    |  10
-rw-r--r--  spec/features/issues_spec.rb                                  |   5
-rw-r--r--  spec/features/merge_requests/deleted_source_branch_spec.rb    |  30
-rw-r--r--  spec/features/merge_requests/merge_request_versions_spec.rb   |   7
-rw-r--r--  spec/javascripts/activities_spec.js.es6                       |   2
-rw-r--r--  spec/javascripts/environments/environment_item_spec.js.es6    |   2
-rw-r--r--  spec/javascripts/merge_request_widget_spec.js                 |  54
-rw-r--r--  spec/javascripts/pretty_time_spec.js.es6                      | 134
-rw-r--r--  spec/javascripts/smart_interval_spec.js.es6                   | 159
-rw-r--r--  spec/javascripts/subbable_resource_spec.js.es6                |  65
-rw-r--r--  spec/lib/gitlab/chat_commands/command_spec.rb                 |  43
-rw-r--r--  spec/lib/gitlab/chat_commands/deploy_spec.rb                  |  79
-rw-r--r--  spec/lib/gitlab/ci/build/credentials/factory_spec.rb          |  38
-rw-r--r--  spec/lib/gitlab/ci/build/credentials/registry_spec.rb         |  41
-rw-r--r--  spec/lib/gitlab/ci/config/entry/global_spec.rb                |  51
-rw-r--r--  spec/lib/gitlab/cycle_analytics/permissions_spec.rb           | 127
-rw-r--r--  spec/lib/gitlab/file_detector_spec.rb                         |  59
-rw-r--r--  spec/models/environment_spec.rb                               |  12
-rw-r--r--  spec/models/project_spec.rb                                   |   2
-rw-r--r--  spec/models/repository_spec.rb                                | 477
-rw-r--r--  spec/requests/api/branches_spec.rb                            |   2
-rw-r--r--  spec/requests/api/project_snippets_spec.rb                    |  64
-rw-r--r--  spec/requests/ci/api/builds_spec.rb                           |  39
-rw-r--r--  spec/routing/routing_spec.rb                                  |   2
-rw-r--r--  spec/services/git_push_service_spec.rb                        | 104
-rw-r--r--  spec/services/git_tag_push_service_spec.rb                    |   8
-rw-r--r--  spec/services/merge_requests/refresh_service_spec.rb          |  10
-rw-r--r--  spec/services/system_note_service_spec.rb                     |  65
-rw-r--r--  spec/workers/project_cache_worker_spec.rb                     |  86
31 files changed, 1469 insertions, 357 deletions
diff --git a/spec/controllers/projects/cycle_analytics_controller_spec.rb b/spec/controllers/projects/cycle_analytics_controller_spec.rb
new file mode 100644
index 00000000000..a971adf0539
--- /dev/null
+++ b/spec/controllers/projects/cycle_analytics_controller_spec.rb
@@ -0,0 +1,43 @@
+require 'spec_helper'
+
+describe Projects::CycleAnalyticsController do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ project.team << [user, :master]
+ end
+
+ describe 'cycle analytics not set up flag' do
+ context 'with no data' do
+ it 'is true' do
+ get(:show,
+ namespace_id: project.namespace.to_param,
+ project_id: project.to_param)
+
+ expect(response).to be_success
+ expect(assigns(:cycle_analytics_no_data)).to eq(true)
+ end
+ end
+
+ context 'with data' do
+ before do
+ issue = create(:issue, project: project, created_at: 4.days.ago)
+ milestone = create(:milestone, project: project, created_at: 5.days.ago)
+ issue.update(milestone: milestone)
+
+ create_merge_request_closing_issue(issue)
+ end
+
+ it 'is false' do
+ get(:show,
+ namespace_id: project.namespace.to_param,
+ project_id: project.to_param)
+
+ expect(response).to be_success
+ expect(assigns(:cycle_analytics_no_data)).to eq(false)
+ end
+ end
+ end
+end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index e7fe489e5eb..c443af09075 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -59,6 +59,12 @@ FactoryGirl.define do
self.when 'manual'
end
+ trait :teardown_environment do
+ options do
+ { environment: { action: 'stop' } }
+ end
+ end
+
trait :allowed_to_fail do
allow_failure true
end
diff --git a/spec/features/issues/filter_issues_spec.rb b/spec/features/issues/filter_issues_spec.rb
index 2798db92f0f..0d19563d628 100644
--- a/spec/features/issues/filter_issues_spec.rb
+++ b/spec/features/issues/filter_issues_spec.rb
@@ -3,8 +3,8 @@ require 'rails_helper'
describe 'Filter issues', feature: true do
include WaitForAjax
- let!(:project) { create(:project) }
let!(:group) { create(:group) }
+ let!(:project) { create(:project, group: group) }
let!(:user) { create(:user)}
let!(:milestone) { create(:milestone, project: project) }
let!(:label) { create(:label, project: project) }
@@ -127,7 +127,7 @@ describe 'Filter issues', feature: true do
expect(page).to have_content wontfix.title
end
- find('body').click
+ find('.dropdown-menu-close-icon').click
expect(find('.filtered-labels')).to have_content(wontfix.title)
@@ -135,7 +135,7 @@ describe 'Filter issues', feature: true do
wait_for_ajax
find('.dropdown-menu-labels a', text: label.title).click
- find('body').click
+ find('.dropdown-menu-close-icon').click
expect(find('.filtered-labels')).to have_content(wontfix.title)
expect(find('.filtered-labels')).to have_content(label.title)
@@ -150,8 +150,8 @@ describe 'Filter issues', feature: true do
it "selects and unselects `won't fix`" do
find('.dropdown-menu-labels a', text: wontfix.title).click
find('.dropdown-menu-labels a', text: wontfix.title).click
- # Close label dropdown to load
- find('body').click
+
+ find('.dropdown-menu-close-icon').click
expect(page).not_to have_css('.filtered-labels')
end
end
diff --git a/spec/features/issues_spec.rb b/spec/features/issues_spec.rb
index cdd02a8c8e3..5c958455604 100644
--- a/spec/features/issues_spec.rb
+++ b/spec/features/issues_spec.rb
@@ -371,10 +371,12 @@ describe 'Issues', feature: true do
describe 'when I want to reset my incoming email token' do
let(:project1) { create(:project, namespace: @user.namespace) }
+ let(:issue) { create(:issue, project: project1) }
before do
allow(Gitlab.config.incoming_email).to receive(:enabled).and_return(true)
project1.team << [@user, :master]
+ project1.issues << issue
visit namespace_project_issues_path(@user.namespace, project1)
end
@@ -576,7 +578,10 @@ describe 'Issues', feature: true do
describe 'new issue by email' do
shared_examples 'show the email in the modal' do
+ let(:issue) { create(:issue, project: project) }
+
before do
+ project.issues << issue
stub_incoming_email_setting(enabled: true, address: "p+%{key}@gl.ab")
visit namespace_project_issues_path(project.namespace, project)
diff --git a/spec/features/merge_requests/deleted_source_branch_spec.rb b/spec/features/merge_requests/deleted_source_branch_spec.rb
new file mode 100644
index 00000000000..778b3a90cf3
--- /dev/null
+++ b/spec/features/merge_requests/deleted_source_branch_spec.rb
@@ -0,0 +1,30 @@
+require 'spec_helper'
+
+describe 'Deleted source branch', feature: true, js: true do
+ let(:user) { create(:user) }
+ let(:merge_request) { create(:merge_request) }
+
+ before do
+ login_as user
+ merge_request.project.team << [user, :master]
+ merge_request.update!(source_branch: 'this-branch-does-not-exist')
+ visit namespace_project_merge_request_path(
+ merge_request.project.namespace,
+ merge_request.project, merge_request
+ )
+ end
+
+ it 'shows a message about missing source branch' do
+ expect(page).to have_content(
+ 'Source branch this-branch-does-not-exist does not exist'
+ )
+ end
+
+ it 'hides Discussion, Commits and Changes tabs' do
+ within '.merge-request-details' do
+ expect(page).to have_no_content('Discussion')
+ expect(page).to have_no_content('Commits')
+ expect(page).to have_no_content('Changes')
+ end
+ end
+end
diff --git a/spec/features/merge_requests/merge_request_versions_spec.rb b/spec/features/merge_requests/merge_request_versions_spec.rb
index 23cee891bac..09451f41de4 100644
--- a/spec/features/merge_requests/merge_request_versions_spec.rb
+++ b/spec/features/merge_requests/merge_request_versions_spec.rb
@@ -3,11 +3,12 @@ require 'spec_helper'
feature 'Merge Request versions', js: true, feature: true do
let(:merge_request) { create(:merge_request, importing: true) }
let(:project) { merge_request.source_project }
+ let!(:merge_request_diff1) { merge_request.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
+ let!(:merge_request_diff2) { merge_request.merge_request_diffs.create(head_commit_sha: nil) }
+ let!(:merge_request_diff3) { merge_request.merge_request_diffs.create(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
before do
login_as :admin
- merge_request.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9')
- merge_request.merge_request_diffs.create(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e')
visit diffs_namespace_project_merge_request_path(project.namespace, project, merge_request)
end
@@ -53,7 +54,7 @@ feature 'Merge Request versions', js: true, feature: true do
project.namespace,
project,
merge_request.iid,
- diff_id: 2,
+ diff_id: merge_request_diff3.id,
start_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9'
)
end
diff --git a/spec/javascripts/activities_spec.js.es6 b/spec/javascripts/activities_spec.js.es6
index 9d855ef1060..8640cd44085 100644
--- a/spec/javascripts/activities_spec.js.es6
+++ b/spec/javascripts/activities_spec.js.es6
@@ -35,7 +35,7 @@
describe('Activities', () => {
beforeEach(() => {
fixture.load(fixtureTemplate);
- new Activities();
+ new gl.Activities();
});
for(let i = 0; i < filters.length; i++) {
diff --git a/spec/javascripts/environments/environment_item_spec.js.es6 b/spec/javascripts/environments/environment_item_spec.js.es6
index 54c93367b17..3c15e3b7719 100644
--- a/spec/javascripts/environments/environment_item_spec.js.es6
+++ b/spec/javascripts/environments/environment_item_spec.js.es6
@@ -135,7 +135,7 @@ describe('Environment item', () => {
});
it('should render environment name', () => {
- expect(component.$el.querySelector('.environment-name').textContent).toEqual(environment.name);
+ expect(component.$el.querySelector('.environment-name').textContent).toContain(environment.name);
});
describe('With deployment', () => {
diff --git a/spec/javascripts/merge_request_widget_spec.js b/spec/javascripts/merge_request_widget_spec.js
index 575b87e6f17..f38e9cb8ef5 100644
--- a/spec/javascripts/merge_request_widget_spec.js
+++ b/spec/javascripts/merge_request_widget_spec.js
@@ -1,6 +1,7 @@
/* eslint-disable space-before-function-paren, quotes, comma-dangle, dot-notation, indent, quote-props, no-var, padded-blocks, max-len */
/*= require merge_request_widget */
-/*= require lib/utils/timeago.js */
+/*= require lib/utils/timeago */
+/*= require lib/utils/datetime_utility */
(function() {
describe('MergeRequestWidget', function() {
@@ -54,6 +55,57 @@
});
});
+ describe('renderEnvironments', function() {
+ describe('should render correct timeago', function() {
+ beforeEach(function() {
+ this.environments = [{
+ id: 'test-environment-id',
+ url: 'testurl',
+ deployed_at: new Date().toISOString(),
+ deployed_at_formatted: true
+ }];
+ });
+
+ function getTimeagoText(template) {
+ var el = document.createElement('html');
+ el.innerHTML = template;
+ return el.querySelector('.js-environment-timeago').innerText.trim();
+ }
+
+ it('should render less than a minute ago text', function() {
+ spyOn(this.class.$widgetBody, 'before').and.callFake(function(template) {
+ expect(getTimeagoText(template)).toBe('less than a minute ago.');
+ });
+
+ this.class.renderEnvironments(this.environments);
+ });
+
+ it('should render about an hour ago text', function() {
+ var oneHourAgo = new Date();
+ oneHourAgo.setHours(oneHourAgo.getHours() - 1);
+
+ this.environments[0].deployed_at = oneHourAgo.toISOString();
+ spyOn(this.class.$widgetBody, 'before').and.callFake(function(template) {
+ expect(getTimeagoText(template)).toBe('about an hour ago.');
+ });
+
+ this.class.renderEnvironments(this.environments);
+ });
+
+ it('should render about 2 hours ago text', function() {
+ var twoHoursAgo = new Date();
+ twoHoursAgo.setHours(twoHoursAgo.getHours() - 2);
+
+ this.environments[0].deployed_at = twoHoursAgo.toISOString();
+ spyOn(this.class.$widgetBody, 'before').and.callFake(function(template) {
+ expect(getTimeagoText(template)).toBe('about 2 hours ago.');
+ });
+
+ this.class.renderEnvironments(this.environments);
+ });
+ });
+ });
+
return describe('getCIStatus', function() {
beforeEach(function() {
this.ciStatusData = {
diff --git a/spec/javascripts/pretty_time_spec.js.es6 b/spec/javascripts/pretty_time_spec.js.es6
new file mode 100644
index 00000000000..2e12d45f7a7
--- /dev/null
+++ b/spec/javascripts/pretty_time_spec.js.es6
@@ -0,0 +1,134 @@
+//= require lib/utils/pretty_time
+
+(() => {
+ const PrettyTime = gl.PrettyTime;
+
+ describe('PrettyTime methods', function () {
+ describe('parseSeconds', function () {
+ it('should correctly parse a negative value', function () {
+ const parser = PrettyTime.parseSeconds;
+
+ const zeroSeconds = parser(-1000);
+
+ expect(zeroSeconds.minutes).toBe(16);
+ expect(zeroSeconds.hours).toBe(0);
+ expect(zeroSeconds.days).toBe(0);
+ expect(zeroSeconds.weeks).toBe(0);
+ });
+
+ it('should correctly parse a zero value', function () {
+ const parser = PrettyTime.parseSeconds;
+
+ const zeroSeconds = parser(0);
+
+ expect(zeroSeconds.minutes).toBe(0);
+ expect(zeroSeconds.hours).toBe(0);
+ expect(zeroSeconds.days).toBe(0);
+ expect(zeroSeconds.weeks).toBe(0);
+ });
+
+ it('should correctly parse small non-zero second values', function () {
+ const parser = PrettyTime.parseSeconds;
+
+ const subOneMinute = parser(10);
+
+ expect(subOneMinute.minutes).toBe(0);
+ expect(subOneMinute.hours).toBe(0);
+ expect(subOneMinute.days).toBe(0);
+ expect(subOneMinute.weeks).toBe(0);
+
+ const aboveOneMinute = parser(100);
+
+ expect(aboveOneMinute.minutes).toBe(1);
+ expect(aboveOneMinute.hours).toBe(0);
+ expect(aboveOneMinute.days).toBe(0);
+ expect(aboveOneMinute.weeks).toBe(0);
+
+ const manyMinutes = parser(1000);
+
+ expect(manyMinutes.minutes).toBe(16);
+ expect(manyMinutes.hours).toBe(0);
+ expect(manyMinutes.days).toBe(0);
+ expect(manyMinutes.weeks).toBe(0);
+ });
+
+ it('should correctly parse large second values', function () {
+ const parser = PrettyTime.parseSeconds;
+
+ const aboveOneHour = parser(4800);
+
+ expect(aboveOneHour.minutes).toBe(20);
+ expect(aboveOneHour.hours).toBe(1);
+ expect(aboveOneHour.days).toBe(0);
+ expect(aboveOneHour.weeks).toBe(0);
+
+ const aboveOneDay = parser(110000);
+
+ expect(aboveOneDay.minutes).toBe(33);
+ expect(aboveOneDay.hours).toBe(6);
+ expect(aboveOneDay.days).toBe(3);
+ expect(aboveOneDay.weeks).toBe(0);
+
+ const aboveOneWeek = parser(25000000);
+
+ expect(aboveOneWeek.minutes).toBe(26);
+ expect(aboveOneWeek.hours).toBe(0);
+ expect(aboveOneWeek.days).toBe(3);
+ expect(aboveOneWeek.weeks).toBe(173);
+ });
+ });
+
+ describe('stringifyTime', function () {
+ it('should stringify values with all non-zero units', function () {
+ const timeObject = {
+ weeks: 1,
+ days: 4,
+ hours: 7,
+ minutes: 20,
+ };
+
+ const timeString = PrettyTime.stringifyTime(timeObject);
+
+ expect(timeString).toBe('1w 4d 7h 20m');
+ });
+
+ it('should stringify values with some non-zero units', function () {
+ const timeObject = {
+ weeks: 0,
+ days: 4,
+ hours: 0,
+ minutes: 20,
+ };
+
+ const timeString = PrettyTime.stringifyTime(timeObject);
+
+ expect(timeString).toBe('4d 20m');
+ });
+
+ it('should stringify values with no non-zero units', function () {
+ const timeObject = {
+ weeks: 0,
+ days: 0,
+ hours: 0,
+ minutes: 0,
+ };
+
+ const timeString = PrettyTime.stringifyTime(timeObject);
+
+ expect(timeString).toBe('0m');
+ });
+ });
+
+ describe('abbreviateTime', function () {
+ it('should abbreviate stringified times for weeks', function () {
+ const fullTimeString = '1w 3d 4h 5m';
+ expect(PrettyTime.abbreviateTime(fullTimeString)).toBe('1w');
+ });
+
+ it('should abbreviate stringified times for non-weeks', function () {
+ const fullTimeString = '0w 3d 4h 5m';
+ expect(PrettyTime.abbreviateTime(fullTimeString)).toBe('3d');
+ });
+ });
+ });
+})(window.gl || (window.gl = {}));
diff --git a/spec/javascripts/smart_interval_spec.js.es6 b/spec/javascripts/smart_interval_spec.js.es6
new file mode 100644
index 00000000000..651d1f0f975
--- /dev/null
+++ b/spec/javascripts/smart_interval_spec.js.es6
@@ -0,0 +1,159 @@
+//= require jquery
+//= require smart_interval
+
+(() => {
+ const DEFAULT_MAX_INTERVAL = 100;
+ const DEFAULT_STARTING_INTERVAL = 5;
+ const DEFAULT_SHORT_TIMEOUT = 75;
+ const DEFAULT_LONG_TIMEOUT = 1000;
+ const DEFAULT_INCREMENT_FACTOR = 2;
+
+ function createDefaultSmartInterval(config) {
+ const defaultParams = {
+ callback: () => {},
+ startingInterval: DEFAULT_STARTING_INTERVAL,
+ maxInterval: DEFAULT_MAX_INTERVAL,
+ incrementByFactorOf: DEFAULT_INCREMENT_FACTOR,
+ delayStartBy: 0,
+ lazyStart: false,
+ };
+
+ if (config) {
+ _.extend(defaultParams, config);
+ }
+
+ return new gl.SmartInterval(defaultParams);
+ }
+
+ describe('SmartInterval', function () {
+ describe('Increment Interval', function () {
+ beforeEach(function () {
+ this.smartInterval = createDefaultSmartInterval();
+ });
+
+ it('should increment the interval delay', function (done) {
+ const interval = this.smartInterval;
+ setTimeout(() => {
+ const intervalConfig = this.smartInterval.cfg;
+ const iterationCount = 4;
+ const maxIntervalAfterIterations = intervalConfig.startingInterval *
+ Math.pow(intervalConfig.incrementByFactorOf, (iterationCount - 1)); // 40
+ const currentInterval = interval.getCurrentInterval();
+
+ // Provide some flexibility for performance of testing environment
+ expect(currentInterval).toBeGreaterThan(intervalConfig.startingInterval);
+ expect(currentInterval <= maxIntervalAfterIterations).toBeTruthy();
+
+ done();
+ }, DEFAULT_SHORT_TIMEOUT); // 4 iterations, increment by 2x = (5 + 10 + 20 + 40)
+ });
+
+ it('should not increment past maxInterval', function (done) {
+ const interval = this.smartInterval;
+
+ setTimeout(() => {
+ const currentInterval = interval.getCurrentInterval();
+ expect(currentInterval).toBe(interval.cfg.maxInterval);
+
+ done();
+ }, DEFAULT_LONG_TIMEOUT);
+ });
+ });
+
+ describe('Public methods', function () {
+ beforeEach(function () {
+ this.smartInterval = createDefaultSmartInterval();
+ });
+
+ it('should cancel an interval', function (done) {
+ const interval = this.smartInterval;
+
+ setTimeout(() => {
+ interval.cancel();
+
+ const intervalId = interval.state.intervalId;
+ const currentInterval = interval.getCurrentInterval();
+ const intervalLowerLimit = interval.cfg.startingInterval;
+
+ expect(intervalId).toBeUndefined();
+ expect(currentInterval).toBe(intervalLowerLimit);
+
+ done();
+ }, DEFAULT_SHORT_TIMEOUT);
+ });
+
+ it('should resume an interval', function (done) {
+ const interval = this.smartInterval;
+
+ setTimeout(() => {
+ interval.cancel();
+
+ interval.resume();
+
+ const intervalId = interval.state.intervalId;
+
+ expect(intervalId).toBeTruthy();
+
+ done();
+ }, DEFAULT_SHORT_TIMEOUT);
+ });
+ });
+
+ describe('DOM Events', function () {
+ beforeEach(function () {
+ // This ensures DOM and DOM events are initialized for these specs.
+ fixture.set('<div></div>');
+
+ this.smartInterval = createDefaultSmartInterval();
+ });
+
+ it('should pause when page is not visible', function (done) {
+ const interval = this.smartInterval;
+
+ setTimeout(() => {
+ expect(interval.state.intervalId).toBeTruthy();
+
+ // simulates triggering of visibilitychange event
+ interval.state.pageVisibility = 'hidden';
+ interval.handleVisibilityChange();
+
+ expect(interval.state.intervalId).toBeUndefined();
+ done();
+ }, DEFAULT_SHORT_TIMEOUT);
+ });
+
+ it('should resume when page becomes visible at the previous interval', function (done) {
+ const interval = this.smartInterval;
+
+ setTimeout(() => {
+ expect(interval.state.intervalId).toBeTruthy();
+
+ // simulates triggering of visibilitychange event
+ interval.state.pageVisibility = 'hidden';
+ interval.handleVisibilityChange();
+
+ expect(interval.state.intervalId).toBeUndefined();
+
+ // simulates triggering of visibilitychange event
+ interval.state.pageVisibility = 'visible';
+ interval.handleVisibilityChange();
+
+ expect(interval.state.intervalId).toBeTruthy();
+
+ done();
+ }, DEFAULT_SHORT_TIMEOUT);
+ });
+
+ it('should cancel on page unload', function (done) {
+ const interval = this.smartInterval;
+
+ setTimeout(() => {
+ $(document).trigger('page:before-unload');
+ expect(interval.state.intervalId).toBeUndefined();
+ expect(interval.getCurrentInterval()).toBe(interval.cfg.startingInterval);
+ done();
+ }, DEFAULT_SHORT_TIMEOUT);
+ });
+ });
+ });
+})(window.gl || (window.gl = {}));
diff --git a/spec/javascripts/subbable_resource_spec.js.es6 b/spec/javascripts/subbable_resource_spec.js.es6
new file mode 100644
index 00000000000..df395296791
--- /dev/null
+++ b/spec/javascripts/subbable_resource_spec.js.es6
@@ -0,0 +1,65 @@
+/* eslint-disable */
+//= vue
+//= vue-resource
+//= require jquery
+//= require subbable_resource
+
+/*
+* Test that each rest verb calls the publish and subscribe function and passes the correct value back
+*
+*
+* */
+((global) => {
+ describe('Subbable Resource', function () {
+ describe('PubSub', function () {
+ beforeEach(function () {
+ this.MockResource = new global.SubbableResource('https://example.com');
+ });
+ it('should successfully add a single subscriber', function () {
+ const callback = () => {};
+ this.MockResource.subscribe(callback);
+
+ expect(this.MockResource.subscribers.length).toBe(1);
+ expect(this.MockResource.subscribers[0]).toBe(callback);
+ });
+
+ it('should successfully add multiple subscribers', function () {
+ const callbackOne = () => {};
+ const callbackTwo = () => {};
+ const callbackThree = () => {};
+
+ this.MockResource.subscribe(callbackOne);
+ this.MockResource.subscribe(callbackTwo);
+ this.MockResource.subscribe(callbackThree);
+
+ expect(this.MockResource.subscribers.length).toBe(3);
+ });
+
+ it('should successfully publish an update to a single subscriber', function () {
+ const state = { myprop: 1 };
+
+ const callbacks = {
+ one: (data) => expect(data.myprop).toBe(2),
+ two: (data) => expect(data.myprop).toBe(2),
+ three: (data) => expect(data.myprop).toBe(2)
+ };
+
+ const spyOne = spyOn(callbacks, 'one');
+ const spyTwo = spyOn(callbacks, 'two');
+ const spyThree = spyOn(callbacks, 'three');
+
+ this.MockResource.subscribe(callbacks.one);
+ this.MockResource.subscribe(callbacks.two);
+ this.MockResource.subscribe(callbacks.three);
+
+ state.myprop++;
+
+ this.MockResource.publish(state);
+
+ expect(spyOne).toHaveBeenCalled();
+ expect(spyTwo).toHaveBeenCalled();
+ expect(spyThree).toHaveBeenCalled();
+ });
+ });
+ });
+})(window.gl || (window.gl = {}));
diff --git a/spec/lib/gitlab/chat_commands/command_spec.rb b/spec/lib/gitlab/chat_commands/command_spec.rb
index 8cedbb0240f..bfc6818ac08 100644
--- a/spec/lib/gitlab/chat_commands/command_spec.rb
+++ b/spec/lib/gitlab/chat_commands/command_spec.rb
@@ -4,9 +4,9 @@ describe Gitlab::ChatCommands::Command, service: true do
let(:project) { create(:empty_project) }
let(:user) { create(:user) }
- subject { described_class.new(project, user, params).execute }
-
describe '#execute' do
+ subject { described_class.new(project, user, params).execute }
+
context 'when no command is available' do
let(:params) { { text: 'issue show 1' } }
let(:project) { create(:project, has_external_issue_tracker: true) }
@@ -51,5 +51,44 @@ describe Gitlab::ChatCommands::Command, service: true do
expect(subject[:text]).to match(/\/issues\/\d+/)
end
end
+
+ context 'when trying to do deployment' do
+ let(:params) { { text: 'deploy staging to production' } }
+ let!(:build) { create(:ci_build, project: project) }
+ let!(:staging) { create(:environment, name: 'staging', project: project) }
+ let!(:deployment) { create(:deployment, environment: staging, deployable: build) }
+ let!(:manual) do
+ create(:ci_build, :manual, project: project, pipeline: build.pipeline, name: 'first', environment: 'production')
+ end
+
+ context 'and user can not create deployment' do
+ it 'returns action' do
+ expect(subject[:response_type]).to be(:ephemeral)
+ expect(subject[:text]).to start_with('Whoops! That action is not allowed')
+ end
+ end
+
+ context 'and user does have deployment permission' do
+ before do
+ project.team << [user, :developer]
+ end
+
+ it 'returns action' do
+ expect(subject[:text]).to include('Deployment from staging to production started')
+ expect(subject[:response_type]).to be(:in_channel)
+ end
+
+ context 'when duplicate action exists' do
+ let!(:manual2) do
+ create(:ci_build, :manual, project: project, pipeline: build.pipeline, name: 'second', environment: 'production')
+ end
+
+ it 'returns error' do
+ expect(subject[:response_type]).to be(:ephemeral)
+ expect(subject[:text]).to include('Too many actions defined')
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/chat_commands/deploy_spec.rb b/spec/lib/gitlab/chat_commands/deploy_spec.rb
new file mode 100644
index 00000000000..bd8099c92da
--- /dev/null
+++ b/spec/lib/gitlab/chat_commands/deploy_spec.rb
@@ -0,0 +1,79 @@
+require 'spec_helper'
+
+describe Gitlab::ChatCommands::Deploy, service: true do
+ describe '#execute' do
+ let(:project) { create(:empty_project) }
+ let(:user) { create(:user) }
+ let(:regex_match) { described_class.match('deploy staging to production') }
+
+ before do
+ project.team << [user, :master]
+ end
+
+ subject do
+ described_class.new(project, user).execute(regex_match)
+ end
+
+ context 'if no environment is defined' do
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'with environment' do
+ let!(:staging) { create(:environment, name: 'staging', project: project) }
+ let!(:build) { create(:ci_build, project: project) }
+ let!(:deployment) { create(:deployment, environment: staging, deployable: build) }
+
+ context 'without actions' do
+ it 'returns nil' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'with action' do
+ let!(:manual1) do
+ create(:ci_build, :manual, project: project, pipeline: build.pipeline, name: 'first', environment: 'production')
+ end
+
+ it 'returns success result' do
+ expect(subject.type).to eq(:success)
+ expect(subject.message).to include('Deployment from staging to production started')
+ end
+
+ context 'when duplicate action exists' do
+ let!(:manual2) do
+ create(:ci_build, :manual, project: project, pipeline: build.pipeline, name: 'second', environment: 'production')
+ end
+
+ it 'returns error' do
+ expect(subject.type).to eq(:error)
+ expect(subject.message).to include('Too many actions defined')
+ end
+ end
+
+ context 'when teardown action exists' do
+ let!(:teardown) do
+ create(:ci_build, :manual, :teardown_environment,
+ project: project, pipeline: build.pipeline,
+ name: 'teardown', environment: 'production')
+ end
+
+ it 'returns success result' do
+ expect(subject.type).to eq(:success)
+ expect(subject.message).to include('Deployment from staging to production started')
+ end
+ end
+ end
+ end
+ end
+
+ describe 'self.match' do
+ it 'matches the environment' do
+ match = described_class.match('deploy staging to production')
+
+ expect(match[:from]).to eq('staging')
+ expect(match[:to]).to eq('production')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/credentials/factory_spec.rb b/spec/lib/gitlab/ci/build/credentials/factory_spec.rb
new file mode 100644
index 00000000000..10b4b7a8826
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/credentials/factory_spec.rb
@@ -0,0 +1,38 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Credentials::Factory do
+ let(:build) { create(:ci_build, name: 'spinach', stage: 'test', stage_idx: 0) }
+
+ subject { Gitlab::Ci::Build::Credentials::Factory.new(build).create! }
+
+ class TestProvider
+ def initialize(build); end
+ end
+
+ before do
+ allow_any_instance_of(Gitlab::Ci::Build::Credentials::Factory).to receive(:providers).and_return([TestProvider])
+ end
+
+ context 'when provider is valid' do
+ before do
+ allow_any_instance_of(TestProvider).to receive(:valid?).and_return(true)
+ end
+
+ it 'generates an array of credentials objects' do
+ is_expected.to be_kind_of(Array)
+ is_expected.not_to be_empty
+ expect(subject.first).to be_kind_of(TestProvider)
+ end
+ end
+
+ context 'when provider is not valid' do
+ before do
+ allow_any_instance_of(TestProvider).to receive(:valid?).and_return(false)
+ end
+
+ it 'generates an array without specific credential object' do
+ is_expected.to be_kind_of(Array)
+ is_expected.to be_empty
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/credentials/registry_spec.rb b/spec/lib/gitlab/ci/build/credentials/registry_spec.rb
new file mode 100644
index 00000000000..84e44dd53e2
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/credentials/registry_spec.rb
@@ -0,0 +1,41 @@
+require 'spec_helper'
+
+describe Gitlab::Ci::Build::Credentials::Registry do
+ let(:build) { create(:ci_build, name: 'spinach', stage: 'test', stage_idx: 0) }
+ let(:registry_url) { 'registry.example.com:5005' }
+
+ subject { Gitlab::Ci::Build::Credentials::Registry.new(build) }
+
+ before do
+ stub_container_registry_config(host_port: registry_url)
+ end
+
+ it 'contains valid DockerRegistry credentials' do
+ expect(subject).to be_kind_of(Gitlab::Ci::Build::Credentials::Registry)
+
+ expect(subject.username).to eq 'gitlab-ci-token'
+ expect(subject.password).to eq build.token
+ expect(subject.url).to eq registry_url
+ expect(subject.type).to eq 'registry'
+ end
+
+ describe '.valid?' do
+ subject { Gitlab::Ci::Build::Credentials::Registry.new(build).valid? }
+
+ context 'when registry is enabled' do
+ before do
+ stub_container_registry_config(enabled: true)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when registry is disabled' do
+ before do
+ stub_container_registry_config(enabled: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/global_spec.rb b/spec/lib/gitlab/ci/config/entry/global_spec.rb
index 5e5c5dcc385..e64c8d46bd8 100644
--- a/spec/lib/gitlab/ci/config/entry/global_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/global_spec.rb
@@ -13,7 +13,7 @@ describe Gitlab::Ci::Config::Entry::Global do
end
end
- context 'when hash is valid' do
+ context 'when configuration is valid' do
context 'when some entries defined' do
let(:hash) do
{ before_script: ['ls', 'pwd'],
@@ -225,29 +225,44 @@ describe Gitlab::Ci::Config::Entry::Global do
end
end
- context 'when hash is not valid' do
+ context 'when configuration is not valid' do
before { global.compose! }
- let(:hash) do
- { before_script: 'ls' }
- end
+ context 'when before script is not an array' do
+ let(:hash) do
+ { before_script: 'ls' }
+ end
- describe '#valid?' do
- it 'is not valid' do
- expect(global).not_to be_valid
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(global).not_to be_valid
+ end
end
- end
- describe '#errors' do
- it 'reports errors from child nodes' do
- expect(global.errors)
- .to include 'before_script config should be an array of strings'
+ describe '#errors' do
+ it 'reports errors from child nodes' do
+ expect(global.errors)
+ .to include 'before_script config should be an array of strings'
+ end
+ end
+
+ describe '#before_script_value' do
+ it 'returns nil' do
+ expect(global.before_script_value).to be_nil
+ end
end
end
- describe '#before_script_value' do
- it 'returns nil' do
- expect(global.before_script_value).to be_nil
+ context 'when job does not have commands' do
+ let(:hash) do
+ { before_script: ['echo 123'], rspec: { stage: 'test' } }
+ end
+
+ describe '#errors' do
+ it 'reports errors about missing script' do
+ expect(global.errors)
+ .to include "jobs:rspec script can't be blank"
+ end
end
end
end
@@ -281,7 +296,7 @@ describe Gitlab::Ci::Config::Entry::Global do
{ cache: { key: 'a' }, rspec: { script: 'ls' } }
end
- context 'when node exists' do
+ context 'when entry exists' do
it 'returns correct entry' do
expect(global[:cache])
.to be_an_instance_of Gitlab::Ci::Config::Entry::Cache
@@ -289,7 +304,7 @@ describe Gitlab::Ci::Config::Entry::Global do
end
end
- context 'when node does not exist' do
+ context 'when entry does not exist' do
it 'always return unspecified node' do
expect(global[:some][:unknown][:node])
.not_to be_specified
diff --git a/spec/lib/gitlab/cycle_analytics/permissions_spec.rb b/spec/lib/gitlab/cycle_analytics/permissions_spec.rb
new file mode 100644
index 00000000000..dc4f7dc69db
--- /dev/null
+++ b/spec/lib/gitlab/cycle_analytics/permissions_spec.rb
@@ -0,0 +1,127 @@
+require 'spec_helper'
+
+describe Gitlab::CycleAnalytics::Permissions do
+ let(:project) { create(:empty_project) }
+ let(:user) { create(:user) }
+
+ subject { described_class.get(user: user, project: project) }
+
+ context 'user with no relation to the project' do
+ it 'has no permissions to issue stage' do
+ expect(subject[:issue]).to eq(false)
+ end
+
+ it 'has no permissions to test stage' do
+ expect(subject[:test]).to eq(false)
+ end
+
+ it 'has no permissions to staging stage' do
+ expect(subject[:staging]).to eq(false)
+ end
+
+ it 'has no permissions to production stage' do
+ expect(subject[:production]).to eq(false)
+ end
+
+ it 'has no permissions to code stage' do
+ expect(subject[:code]).to eq(false)
+ end
+
+ it 'has no permissions to review stage' do
+ expect(subject[:review]).to eq(false)
+ end
+
+ it 'has no permissions to plan stage' do
+ expect(subject[:plan]).to eq(false)
+ end
+ end
+
+ context 'user is master' do
+ before do
+ project.team << [user, :master]
+ end
+
+ it 'has permissions to issue stage' do
+ expect(subject[:issue]).to eq(true)
+ end
+
+ it 'has permissions to test stage' do
+ expect(subject[:test]).to eq(true)
+ end
+
+ it 'has permissions to staging stage' do
+ expect(subject[:staging]).to eq(true)
+ end
+
+ it 'has permissions to production stage' do
+ expect(subject[:production]).to eq(true)
+ end
+
+ it 'has permissions to code stage' do
+ expect(subject[:code]).to eq(true)
+ end
+
+ it 'has permissions to review stage' do
+ expect(subject[:review]).to eq(true)
+ end
+
+ it 'has permissions to plan stage' do
+ expect(subject[:plan]).to eq(true)
+ end
+ end
+
+ context 'user has no build permissions' do
+ before do
+ project.team << [user, :guest]
+ end
+
+ it 'has permissions to issue stage' do
+ expect(subject[:issue]).to eq(true)
+ end
+
+ it 'has no permissions to test stage' do
+ expect(subject[:test]).to eq(false)
+ end
+
+ it 'has no permissions to staging stage' do
+ expect(subject[:staging]).to eq(false)
+ end
+ end
+
+ context 'user has no merge request permissions' do
+ before do
+ project.team << [user, :guest]
+ end
+
+ it 'has permissions to issue stage' do
+ expect(subject[:issue]).to eq(true)
+ end
+
+ it 'has no permissions to code stage' do
+ expect(subject[:code]).to eq(false)
+ end
+
+ it 'has no permissions to review stage' do
+ expect(subject[:review]).to eq(false)
+ end
+ end
+
+ context 'user has no issue permissions' do
+ before do
+ project.team << [user, :developer]
+ project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED)
+ end
+
+ it 'has permissions to code stage' do
+ expect(subject[:code]).to eq(true)
+ end
+
+ it 'has no permissions to issue stage' do
+ expect(subject[:issue]).to eq(false)
+ end
+
+ it 'has no permissions to production stage' do
+ expect(subject[:production]).to eq(false)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/file_detector_spec.rb b/spec/lib/gitlab/file_detector_spec.rb
new file mode 100644
index 00000000000..e5ba13bbaf8
--- /dev/null
+++ b/spec/lib/gitlab/file_detector_spec.rb
@@ -0,0 +1,59 @@
+require 'spec_helper'
+
+describe Gitlab::FileDetector do
+ describe '.types_in_paths' do
+ it 'returns the file types for the given paths' do
+ expect(described_class.types_in_paths(%w(README.md CHANGELOG VERSION VERSION))).
+ to eq(%i{readme changelog version})
+ end
+
+ it 'does not include unrecognized file paths' do
+ expect(described_class.types_in_paths(%w(README.md foo.txt))).
+ to eq(%i{readme})
+ end
+ end
+
+ describe '.type_of' do
+ it 'returns the type of a README file' do
+ expect(described_class.type_of('README.md')).to eq(:readme)
+ end
+
+ it 'returns the type of a changelog file' do
+ %w(CHANGELOG HISTORY CHANGES NEWS).each do |file|
+ expect(described_class.type_of(file)).to eq(:changelog)
+ end
+ end
+
+ it 'returns the type of a license file' do
+ %w(LICENSE LICENCE COPYING).each do |file|
+ expect(described_class.type_of(file)).to eq(:license)
+ end
+ end
+
+ it 'returns the type of a version file' do
+ expect(described_class.type_of('VERSION')).to eq(:version)
+ end
+
+ it 'returns the type of a .gitignore file' do
+ expect(described_class.type_of('.gitignore')).to eq(:gitignore)
+ end
+
+ it 'returns the type of a Koding config file' do
+ expect(described_class.type_of('.koding.yml')).to eq(:koding)
+ end
+
+ it 'returns the type of a GitLab CI config file' do
+ expect(described_class.type_of('.gitlab-ci.yml')).to eq(:gitlab_ci)
+ end
+
+ it 'returns the type of an avatar' do
+ %w(logo.gif logo.png logo.jpg).each do |file|
+ expect(described_class.type_of(file)).to eq(:avatar)
+ end
+ end
+
+ it 'returns nil for an unknown file' do
+ expect(described_class.type_of('foo.txt')).to be_nil
+ end
+ end
+end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 60bbe3fcd72..d06665197db 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -9,6 +9,7 @@ describe Environment, models: true do
it { is_expected.to delegate_method(:last_deployment).to(:deployments).as(:last) }
it { is_expected.to delegate_method(:stop_action).to(:last_deployment) }
+ it { is_expected.to delegate_method(:manual_actions).to(:last_deployment) }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_uniqueness_of(:name).scoped_to(:project_id) }
@@ -187,4 +188,15 @@ describe Environment, models: true do
it { is_expected.to be false }
end
end
+
+ describe '#actions_for' do
+ let(:deployment) { create(:deployment, environment: environment) }
+ let(:pipeline) { deployment.deployable.pipeline }
+ let!(:review_action) { create(:ci_build, :manual, name: 'review-apps', pipeline: pipeline, environment: 'review/$CI_BUILD_REF_NAME' )}
+ let!(:production_action) { create(:ci_build, :manual, name: 'production', pipeline: pipeline, environment: 'production' )}
+
+ it 'returns a list of actions with matching environment' do
+ expect(environment.actions_for('review/master')).to contain_exactly(review_action)
+ end
+ end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 25458e20618..e183fa88873 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -1572,7 +1572,7 @@ describe Project, models: true do
end
it 'expires the avatar cache' do
- expect(project.repository).to receive(:expire_avatar_cache).with(project.default_branch)
+ expect(project.repository).to receive(:expire_avatar_cache)
project.change_head(project.default_branch)
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 72ac41f3472..04afb8ebc98 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -464,11 +464,7 @@ describe Repository, models: true do
end
end
- describe "#changelog" do
- before do
- repository.send(:cache).expire(:changelog)
- end
-
+ describe "#changelog", caching: true do
it 'accepts changelog' do
expect(repository.tree).to receive(:blobs).and_return([TestBlob.new('changelog')])
@@ -500,17 +496,16 @@ describe Repository, models: true do
end
end
- describe "#license_blob" do
+ describe "#license_blob", caching: true do
before do
- repository.send(:cache).expire(:license_blob)
repository.remove_file(user, 'LICENSE', 'Remove LICENSE', 'master')
end
it 'handles when HEAD points to non-existent ref' do
repository.commit_file(user, 'LICENSE', 'Copyright!', 'Add LICENSE', 'master', false)
- rugged = double('rugged')
- expect(rugged).to receive(:head_unborn?).and_return(true)
- expect(repository).to receive(:rugged).and_return(rugged)
+
+ allow(repository).to receive(:file_on_head).
+ and_raise(Rugged::ReferenceError)
expect(repository.license_blob).to be_nil
end
@@ -537,22 +532,18 @@ describe Repository, models: true do
end
end
- describe '#license_key' do
+ describe '#license_key', caching: true do
before do
- repository.send(:cache).expire(:license_key)
repository.remove_file(user, 'LICENSE', 'Remove LICENSE', 'master')
end
- it 'handles when HEAD points to non-existent ref' do
- repository.commit_file(user, 'LICENSE', 'Copyright!', 'Add LICENSE', 'master', false)
- rugged = double('rugged')
- expect(rugged).to receive(:head_unborn?).and_return(true)
- expect(repository).to receive(:rugged).and_return(rugged)
-
+ it 'returns nil when no license is detected' do
expect(repository.license_key).to be_nil
end
- it 'returns nil when no license is detected' do
+ it 'returns nil when the repository does not exist' do
+ expect(repository).to receive(:exists?).and_return(false)
+
expect(repository.license_key).to be_nil
end
@@ -569,7 +560,7 @@ describe Repository, models: true do
end
end
- describe "#gitlab_ci_yml" do
+ describe "#gitlab_ci_yml", caching: true do
it 'returns valid file' do
files = [TestBlob.new('file'), TestBlob.new('.gitlab-ci.yml'), TestBlob.new('copying')]
expect(repository.tree).to receive(:blobs).and_return(files)
@@ -583,7 +574,7 @@ describe Repository, models: true do
end
it 'returns nil for empty repository' do
- expect(repository).to receive(:empty?).and_return(true)
+ allow(repository).to receive(:file_on_head).and_raise(Rugged::ReferenceError)
expect(repository.gitlab_ci_yml).to be_nil
end
end
@@ -778,7 +769,6 @@ describe Repository, models: true do
expect(repository).not_to receive(:expire_emptiness_caches)
expect(repository).to receive(:expire_branches_cache)
expect(repository).to receive(:expire_has_visible_content_cache)
- expect(repository).to receive(:expire_branch_count_cache)
repository.update_branch_with_hooks(user, 'new-feature') { new_rev }
end
@@ -797,7 +787,6 @@ describe Repository, models: true do
expect(empty_repository).to receive(:expire_emptiness_caches)
expect(empty_repository).to receive(:expire_branches_cache)
expect(empty_repository).to receive(:expire_has_visible_content_cache)
- expect(empty_repository).to receive(:expire_branch_count_cache)
empty_repository.commit_file(user, 'CHANGELOG', 'Changelog!',
'Updates file content', 'master', false)
@@ -811,8 +800,7 @@ describe Repository, models: true do
end
it 'returns false when a repository does not exist' do
- expect(repository.raw_repository).to receive(:rugged).
- and_raise(Gitlab::Git::Repository::NoRepository)
+ allow(repository).to receive(:refs_directory_exists?).and_return(false)
expect(repository.exists?).to eq(false)
end
@@ -916,34 +904,6 @@ describe Repository, models: true do
end
end
- describe '#expire_cache' do
- it 'expires all caches' do
- expect(repository).to receive(:expire_branch_cache)
-
- repository.expire_cache
- end
-
- it 'expires the caches for a specific branch' do
- expect(repository).to receive(:expire_branch_cache).with('master')
-
- repository.expire_cache('master')
- end
-
- it 'expires the emptiness caches for an empty repository' do
- expect(repository).to receive(:empty?).and_return(true)
- expect(repository).to receive(:expire_emptiness_caches)
-
- repository.expire_cache
- end
-
- it 'does not expire the emptiness caches for a non-empty repository' do
- expect(repository).to receive(:empty?).and_return(false)
- expect(repository).not_to receive(:expire_emptiness_caches)
-
- repository.expire_cache
- end
- end
-
describe '#expire_root_ref_cache' do
it 'expires the root reference cache' do
repository.root_ref
@@ -1003,12 +963,23 @@ describe Repository, models: true do
describe '#expire_emptiness_caches' do
let(:cache) { repository.send(:cache) }
- it 'expires the caches' do
+ it 'expires the caches for an empty repository' do
+ allow(repository).to receive(:empty?).and_return(true)
+
expect(cache).to receive(:expire).with(:empty?)
expect(repository).to receive(:expire_has_visible_content_cache)
repository.expire_emptiness_caches
end
+
+ it 'does not expire the cache for a non-empty repository' do
+ allow(repository).to receive(:empty?).and_return(false)
+
+ expect(cache).not_to receive(:expire).with(:empty?)
+ expect(repository).not_to receive(:expire_has_visible_content_cache)
+
+ repository.expire_emptiness_caches
+ end
end
describe :skip_merged_commit do
@@ -1120,24 +1091,12 @@ describe Repository, models: true do
repository.before_delete
end
- it 'flushes the tag count cache' do
- expect(repository).to receive(:expire_tag_count_cache)
-
- repository.before_delete
- end
-
it 'flushes the branches cache' do
expect(repository).to receive(:expire_branches_cache)
repository.before_delete
end
- it 'flushes the branch count cache' do
- expect(repository).to receive(:expire_branch_count_cache)
-
- repository.before_delete
- end
-
it 'flushes the root ref cache' do
expect(repository).to receive(:expire_root_ref_cache)
@@ -1162,36 +1121,18 @@ describe Repository, models: true do
allow(repository).to receive(:exists?).and_return(true)
end
- it 'flushes the caches that depend on repository data' do
- expect(repository).to receive(:expire_cache)
-
- repository.before_delete
- end
-
it 'flushes the tags cache' do
expect(repository).to receive(:expire_tags_cache)
repository.before_delete
end
- it 'flushes the tag count cache' do
- expect(repository).to receive(:expire_tag_count_cache)
-
- repository.before_delete
- end
-
it 'flushes the branches cache' do
expect(repository).to receive(:expire_branches_cache)
repository.before_delete
end
- it 'flushes the branch count cache' do
- expect(repository).to receive(:expire_branch_count_cache)
-
- repository.before_delete
- end
-
it 'flushes the root ref cache' do
expect(repository).to receive(:expire_root_ref_cache)
@@ -1222,8 +1163,9 @@ describe Repository, models: true do
describe '#before_push_tag' do
it 'flushes the cache' do
- expect(repository).to receive(:expire_cache)
- expect(repository).to receive(:expire_tag_count_cache)
+ expect(repository).to receive(:expire_statistics_caches)
+ expect(repository).to receive(:expire_emptiness_caches)
+ expect(repository).to receive(:expire_tags_cache)
repository.before_push_tag
end
@@ -1240,17 +1182,23 @@ describe Repository, models: true do
describe '#after_import' do
it 'flushes and builds the cache' do
expect(repository).to receive(:expire_content_cache)
- expect(repository).to receive(:build_cache)
+ expect(repository).to receive(:expire_tags_cache)
+ expect(repository).to receive(:expire_branches_cache)
repository.after_import
end
end
describe '#after_push_commit' do
- it 'flushes the cache' do
- expect(repository).to receive(:expire_cache).with('master', '123')
+ it 'expires statistics caches' do
+ expect(repository).to receive(:expire_statistics_caches).
+ and_call_original
- repository.after_push_commit('master', '123')
+ expect(repository).to receive(:expire_branch_cache).
+ with('master').
+ and_call_original
+
+ repository.after_push_commit('master')
end
end
@@ -1302,7 +1250,8 @@ describe Repository, models: true do
describe '#before_remove_tag' do
it 'flushes the tag cache' do
- expect(repository).to receive(:expire_tag_count_cache)
+ expect(repository).to receive(:expire_tags_cache).and_call_original
+ expect(repository).to receive(:expire_statistics_caches).and_call_original
repository.before_remove_tag
end
@@ -1320,23 +1269,23 @@ describe Repository, models: true do
end
end
- describe '#expire_branch_count_cache' do
- let(:cache) { repository.send(:cache) }
-
+ describe '#expire_branches_cache' do
it 'expires the cache' do
- expect(cache).to receive(:expire).with(:branch_count)
+ expect(repository).to receive(:expire_method_caches).
+ with(%i(branch_names branch_count)).
+ and_call_original
- repository.expire_branch_count_cache
+ repository.expire_branches_cache
end
end
- describe '#expire_tag_count_cache' do
- let(:cache) { repository.send(:cache) }
-
+ describe '#expire_tags_cache' do
it 'expires the cache' do
- expect(cache).to receive(:expire).with(:tag_count)
+ expect(repository).to receive(:expire_method_caches).
+ with(%i(tag_names tag_count)).
+ and_call_original
- repository.expire_tag_count_cache
+ repository.expire_tags_cache
end
end
@@ -1412,170 +1361,316 @@ describe Repository, models: true do
describe '#avatar' do
it 'returns nil if repo does not exist' do
- expect(repository).to receive(:exists?).and_return(false)
+ expect(repository).to receive(:file_on_head).
+ and_raise(Rugged::ReferenceError)
expect(repository.avatar).to eq(nil)
end
it 'returns the first avatar file found in the repository' do
- expect(repository).to receive(:blob_at_branch).
- with('master', 'logo.png').
- and_return(true)
+ expect(repository).to receive(:file_on_head).
+ with(:avatar).
+ and_return(double(:tree, path: 'logo.png'))
expect(repository.avatar).to eq('logo.png')
end
it 'caches the output' do
- allow(repository).to receive(:blob_at_branch).
- with('master', 'logo.png').
- and_return(true)
-
- expect(repository.avatar).to eq('logo.png')
+ expect(repository).to receive(:file_on_head).
+ with(:avatar).
+ once.
+ and_return(double(:tree, path: 'logo.png'))
- expect(repository).not_to receive(:blob_at_branch)
- expect(repository.avatar).to eq('logo.png')
+ 2.times { expect(repository.avatar).to eq('logo.png') }
end
end
- describe '#expire_avatar_cache' do
+ describe '#expire_exists_cache' do
let(:cache) { repository.send(:cache) }
- before do
- allow(repository).to receive(:cache).and_return(cache)
+ it 'expires the cache' do
+ expect(cache).to receive(:expire).with(:exists?)
+
+ repository.expire_exists_cache
end
+ end
- context 'without a branch or revision' do
- it 'flushes the cache' do
- expect(cache).to receive(:expire).with(:avatar)
+ describe "#keep_around" do
+ it "does not fail if we attempt to reference bad commit" do
+ expect(repository.kept_around?('abc1234')).to be_falsey
+ end
- repository.expire_avatar_cache
- end
+ it "stores a reference to the specified commit sha so it isn't garbage collected" do
+ repository.keep_around(sample_commit.id)
+
+ expect(repository.kept_around?(sample_commit.id)).to be_truthy
+ end
+
+ it "attempting to call keep_around on truncated ref does not fail" do
+ repository.keep_around(sample_commit.id)
+ ref = repository.send(:keep_around_ref_name, sample_commit.id)
+ path = File.join(repository.path, ref)
+ # Corrupt the reference
+ File.truncate(path, 0)
+
+ expect(repository.kept_around?(sample_commit.id)).to be_falsey
+
+ repository.keep_around(sample_commit.id)
+
+ expect(repository.kept_around?(sample_commit.id)).to be_falsey
+
+ File.delete(path)
+ end
+ end
+
+ describe '#update_ref!' do
+ it 'can create a ref' do
+ repository.update_ref!('refs/heads/foobar', 'refs/heads/master', Gitlab::Git::BLANK_SHA)
+
+ expect(repository.find_branch('foobar')).not_to be_nil
end
- context 'with a branch' do
- it 'does not flush the cache if the branch is not the default branch' do
- expect(cache).not_to receive(:expire)
+ it 'raises CommitError when the ref update fails' do
+ expect do
+ repository.update_ref!('refs/heads/master', 'refs/heads/master', Gitlab::Git::BLANK_SHA)
+ end.to raise_error(Repository::CommitError)
+ end
+ end
+
+ describe '#contribution_guide', caching: true do
+ it 'returns and caches the output' do
+ expect(repository).to receive(:file_on_head).
+ with(:contributing).
+ and_return(Gitlab::Git::Tree.new(path: 'CONTRIBUTING.md')).
+ once
- repository.expire_avatar_cache('cats')
+ 2.times do
+ expect(repository.contribution_guide).
+ to be_an_instance_of(Gitlab::Git::Tree)
end
+ end
+ end
- it 'flushes the cache if the branch equals the default branch' do
- expect(cache).to receive(:expire).with(:avatar)
+ describe '#gitignore', caching: true do
+ it 'returns and caches the output' do
+ expect(repository).to receive(:file_on_head).
+ with(:gitignore).
+ and_return(Gitlab::Git::Tree.new(path: '.gitignore')).
+ once
- repository.expire_avatar_cache(repository.root_ref)
+ 2.times do
+ expect(repository.gitignore).to be_an_instance_of(Gitlab::Git::Tree)
end
end
+ end
- context 'with a branch and revision' do
- let(:commit) { double(:commit) }
+ describe '#koding_yml', caching: true do
+ it 'returns and caches the output' do
+ expect(repository).to receive(:file_on_head).
+ with(:koding).
+ and_return(Gitlab::Git::Tree.new(path: '.koding.yml')).
+ once
- before do
- allow(repository).to receive(:commit).and_return(commit)
+ 2.times do
+ expect(repository.koding_yml).to be_an_instance_of(Gitlab::Git::Tree)
end
+ end
+ end
- it 'does not flush the cache if the commit does not change any logos' do
- diff = double(:diff, new_path: 'test.txt')
+ describe '#readme', caching: true do
+ context 'with a non-existing repository' do
+ it 'returns nil' do
+ expect(repository).to receive(:tree).with(:head).and_return(nil)
- expect(commit).to receive(:raw_diffs).and_return([diff])
- expect(cache).not_to receive(:expire)
+ expect(repository.readme).to be_nil
+ end
+ end
- repository.expire_avatar_cache(repository.root_ref, '123')
+ context 'with an existing repository' do
+ it 'returns the README' do
+ expect(repository.readme).to be_an_instance_of(Gitlab::Git::Blob)
end
+ end
+ end
- it 'flushes the cache if the commit changes any of the logos' do
- diff = double(:diff, new_path: Repository::AVATAR_FILES[0])
+ describe '#expire_statistics_caches' do
+ it 'expires the caches' do
+ expect(repository).to receive(:expire_method_caches).
+ with(%i(size commit_count))
+
+ repository.expire_statistics_caches
+ end
+ end
- expect(commit).to receive(:raw_diffs).and_return([diff])
- expect(cache).to receive(:expire).with(:avatar)
+ describe '#expire_method_caches' do
+ it 'expires the caches of the given methods' do
+ expect_any_instance_of(RepositoryCache).to receive(:expire).with(:readme)
+ expect_any_instance_of(RepositoryCache).to receive(:expire).with(:gitignore)
- repository.expire_avatar_cache(repository.root_ref, '123')
- end
+ repository.expire_method_caches(%i(readme gitignore))
end
end
- describe '#expire_exists_cache' do
- let(:cache) { repository.send(:cache) }
+ describe '#expire_all_method_caches' do
+ it 'expires the caches of all methods' do
+ expect(repository).to receive(:expire_method_caches).
+ with(Repository::CACHED_METHODS)
+
+ repository.expire_all_method_caches
+ end
+ end
+ describe '#expire_avatar_cache' do
it 'expires the cache' do
- expect(cache).to receive(:expire).with(:exists?)
+ expect(repository).to receive(:expire_method_caches).with(%i(avatar))
- repository.expire_exists_cache
+ repository.expire_avatar_cache
end
end
- describe '#build_cache' do
- let(:cache) { repository.send(:cache) }
+ describe '#file_on_head' do
+ context 'with a non-existing repository' do
+ it 'returns nil' do
+ expect(repository).to receive(:tree).with(:head).and_return(nil)
- it 'builds the caches if they do not already exist' do
- cache_keys = repository.cache_keys + repository.cache_keys_for_branches_and_tags
+ expect(repository.file_on_head(:readme)).to be_nil
+ end
+ end
- expect(cache).to receive(:exist?).
- exactly(cache_keys.length).
- times.
- and_return(false)
+ context 'with a repository that has no blobs' do
+ it 'returns nil' do
+ expect_any_instance_of(Tree).to receive(:blobs).and_return([])
+
+ expect(repository.file_on_head(:readme)).to be_nil
+ end
+ end
+
+ context 'with an existing repository' do
+ it 'returns a Gitlab::Git::Tree' do
+ expect(repository.file_on_head(:readme)).
+ to be_an_instance_of(Gitlab::Git::Tree)
+ end
+ end
+ end
+
+ describe '#head_tree' do
+ context 'with an existing repository' do
+ it 'returns a Tree' do
+ expect(repository.head_tree).to be_an_instance_of(Tree)
+ end
+ end
+
+ context 'with a non-existing repository' do
+ it 'returns nil' do
+ expect(repository).to receive(:head_commit).and_return(nil)
+
+ expect(repository.head_tree).to be_nil
+ end
+ end
+ end
- cache_keys.each do |key|
- expect(repository).to receive(key)
+ describe '#tree' do
+ context 'using a non-existing repository' do
+ before do
+ allow(repository).to receive(:head_commit).and_return(nil)
+ end
+
+ it 'returns nil' do
+ expect(repository.tree(:head)).to be_nil
end
- repository.build_cache
+ it 'returns nil when using a path' do
+ expect(repository.tree(:head, 'README.md')).to be_nil
+ end
end
- it 'does not build any caches that already exist' do
- cache_keys = repository.cache_keys + repository.cache_keys_for_branches_and_tags
+ context 'using an existing repository' do
+ it 'returns a Tree' do
+ expect(repository.tree(:head)).to be_an_instance_of(Tree)
+ end
+ end
+ end
- expect(cache).to receive(:exist?).
- exactly(cache_keys.length).
- times.
- and_return(true)
+ describe '#size' do
+ context 'with a non-existing repository' do
+ it 'returns 0' do
+ expect(repository).to receive(:exists?).and_return(false)
- cache_keys.each do |key|
- expect(repository).not_to receive(key)
+ expect(repository.size).to eq(0.0)
end
+ end
- repository.build_cache
+ context 'with an existing repository' do
+ it 'returns the repository size as a Float' do
+ expect(repository.size).to be_an_instance_of(Float)
+ end
end
end
- describe "#keep_around" do
- it "does not fail if we attempt to reference bad commit" do
- expect(repository.kept_around?('abc1234')).to be_falsey
+ describe '#commit_count' do
+ context 'with a non-existing repository' do
+ it 'returns 0' do
+ expect(repository).to receive(:root_ref).and_return(nil)
+
+ expect(repository.commit_count).to eq(0)
+ end
end
- it "stores a reference to the specified commit sha so it isn't garbage collected" do
- repository.keep_around(sample_commit.id)
+ context 'with an existing repository' do
+ it 'returns the commit count' do
+ expect(repository.commit_count).to be_an_instance_of(Fixnum)
+ end
+ end
+ end
- expect(repository.kept_around?(sample_commit.id)).to be_truthy
+ describe '#cache_method_output', caching: true do
+ context 'with a non-existing repository' do
+ let(:value) do
+ repository.cache_method_output(:cats, fallback: 10) do
+ raise Rugged::ReferenceError
+ end
+ end
+
+ it 'returns a fallback value' do
+ expect(value).to eq(10)
+ end
+
+ it 'does not cache the data' do
+ value
+
+ expect(repository.instance_variable_defined?(:@cats)).to eq(false)
+ expect(repository.send(:cache).exist?(:cats)).to eq(false)
+ end
end
- it "attempting to call keep_around on truncated ref does not fail" do
- repository.keep_around(sample_commit.id)
- ref = repository.send(:keep_around_ref_name, sample_commit.id)
- path = File.join(repository.path, ref)
- # Corrupt the reference
- File.truncate(path, 0)
+ context 'with an existing repository' do
+ it 'caches the output' do
+ object = double
- expect(repository.kept_around?(sample_commit.id)).to be_falsey
+ expect(object).to receive(:number).once.and_return(10)
- repository.keep_around(sample_commit.id)
+ 2.times do
+ val = repository.cache_method_output(:cats) { object.number }
- expect(repository.kept_around?(sample_commit.id)).to be_falsey
+ expect(val).to eq(10)
+ end
- File.delete(path)
+ expect(repository.send(:cache).exist?(:cats)).to eq(true)
+ expect(repository.instance_variable_get(:@cats)).to eq(10)
+ end
end
end
- describe '#update_ref!' do
- it 'can create a ref' do
- repository.update_ref!('refs/heads/foobar', 'refs/heads/master', Gitlab::Git::BLANK_SHA)
+ describe '#refresh_method_caches' do
+ it 'refreshes the caches of the given types' do
+ expect(repository).to receive(:expire_method_caches).
+ with(%i(readme license_blob license_key))
- expect(repository.find_branch('foobar')).not_to be_nil
- end
+ expect(repository).to receive(:readme)
+ expect(repository).to receive(:license_blob)
+ expect(repository).to receive(:license_key)
- it 'raises CommitError when the ref update fails' do
- expect do
- repository.update_ref!('refs/heads/master', 'refs/heads/master', Gitlab::Git::BLANK_SHA)
- end.to raise_error(Repository::CommitError)
+ repository.refresh_method_caches(%i(readme license))
end
end
end
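
A note on the pattern exercised above: the #cache_method_output examples pin down a simple contract. On success the value is written to the cache store and memoized in an instance variable; a Rugged::ReferenceError instead returns the supplied fallback without caching anything. A minimal sketch of that shape, using a Hash in place of RepositoryCache and a stand-in error class (not GitLab's actual implementation):

    # Minimal sketch of the memoize-plus-cache pattern the #cache_method_output
    # specs describe. The Hash stands in for RepositoryCache; RepoError stands
    # in for Rugged::ReferenceError.
    class CachingSketch
      RepoError = Class.new(StandardError)

      def initialize
        @cache = {}
      end

      def cache_method_output(key, fallback: nil)
        value =
          if @cache.key?(key)
            @cache[key]
          else
            yield.tap { |result| @cache[key] = result }
          end

        instance_variable_set(:"@#{key}", value)
      rescue RepoError
        fallback
      end
    end

    repo = CachingSketch.new
    repo.cache_method_output(:cats) { 10 }                      # => 10, cached and stored in @cats
    repo.cache_method_output(:cats) { raise 'never evaluated' } # => 10, served from the cache
    repo.cache_method_output(:dogs, fallback: 0) { raise CachingSketch::RepoError } # => 0, nothing cached
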
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index 8f605757186..fe6b875b997 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -14,7 +14,7 @@ describe API::API, api: true do
describe "GET /projects/:id/repository/branches" do
it "returns an array of project branches" do
- project.repository.expire_cache
+ project.repository.expire_all_method_caches
get api("/projects/#{project.id}/repository/branches", user)
expect(response).to have_http_status(200)
diff --git a/spec/requests/api/project_snippets_spec.rb b/spec/requests/api/project_snippets_spec.rb
index 01148f0a05e..1c25fd04339 100644
--- a/spec/requests/api/project_snippets_spec.rb
+++ b/spec/requests/api/project_snippets_spec.rb
@@ -3,10 +3,12 @@ require 'rails_helper'
describe API::API, api: true do
include ApiHelpers
+ let(:project) { create(:empty_project, :public) }
+ let(:admin) { create(:admin) }
+
describe 'GET /projects/:project_id/snippets/:id' do
# TODO (rspeicher): Deprecated; remove in 9.0
it 'always exposes expires_at as nil' do
- admin = create(:admin)
snippet = create(:project_snippet, author: admin)
get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}", admin)
@@ -17,9 +19,9 @@ describe API::API, api: true do
end
describe 'GET /projects/:project_id/snippets/' do
+ let(:user) { create(:user) }
+
it 'returns all snippets available to team member' do
- project = create(:project, :public)
- user = create(:user)
project.team << [user, :developer]
public_snippet = create(:project_snippet, :public, project: project)
internal_snippet = create(:project_snippet, :internal, project: project)
@@ -34,8 +36,6 @@ describe API::API, api: true do
end
it 'hides private snippets from regular user' do
- project = create(:project, :public)
- user = create(:user)
create(:project_snippet, :private, project: project)
get api("/projects/#{project.id}/snippets/", user)
@@ -45,16 +45,16 @@ describe API::API, api: true do
end
describe 'POST /projects/:project_id/snippets/' do
- it 'creates a new snippet' do
- admin = create(:admin)
- project = create(:project)
- params = {
+ let(:params) do
+ {
title: 'Test Title',
file_name: 'test.rb',
code: 'puts "hello world"',
visibility_level: Gitlab::VisibilityLevel::PUBLIC
}
+ end
+ it 'creates a new snippet' do
post api("/projects/#{project.id}/snippets/", admin), params
expect(response).to have_http_status(201)
@@ -64,12 +64,20 @@ describe API::API, api: true do
expect(snippet.file_name).to eq(params[:file_name])
expect(snippet.visibility_level).to eq(params[:visibility_level])
end
+
+ it 'returns 400 for missing parameters' do
+ params.delete(:title)
+
+ post api("/projects/#{project.id}/snippets/", admin), params
+
+ expect(response).to have_http_status(400)
+ end
end
describe 'PUT /projects/:project_id/snippets/:id/' do
+ let(:snippet) { create(:project_snippet, author: admin) }
+
it 'updates snippet' do
- admin = create(:admin)
- snippet = create(:project_snippet, author: admin)
new_content = 'New content'
put api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/", admin), code: new_content
@@ -78,9 +86,24 @@ describe API::API, api: true do
snippet.reload
expect(snippet.content).to eq(new_content)
end
+
+ it 'returns 404 for invalid snippet id' do
+ put api("/projects/#{snippet.project.id}/snippets/1234", admin), title: 'foo'
+
+ expect(response).to have_http_status(404)
+ expect(json_response['message']).to eq('404 Snippet Not Found')
+ end
+
+ it 'returns 400 for missing parameters' do
+ put api("/projects/#{project.id}/snippets/1234", admin)
+
+ expect(response).to have_http_status(400)
+ end
end
describe 'DELETE /projects/:project_id/snippets/:id/' do
+ let(:snippet) { create(:project_snippet, author: admin) }
+
it 'deletes snippet' do
admin = create(:admin)
snippet = create(:project_snippet, author: admin)
@@ -89,18 +112,31 @@ describe API::API, api: true do
expect(response).to have_http_status(200)
end
+
+ it 'returns 404 for invalid snippet id' do
+ delete api("/projects/#{snippet.project.id}/snippets/1234", admin)
+
+ expect(response).to have_http_status(404)
+ expect(json_response['message']).to eq('404 Snippet Not Found')
+ end
end
describe 'GET /projects/:project_id/snippets/:id/raw' do
- it 'returns raw text' do
- admin = create(:admin)
- snippet = create(:project_snippet, author: admin)
+ let(:snippet) { create(:project_snippet, author: admin) }
+ it 'returns raw text' do
get api("/projects/#{snippet.project.id}/snippets/#{snippet.id}/raw", admin)
expect(response).to have_http_status(200)
expect(response.content_type).to eq 'text/plain'
expect(response.body).to eq(snippet.content)
end
+
+ it 'returns 404 for invalid snippet id' do
+ delete api("/projects/#{snippet.project.id}/snippets/1234", admin)
+
+ expect(response).to have_http_status(404)
+ expect(json_response['message']).to eq('404 Snippet Not Found')
+ end
end
end
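
The new expectations added here encode two error paths for the snippet endpoints: missing required parameters are rejected with 400 before any lookup happens, and a snippet id that does not exist yields 404 with the '404 Snippet Not Found' message. A schematic of that ordering (hypothetical and simplified, not the real Grape endpoint):

    # Schematic of the error ordering the new snippet specs assert.
    # Hypothetical helper; the real endpoint validates declared params itself.
    def update_snippet_response(snippets_by_id, id, params)
      return [400, { 'error' => 'title is missing' }] unless params.key?(:title) || params.key?(:code)

      snippet = snippets_by_id[id]
      return [404, { 'message' => '404 Snippet Not Found' }] unless snippet

      snippet.merge!(params)
      [200, snippet]
    end

    snippets = { 1 => { title: 'Test Title', code: 'puts "hello world"' } }
    update_snippet_response(snippets, 1234, title: 'foo') # => 404 response
    update_snippet_response(snippets, 1, {})              # => 400 response
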
diff --git a/spec/requests/ci/api/builds_spec.rb b/spec/requests/ci/api/builds_spec.rb
index a611c5e3823..a09d8689ff2 100644
--- a/spec/requests/ci/api/builds_spec.rb
+++ b/spec/requests/ci/api/builds_spec.rb
@@ -17,6 +17,10 @@ describe Ci::API::API do
let!(:build) { create(:ci_build, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let(:user_agent) { 'gitlab-ci-multi-runner 1.5.2 (1-5-stable; go1.6.3; linux/amd64)' }
+ before do
+ stub_container_registry_config(enabled: false)
+ end
+
shared_examples 'no builds available' do
context 'when runner sends version in User-Agent' do
context 'for stable version' do
@@ -53,6 +57,41 @@ describe Ci::API::API do
it 'updates runner info' do
expect { register_builds }.to change { runner.reload.contacted_at }
end
+
+ context 'registry credentials' do
+ let(:registry_credentials) do
+ { 'type' => 'registry',
+ 'url' => 'registry.example.com:5005',
+ 'username' => 'gitlab-ci-token',
+ 'password' => build.token }
+ end
+
+ context 'when registry is enabled' do
+ before do
+ stub_container_registry_config(enabled: true, host_port: 'registry.example.com:5005')
+ end
+
+ it 'sends registry credentials key' do
+ register_builds info: { platform: :darwin }
+
+ expect(json_response).to have_key('credentials')
+ expect(json_response['credentials']).to include(registry_credentials)
+ end
+ end
+
+ context 'when registry is disabled' do
+ before do
+ stub_container_registry_config(enabled: false, host_port: 'registry.example.com:5005')
+ end
+
+ it 'does not send registry credentials' do
+ register_builds info: { platform: :darwin }
+
+ expect(json_response).to have_key('credentials')
+ expect(json_response['credentials']).not_to include(registry_credentials)
+ end
+ end
+ end
end
context 'when builds are finished' do
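
The registry credentials asserted in these examples have a small fixed shape: a 'registry' entry pointing at the configured host and port, with 'gitlab-ci-token' as the username and the build token as the password, and the entry is only produced when the container registry is enabled. A rough sketch of that assembly (hypothetical helper, not the actual build credentials factory):

    # Rough sketch of how the credentials entry asserted above could be built.
    # Hypothetical helper for illustration only.
    def registry_credentials(build_token, host_port, registry_enabled:)
      return [] unless registry_enabled

      [{
        'type'     => 'registry',
        'url'      => host_port, # e.g. 'registry.example.com:5005'
        'username' => 'gitlab-ci-token',
        'password' => build_token
      }]
    end

    registry_credentials('abc123', 'registry.example.com:5005', registry_enabled: true)
    # => [{"type"=>"registry", "url"=>"registry.example.com:5005",
    #      "username"=>"gitlab-ci-token", "password"=>"abc123"}]
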
diff --git a/spec/routing/routing_spec.rb b/spec/routing/routing_spec.rb
index 7aba4f08088..f15c45cbaac 100644
--- a/spec/routing/routing_spec.rb
+++ b/spec/routing/routing_spec.rb
@@ -261,7 +261,7 @@ describe "Authentication", "routing" do
end
describe "Groups", "routing" do
- let(:name) { 'complex.group-name' }
+ let(:name) { 'complex.group-namegit' }
it "to #show" do
expect(get("/groups/#{name}")).to route_to('groups#show', id: name)
diff --git a/spec/services/git_push_service_spec.rb b/spec/services/git_push_service_spec.rb
index 62f9982e840..9d7702f5c96 100644
--- a/spec/services/git_push_service_spec.rb
+++ b/spec/services/git_push_service_spec.rb
@@ -27,27 +27,14 @@ describe GitPushService, services: true do
it { is_expected.to be_truthy }
- it 'flushes general cached data' do
- expect(project.repository).to receive(:expire_cache).
- with('master', newrev)
+ it 'calls the after_push_commit hook' do
+ expect(project.repository).to receive(:after_push_commit).with('master')
subject
end
- it 'flushes the visible content cache' do
- expect(project.repository).to receive(:expire_has_visible_content_cache)
-
- subject
- end
-
- it 'flushes the branches cache' do
- expect(project.repository).to receive(:expire_branches_cache)
-
- subject
- end
-
- it 'flushes the branch count cache' do
- expect(project.repository).to receive(:expire_branch_count_cache)
+ it 'calls the after_create_branch hook' do
+ expect(project.repository).to receive(:after_create_branch)
subject
end
@@ -56,21 +43,8 @@ describe GitPushService, services: true do
context 'existing branch' do
it { is_expected.to be_truthy }
- it 'flushes general cached data' do
- expect(project.repository).to receive(:expire_cache).
- with('master', newrev)
-
- subject
- end
-
- it 'does not flush the branches cache' do
- expect(project.repository).not_to receive(:expire_branches_cache)
-
- subject
- end
-
- it 'does not flush the branch count cache' do
- expect(project.repository).not_to receive(:expire_branch_count_cache)
+ it 'calls the after_push_commit hook' do
+ expect(project.repository).to receive(:after_push_commit).with('master')
subject
end
@@ -81,27 +55,14 @@ describe GitPushService, services: true do
it { is_expected.to be_truthy }
- it 'flushes the visible content cache' do
- expect(project.repository).to receive(:expire_has_visible_content_cache)
-
- subject
- end
-
- it 'flushes the branches cache' do
- expect(project.repository).to receive(:expire_branches_cache)
-
- subject
- end
-
- it 'flushes the branch count cache' do
- expect(project.repository).to receive(:expire_branch_count_cache)
+ it 'calls the after_push_commit hook' do
+ expect(project.repository).to receive(:after_push_commit).with('master')
subject
end
- it 'flushes general cached data' do
- expect(project.repository).to receive(:expire_cache).
- with('master', newrev)
+ it 'calls the after_remove_branch hook' do
+ expect(project.repository).to receive(:after_remove_branch)
subject
end
@@ -598,6 +559,51 @@ describe GitPushService, services: true do
end
end
+ describe '#update_caches' do
+ let(:service) do
+ described_class.new(project,
+ user,
+ oldrev: sample_commit.parent_id,
+ newrev: sample_commit.id,
+ ref: 'refs/heads/master')
+ end
+
+ context 'on the default branch' do
+ before do
+ allow(service).to receive(:is_default_branch?).and_return(true)
+ end
+
+ it 'flushes the caches of any special files that have been changed' do
+ commit = double(:commit)
+ diff = double(:diff, new_path: 'README.md')
+
+ expect(commit).to receive(:raw_diffs).with(deltas_only: true).
+ and_return([diff])
+
+ service.push_commits = [commit]
+
+ expect(ProjectCacheWorker).to receive(:perform_async).
+ with(project.id, %i(readme))
+
+ service.update_caches
+ end
+ end
+
+ context 'on a non-default branch' do
+ before do
+ allow(service).to receive(:is_default_branch?).and_return(false)
+ end
+
+ it 'does not flush any conditional caches' do
+ expect(ProjectCacheWorker).to receive(:perform_async).
+ with(project.id, []).
+ and_call_original
+
+ service.update_caches
+ end
+ end
+ end
+
def execute_service(project, user, oldrev, newrev, ref)
service = described_class.new(project, user, oldrev: oldrev, newrev: newrev, ref: ref )
service.execute
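
The #update_caches examples above fix the rule for refreshing file-backed caches: only pushes to the default branch inspect the changed paths, and ProjectCacheWorker is scheduled with just the cache types whose files were touched, or an empty list otherwise. A rough sketch of that decision, with a simple filename check standing in for the real file detection (hypothetical helper):

    # Rough sketch of the conditional refresh the #update_caches specs describe.
    # The readme filename pattern is an assumption for illustration only.
    def cache_types_to_refresh(changed_paths, default_branch:)
      return [] unless default_branch

      types = []
      types << :readme if changed_paths.any? { |path| path =~ /\Areadme/i }
      types
    end

    cache_types_to_refresh(['README.md'], default_branch: true)  # => [:readme]
    cache_types_to_refresh(['README.md'], default_branch: false) # => []
    # The service then calls ProjectCacheWorker.perform_async(project.id, types).
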
diff --git a/spec/services/git_tag_push_service_spec.rb b/spec/services/git_tag_push_service_spec.rb
index 0879e3ab4c8..bd074b9bd71 100644
--- a/spec/services/git_tag_push_service_spec.rb
+++ b/spec/services/git_tag_push_service_spec.rb
@@ -18,7 +18,7 @@ describe GitTagPushService, services: true do
end
it 'flushes general cached data' do
- expect(project.repository).to receive(:expire_cache)
+ expect(project.repository).to receive(:before_push_tag)
subject
end
@@ -28,12 +28,6 @@ describe GitTagPushService, services: true do
subject
end
-
- it 'flushes the tag count cache' do
- expect(project.repository).to receive(:expire_tag_count_cache)
-
- subject
- end
end
describe "Git Tag Push Data" do
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 0220f7e1db2..e515bc9f89c 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -227,16 +227,6 @@ describe MergeRequests::RefreshService, services: true do
end
end
- context 'when the source branch is deleted' do
- it 'does not create a MergeRequestDiff record' do
- refresh_service = service.new(@project, @user)
-
- expect do
- refresh_service.execute(@oldrev, Gitlab::Git::BLANK_SHA, 'refs/heads/master')
- end.not_to change { MergeRequestDiff.count }
- end
- end
-
def reload_mrs
@merge_request.reload
@fork_merge_request.reload
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 150e21574a1..2a5709c6322 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -543,7 +543,10 @@ describe SystemNoteService, services: true do
let(:comment_url) { jira_api_comment_url(jira_issue.id) }
let(:success_message) { "JiraService SUCCESS: Successfully posted to http://jira.example.net." }
- before { stub_jira_urls(jira_issue.id) }
+ before do
+ stub_jira_urls(jira_issue.id)
+ jira_service_settings
+ end
noteable_types = ["merge_requests", "commit"]
@@ -569,16 +572,16 @@ describe SystemNoteService, services: true do
end
end
- context 'in JIRA issue tracker' do
- before { jira_service_settings }
-
- describe "new reference" do
- subject { described_class.cross_reference(jira_issue, commit, author) }
+ describe "new reference" do
+ context 'for commits' do
+ it "creates comment" do
+ result = described_class.cross_reference(jira_issue, commit, author)
- it { is_expected.to eq(success_message) }
+ expect(result).to eq(success_message)
+ end
it "creates remote link" do
- subject
+ described_class.cross_reference(jira_issue, commit, author)
expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue)).with(
body: hash_including(
@@ -593,18 +596,18 @@ describe SystemNoteService, services: true do
).once
end
end
- end
- context 'in commit' do
- context 'in JIRA issue tracker' do
- before { jira_service_settings }
+ context 'for issues' do
+ let(:issue) { create(:issue, project: project) }
- subject { described_class.cross_reference(jira_issue, issue, author) }
+ it "creates comment" do
+ result = described_class.cross_reference(jira_issue, issue, author)
- it { is_expected.to eq(success_message) }
+ expect(result).to eq(success_message)
+ end
it "creates remote link" do
- subject
+ described_class.cross_reference(jira_issue, issue, author)
expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue)).with(
body: hash_including(
@@ -619,6 +622,32 @@ describe SystemNoteService, services: true do
).once
end
end
+
+ context 'for snippets' do
+ let(:snippet) { create(:snippet, project: project) }
+
+ it "creates comment" do
+ result = described_class.cross_reference(jira_issue, snippet, author)
+
+ expect(result).to eq(success_message)
+ end
+
+ it "creates remote link" do
+ described_class.cross_reference(jira_issue, snippet, author)
+
+ expect(WebMock).to have_requested(:post, jira_api_remote_link_url(jira_issue)).with(
+ body: hash_including(
+ GlobalID: "GitLab",
+ object: {
+ url: namespace_project_snippet_url(project.namespace, project, snippet),
+ title: "GitLab: Mentioned on snippet - #{snippet.title}",
+ icon: { title: "GitLab", url16x16: "https://gitlab.com/favicon.ico" },
+ status: { resolved: false }
+ }
+ )
+ ).once
+ end
+ end
end
describe "existing reference" do
@@ -627,9 +656,11 @@ describe SystemNoteService, services: true do
allow_any_instance_of(JIRA::Resource::Issue).to receive(:comments).and_return([OpenStruct.new(body: message)])
end
- subject { described_class.cross_reference(jira_issue, commit, author) }
+ it "does not return success message" do
+ result = described_class.cross_reference(jira_issue, commit, author)
- it { is_expected.not_to eq(success_message) }
+ expect(result).not_to eq(success_message)
+ end
it 'does not try to create comment and remote link' do
subject
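
The 'existing reference' examples above rest on a simple guard: if any comment already on the JIRA issue mentions the GitLab object, no new comment or remote link is created and the success message is not returned. A minimal sketch of that check (hypothetical and simplified):

    # Minimal sketch of the duplicate-mention guard the 'existing reference'
    # specs describe. Comments are assumed to respond to #body.
    Comment = Struct.new(:body)

    def cross_reference_exists?(comments, gitlab_url)
      comments.any? { |comment| comment.body.include?(gitlab_url) }
    end

    comments = [Comment.new('mentioned in https://gitlab.example.com/group/project/commit/deadbeef')]
    cross_reference_exists?(comments, 'https://gitlab.example.com/group/project/commit/deadbeef') # => true
    cross_reference_exists?(comments, 'https://gitlab.example.com/group/project/issues/1')        # => false
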
diff --git a/spec/workers/project_cache_worker_spec.rb b/spec/workers/project_cache_worker_spec.rb
index bfa8c0ff2c6..855c28b584e 100644
--- a/spec/workers/project_cache_worker_spec.rb
+++ b/spec/workers/project_cache_worker_spec.rb
@@ -2,62 +2,78 @@ require 'spec_helper'
describe ProjectCacheWorker do
let(:project) { create(:project) }
+ let(:worker) { described_class.new }
- subject { described_class.new }
-
- describe '.perform_async' do
- it 'schedules the job when no lease exists' do
- allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:exists?).
- and_return(false)
+ describe '#perform' do
+ before do
+ allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).
+ and_return(true)
+ end
- expect_any_instance_of(described_class).to receive(:perform)
+ context 'with a non-existing project' do
+ it 'does nothing' do
+ expect(worker).not_to receive(:update_repository_size)
- described_class.perform_async(project.id)
+ worker.perform(-1)
+ end
end
- it 'does not schedule the job when a lease exists' do
- allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:exists?).
- and_return(true)
+ context 'with an existing project without a repository' do
+ it 'does nothing' do
+ allow_any_instance_of(Repository).to receive(:exists?).and_return(false)
- expect_any_instance_of(described_class).not_to receive(:perform)
+ expect(worker).not_to receive(:update_repository_size)
- described_class.perform_async(project.id)
+ worker.perform(project.id)
+ end
end
- end
- describe '#perform' do
- context 'when an exclusive lease can be obtained' do
- before do
- allow(subject).to receive(:try_obtain_lease_for).with(project.id).
- and_return(true)
- end
+ context 'with an existing project' do
+ it 'updates the repository size' do
+ expect(worker).to receive(:update_repository_size).and_call_original
- it 'updates project cache data' do
- expect_any_instance_of(Repository).to receive(:size)
- expect_any_instance_of(Repository).to receive(:commit_count)
+ worker.perform(project.id)
+ end
- expect_any_instance_of(Project).to receive(:update_repository_size)
- expect_any_instance_of(Project).to receive(:update_commit_count)
+ it 'updates the commit count' do
+ expect_any_instance_of(Project).to receive(:update_commit_count).
+ and_call_original
- subject.perform(project.id)
+ worker.perform(project.id)
end
- it 'handles missing repository data' do
- expect_any_instance_of(Repository).to receive(:exists?).and_return(false)
- expect_any_instance_of(Repository).not_to receive(:size)
+ it 'refreshes the method caches' do
+ expect_any_instance_of(Repository).to receive(:refresh_method_caches).
+ with(%i(readme)).
+ and_call_original
- subject.perform(project.id)
+ worker.perform(project.id, %i(readme))
end
end
+ end
- context 'when an exclusive lease can not be obtained' do
- it 'does nothing' do
- allow(subject).to receive(:try_obtain_lease_for).with(project.id).
+ describe '#update_repository_size' do
+ context 'when a lease could not be obtained' do
+ it 'does not update the repository size' do
+ allow(worker).to receive(:try_obtain_lease_for).
+ with(project.id, :update_repository_size).
and_return(false)
- expect(subject).not_to receive(:update_caches)
+ expect(project).not_to receive(:update_repository_size)
+
+ worker.update_repository_size(project)
+ end
+ end
+
+ context 'when a lease could be obtained' do
+ it 'updates the repository size' do
+ allow(worker).to receive(:try_obtain_lease_for).
+ with(project.id, :update_repository_size).
+ and_return(true)
+
+ expect(project).to receive(:update_repository_size).and_call_original
- subject.perform(project.id)
+ worker.update_repository_size(project)
end
end
end
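
Both #update_repository_size contexts above reduce to the same guard: the expensive recalculation only runs when an exclusive lease for the project can be taken, and is skipped otherwise. A minimal sketch of that shape, with an in-memory stand-in for Gitlab::ExclusiveLease (not the worker's actual implementation):

    # Minimal sketch of the lease-guarded update the #update_repository_size
    # specs describe. LEASES is an in-memory stand-in for the exclusive lease.
    LEASES = {}

    def try_obtain_lease_for(project_id, key)
      return false if LEASES[[project_id, key]]

      LEASES[[project_id, key]] = true
    end

    def update_repository_size(project)
      return unless try_obtain_lease_for(project.id, :update_repository_size)

      project.update_repository_size
    end

    Project = Struct.new(:id) do
      def update_repository_size
        puts "recalculating size for project #{id}"
      end
    end

    update_repository_size(Project.new(42)) # recalculates and takes the lease
    update_repository_size(Project.new(42)) # skipped, the lease is already held
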