summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--app/assets/javascripts/api.js35
-rw-r--r--app/assets/javascripts/clusters_list/components/clusters.vue4
-rw-r--r--app/assets/javascripts/create_cluster/components/cluster_form_dropdown.vue3
-rw-r--r--app/assets/javascripts/logs/components/environment_logs.vue222
-rw-r--r--app/assets/javascripts/logs/components/log_control_buttons.vue93
-rw-r--r--app/assets/javascripts/logs/index.js24
-rw-r--r--app/assets/javascripts/logs/stores/actions.js114
-rw-r--r--app/assets/javascripts/logs/stores/getters.js9
-rw-r--r--app/assets/javascripts/logs/stores/index.js23
-rw-r--r--app/assets/javascripts/logs/stores/mutation_types.js16
-rw-r--r--app/assets/javascripts/logs/stores/mutations.js61
-rw-r--r--app/assets/javascripts/logs/stores/state.js42
-rw-r--r--app/assets/javascripts/logs/utils.js23
-rw-r--r--app/assets/javascripts/pages/projects/logs/index.js3
-rw-r--r--app/assets/stylesheets/framework/common.scss2
-rw-r--r--app/assets/stylesheets/pages/builds.scss39
-rw-r--r--app/controllers/projects/logs_controller.rb79
-rw-r--r--app/helpers/environments_helper.rb9
-rw-r--r--app/models/environment.rb4
-rw-r--r--app/models/snippet_repository.rb6
-rw-r--r--app/policies/project_policy.rb1
-rw-r--r--app/serializers/environment_entity.rb20
-rw-r--r--app/services/pod_logs/base_service.rb132
-rw-r--r--app/services/pod_logs/elasticsearch_service.rb63
-rw-r--r--app/services/pod_logs/kubernetes_service.rb88
-rw-r--r--app/views/layouts/nav/sidebar/_project.html.haml6
-rw-r--r--app/views/projects/logs/empty_logs.html.haml14
-rw-r--r--app/views/projects/logs/index.html.haml1
-rw-r--r--changelogs/unreleased/ak-move-logs-to-core.yml5
-rw-r--r--changelogs/unreleased/mo-fix-capybara-screenshots-rails.yml5
-rw-r--r--config/gitlab.yml.example2
-rw-r--r--config/routes/project.rb7
-rw-r--r--doc/development/i18n/proofreader.md1
-rw-r--r--doc/user/admin_area/settings/continuous_integration.md13
-rw-r--r--doc/user/admin_area/settings/img/admin_package_registry_npm_package_requests_forward.pngbin0 -> 77490 bytes
-rw-r--r--doc/user/incident_management/index.md5
-rw-r--r--doc/user/packages/npm_registry/index.md8
-rw-r--r--doc/user/project/clusters/index.md4
-rw-r--r--doc/user/project/clusters/kubernetes_pod_logs.md3
-rw-r--r--lib/gitlab/background_migration/backfill_snippet_repositories.rb89
-rw-r--r--lib/gitlab/elasticsearch/logs.rb115
-rw-r--r--lib/gitlab/usage_counters/common.rb30
-rw-r--r--lib/gitlab/usage_counters/pod_logs.rb11
-rw-r--r--locale/gitlab.pot15
-rw-r--r--package.json4
-rw-r--r--qa/qa/resource/ssh_key.rb31
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/repository/add_ssh_key_spec.rb2
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb18
-rw-r--r--qa/qa/specs/features/browser_ui/3_create/repository/use_ssh_key_spec.rb2
-rw-r--r--spec/controllers/projects/logs_controller_spec.rb143
-rw-r--r--spec/features/projects/navbar_spec.rb1
-rw-r--r--spec/fixtures/api/schemas/environment.json3
-rw-r--r--spec/fixtures/lib/elasticsearch/logs_response.json73
-rw-r--r--spec/fixtures/lib/elasticsearch/query.json39
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_container.json46
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_end_time.json48
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_search.json48
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_start_time.json48
-rw-r--r--spec/fixtures/lib/elasticsearch/query_with_times.json49
-rw-r--r--spec/frontend/blob/sketch/index_spec.js92
-rw-r--r--spec/frontend/clusters_list/mock_data.js5
-rw-r--r--spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js142
-rw-r--r--spec/frontend/logs/components/environment_logs_spec.js334
-rw-r--r--spec/frontend/logs/components/log_control_buttons_spec.js108
-rw-r--r--spec/frontend/logs/mock_data.js85
-rw-r--r--spec/frontend/logs/stores/actions_spec.js324
-rw-r--r--spec/frontend/logs/stores/getters_spec.js40
-rw-r--r--spec/frontend/logs/stores/mutations_spec.js171
-rw-r--r--spec/frontend/logs/utils_spec.js38
-rw-r--r--spec/javascripts/blob/balsamiq/balsamiq_viewer_browser_spec.js (renamed from spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js)2
-rw-r--r--spec/javascripts/blob/sketch/index_spec.js120
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb154
-rw-r--r--spec/lib/gitlab/elasticsearch/logs_spec.rb80
-rw-r--r--spec/models/environment_spec.rb35
-rw-r--r--spec/models/snippet_repository_spec.rb38
-rw-r--r--spec/serializers/environment_entity_spec.rb20
-rw-r--r--spec/services/pod_logs/base_service_spec.rb229
-rw-r--r--spec/services/pod_logs/elasticsearch_service_spec.rb174
-rw-r--r--spec/services/pod_logs/kubernetes_service_spec.rb166
-rw-r--r--spec/support/capybara.rb2
-rw-r--r--yarn.lock8
81 files changed, 4116 insertions, 255 deletions
diff --git a/app/assets/javascripts/api.js b/app/assets/javascripts/api.js
index dc6ea148047..c85e5b68f5f 100644
--- a/app/assets/javascripts/api.js
+++ b/app/assets/javascripts/api.js
@@ -492,6 +492,41 @@ const Api = {
buildUrl(url) {
return joinPaths(gon.relative_url_root || '', url.replace(':version', gon.api_version));
},
+
+ /**
+ * Returns pods logs for an environment with an optional pod and container
+ *
+ * @param {Object} params
+ * @param {Object} param.environment - Environment object
+ * @param {string=} params.podName - Pod name, if not set the backend assumes a default one
+ * @param {string=} params.containerName - Container name, if not set the backend assumes a default one
+ * @param {string=} params.start - Starting date to query the logs in ISO format
+ * @param {string=} params.end - Ending date to query the logs in ISO format
+ * @returns {Promise} Axios promise for the result of a GET request of logs
+ */
+ getPodLogs({ environment, podName, containerName, search, start, end }) {
+ const url = this.buildUrl(environment.logs_api_path);
+
+ const params = {};
+
+ if (podName) {
+ params.pod_name = podName;
+ }
+ if (containerName) {
+ params.container_name = containerName;
+ }
+ if (search) {
+ params.search = search;
+ }
+ if (start) {
+ params.start = start;
+ }
+ if (end) {
+ params.end = end;
+ }
+
+ return axios.get(url, { params });
+ },
};
export default Api;
diff --git a/app/assets/javascripts/clusters_list/components/clusters.vue b/app/assets/javascripts/clusters_list/components/clusters.vue
index f9f23fd556f..46dacf30f39 100644
--- a/app/assets/javascripts/clusters_list/components/clusters.vue
+++ b/app/assets/javascripts/clusters_list/components/clusters.vue
@@ -28,6 +28,10 @@ export default {
label: __('Size'),
},
{
+ key: 'cpu',
+ label: __('Total cores (vCPUs)'),
+ },
+ {
key: 'memory',
label: __('Total memory (GB)'),
},
diff --git a/app/assets/javascripts/create_cluster/components/cluster_form_dropdown.vue b/app/assets/javascripts/create_cluster/components/cluster_form_dropdown.vue
index 2f7fcfcb755..e9d484bdd94 100644
--- a/app/assets/javascripts/create_cluster/components/cluster_form_dropdown.vue
+++ b/app/assets/javascripts/create_cluster/components/cluster_form_dropdown.vue
@@ -1,11 +1,12 @@
<script>
+import { isNil } from 'lodash';
import $ from 'jquery';
import { GlIcon } from '@gitlab/ui';
import DropdownSearchInput from '~/vue_shared/components/dropdown/dropdown_search_input.vue';
import DropdownHiddenInput from '~/vue_shared/components/dropdown/dropdown_hidden_input.vue';
import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
-const toArray = value => [].concat(value);
+const toArray = value => (isNil(value) ? [] : [].concat(value));
const itemsProp = (items, prop) => items.map(item => item[prop]);
const defaultSearchFn = (searchQuery, labelProp) => item =>
item[labelProp].toLowerCase().indexOf(searchQuery) > -1;
diff --git a/app/assets/javascripts/logs/components/environment_logs.vue b/app/assets/javascripts/logs/components/environment_logs.vue
new file mode 100644
index 00000000000..b94cd2bcec4
--- /dev/null
+++ b/app/assets/javascripts/logs/components/environment_logs.vue
@@ -0,0 +1,222 @@
+<script>
+import { mapActions, mapState, mapGetters } from 'vuex';
+import { GlDropdown, GlDropdownItem, GlFormGroup, GlSearchBoxByClick, GlAlert } from '@gitlab/ui';
+import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
+import { scrollDown } from '~/lib/utils/scroll_utils';
+import LogControlButtons from './log_control_buttons.vue';
+
+import { timeRanges, defaultTimeRange } from '~/monitoring/constants';
+import { timeRangeFromUrl } from '~/monitoring/utils';
+
+export default {
+ components: {
+ GlAlert,
+ GlDropdown,
+ GlDropdownItem,
+ GlFormGroup,
+ GlSearchBoxByClick,
+ DateTimePicker,
+ LogControlButtons,
+ },
+ props: {
+ environmentName: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ currentPodName: {
+ type: [String, null],
+ required: false,
+ default: null,
+ },
+ environmentsPath: {
+ type: String,
+ required: false,
+ default: '',
+ },
+ clusterApplicationsDocumentationPath: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ searchQuery: '',
+ timeRanges,
+ isElasticStackCalloutDismissed: false,
+ };
+ },
+ computed: {
+ ...mapState('environmentLogs', ['environments', 'timeRange', 'logs', 'pods']),
+ ...mapGetters('environmentLogs', ['trace']),
+
+ timeRangeModel: {
+ get() {
+ return this.timeRange.current;
+ },
+ set(val) {
+ this.setTimeRange(val);
+ },
+ },
+
+ showLoader() {
+ return this.logs.isLoading || !this.logs.isComplete;
+ },
+ advancedFeaturesEnabled() {
+ const environment = this.environments.options.find(
+ ({ name }) => name === this.environments.current,
+ );
+ return environment && environment.enable_advanced_logs_querying;
+ },
+ disableAdvancedControls() {
+ return this.environments.isLoading || !this.advancedFeaturesEnabled;
+ },
+ shouldShowElasticStackCallout() {
+ return !this.isElasticStackCalloutDismissed && this.disableAdvancedControls;
+ },
+ },
+ watch: {
+ trace(val) {
+ this.$nextTick(() => {
+ if (val) {
+ scrollDown();
+ }
+ this.$refs.scrollButtons.update();
+ });
+ },
+ },
+ mounted() {
+ this.setInitData({
+ timeRange: timeRangeFromUrl() || defaultTimeRange,
+ environmentName: this.environmentName,
+ podName: this.currentPodName,
+ });
+
+ this.fetchEnvironments(this.environmentsPath);
+ },
+ methods: {
+ ...mapActions('environmentLogs', [
+ 'setInitData',
+ 'setSearch',
+ 'setTimeRange',
+ 'showPodLogs',
+ 'showEnvironment',
+ 'fetchEnvironments',
+ ]),
+ },
+};
+</script>
+<template>
+ <div class="build-page-pod-logs mt-3">
+ <gl-alert
+ v-if="shouldShowElasticStackCallout"
+ class="mb-3 js-elasticsearch-alert"
+ @dismiss="isElasticStackCalloutDismissed = true"
+ >
+ {{
+ s__(
+ 'Environments|Install Elastic Stack on your cluster to enable advanced querying capabilities such as full text search.',
+ )
+ }}
+ <a :href="clusterApplicationsDocumentationPath">
+ <strong>
+ {{ s__('View Documentation') }}
+ </strong>
+ </a>
+ </gl-alert>
+ <div class="top-bar js-top-bar d-flex">
+ <div class="row mx-n1">
+ <gl-form-group
+ id="environments-dropdown-fg"
+ :label="s__('Environments|Environment')"
+ label-size="sm"
+ label-for="environments-dropdown"
+ class="col-3 px-1"
+ >
+ <gl-dropdown
+ id="environments-dropdown"
+ :text="environments.current"
+ :disabled="environments.isLoading"
+ class="d-flex gl-h-32 js-environments-dropdown"
+ toggle-class="dropdown-menu-toggle"
+ >
+ <gl-dropdown-item
+ v-for="env in environments.options"
+ :key="env.id"
+ @click="showEnvironment(env.name)"
+ >
+ {{ env.name }}
+ </gl-dropdown-item>
+ </gl-dropdown>
+ </gl-form-group>
+ <gl-form-group
+ id="pods-dropdown-fg"
+ :label="s__('Environments|Logs from')"
+ label-size="sm"
+ label-for="pods-dropdown"
+ class="col-3 px-1"
+ >
+ <gl-dropdown
+ id="pods-dropdown"
+ :text="pods.current || s__('Environments|No pods to display')"
+ :disabled="environments.isLoading"
+ class="d-flex gl-h-32 js-pods-dropdown"
+ toggle-class="dropdown-menu-toggle"
+ >
+ <gl-dropdown-item
+ v-for="podName in pods.options"
+ :key="podName"
+ @click="showPodLogs(podName)"
+ >
+ {{ podName }}
+ </gl-dropdown-item>
+ </gl-dropdown>
+ </gl-form-group>
+ <gl-form-group
+ id="dates-fg"
+ :label="s__('Environments|Show last')"
+ label-size="sm"
+ label-for="time-window-dropdown"
+ class="col-3 px-1"
+ >
+ <date-time-picker
+ ref="dateTimePicker"
+ v-model="timeRangeModel"
+ class="w-100 gl-h-32"
+ :disabled="disableAdvancedControls"
+ :options="timeRanges"
+ />
+ </gl-form-group>
+ <gl-form-group
+ id="search-fg"
+ :label="s__('Environments|Search')"
+ label-size="sm"
+ label-for="search"
+ class="col-3 px-1"
+ >
+ <gl-search-box-by-click
+ v-model.trim="searchQuery"
+ :disabled="disableAdvancedControls"
+ :placeholder="s__('Environments|Search')"
+ class="js-logs-search"
+ type="search"
+ autofocus
+ @submit="setSearch(searchQuery)"
+ />
+ </gl-form-group>
+ </div>
+
+ <log-control-buttons
+ ref="scrollButtons"
+ class="controllers align-self-end mb-1"
+ @refresh="showPodLogs(pods.current)"
+ />
+ </div>
+ <pre class="build-trace js-log-trace"><code class="bash js-build-output">{{trace}}
+ <div v-if="showLoader" class="build-loader-animation js-build-loader-animation">
+ <div class="dot"></div>
+ <div class="dot"></div>
+ <div class="dot"></div>
+ </div></code></pre>
+ </div>
+</template>
diff --git a/app/assets/javascripts/logs/components/log_control_buttons.vue b/app/assets/javascripts/logs/components/log_control_buttons.vue
new file mode 100644
index 00000000000..d55c2f7cd4c
--- /dev/null
+++ b/app/assets/javascripts/logs/components/log_control_buttons.vue
@@ -0,0 +1,93 @@
+<script>
+import { GlButton, GlTooltipDirective } from '@gitlab/ui';
+import {
+ canScroll,
+ isScrolledToTop,
+ isScrolledToBottom,
+ scrollDown,
+ scrollUp,
+} from '~/lib/utils/scroll_utils';
+import Icon from '~/vue_shared/components/icon.vue';
+
+export default {
+ components: {
+ Icon,
+ GlButton,
+ },
+ directives: {
+ GlTooltip: GlTooltipDirective,
+ },
+ data() {
+ return {
+ scrollToTopEnabled: false,
+ scrollToBottomEnabled: false,
+ };
+ },
+ created() {
+ window.addEventListener('scroll', this.update);
+ },
+ destroyed() {
+ window.removeEventListener('scroll', this.update);
+ },
+ methods: {
+ /**
+ * Checks if page can be scrolled and updates
+ * enabled/disabled state of buttons accordingly
+ */
+ update() {
+ this.scrollToTopEnabled = canScroll() && !isScrolledToTop();
+ this.scrollToBottomEnabled = canScroll() && !isScrolledToBottom();
+ },
+ handleRefreshClick() {
+ this.$emit('refresh');
+ },
+ scrollUp,
+ scrollDown,
+ },
+};
+</script>
+
+<template>
+ <div>
+ <div
+ v-gl-tooltip
+ class="controllers-buttons"
+ :title="__('Scroll to top')"
+ aria-labelledby="scroll-to-top"
+ >
+ <gl-button
+ id="scroll-to-top"
+ class="btn-blank js-scroll-to-top"
+ :aria-label="__('Scroll to top')"
+ :disabled="!scrollToTopEnabled"
+ @click="scrollUp()"
+ ><icon name="scroll_up"
+ /></gl-button>
+ </div>
+ <div
+ v-gl-tooltip
+ class="controllers-buttons"
+ :title="__('Scroll to bottom')"
+ aria-labelledby="scroll-to-bottom"
+ >
+ <gl-button
+ id="scroll-to-bottom"
+ class="btn-blank js-scroll-to-bottom"
+ :aria-label="__('Scroll to bottom')"
+ :disabled="!scrollToBottomEnabled"
+ @click="scrollDown()"
+ ><icon name="scroll_down"
+ /></gl-button>
+ </div>
+ <gl-button
+ id="refresh-log"
+ v-gl-tooltip
+ class="ml-1 px-2 js-refresh-log"
+ :title="__('Refresh')"
+ :aria-label="__('Refresh')"
+ @click="handleRefreshClick"
+ >
+ <icon name="retry" />
+ </gl-button>
+ </div>
+</template>
diff --git a/app/assets/javascripts/logs/index.js b/app/assets/javascripts/logs/index.js
new file mode 100644
index 00000000000..70dbffdc3dd
--- /dev/null
+++ b/app/assets/javascripts/logs/index.js
@@ -0,0 +1,24 @@
+import Vue from 'vue';
+import { getParameterValues } from '~/lib/utils/url_utility';
+import LogViewer from './components/environment_logs.vue';
+import store from './stores';
+
+export default (props = {}) => {
+ const el = document.getElementById('environment-logs');
+ const [currentPodName] = getParameterValues('pod_name');
+
+ // eslint-disable-next-line no-new
+ new Vue({
+ el,
+ store,
+ render(createElement) {
+ return createElement(LogViewer, {
+ props: {
+ ...el.dataset,
+ currentPodName,
+ ...props,
+ },
+ });
+ },
+ });
+};
diff --git a/app/assets/javascripts/logs/stores/actions.js b/app/assets/javascripts/logs/stores/actions.js
new file mode 100644
index 00000000000..89a896b9dec
--- /dev/null
+++ b/app/assets/javascripts/logs/stores/actions.js
@@ -0,0 +1,114 @@
+import Api from '~/api';
+import { backOff } from '~/lib/utils/common_utils';
+import httpStatusCodes from '~/lib/utils/http_status';
+import axios from '~/lib/utils/axios_utils';
+import flash from '~/flash';
+import { s__ } from '~/locale';
+import { convertToFixedRange } from '~/lib/utils/datetime_range';
+
+import * as types from './mutation_types';
+
+const flashTimeRangeWarning = () => {
+ flash(s__('Metrics|Invalid time range, please verify.'), 'warning');
+};
+
+const flashLogsError = () => {
+ flash(s__('Metrics|There was an error fetching the logs, please try again'));
+};
+
+const requestLogsUntilData = params =>
+ backOff((next, stop) => {
+ Api.getPodLogs(params)
+ .then(res => {
+ if (res.status === httpStatusCodes.ACCEPTED) {
+ next();
+ return;
+ }
+ stop(res);
+ })
+ .catch(err => {
+ stop(err);
+ });
+ });
+
+export const setInitData = ({ commit }, { timeRange, environmentName, podName }) => {
+ if (timeRange) {
+ commit(types.SET_TIME_RANGE, timeRange);
+ }
+ commit(types.SET_PROJECT_ENVIRONMENT, environmentName);
+ commit(types.SET_CURRENT_POD_NAME, podName);
+};
+
+export const showPodLogs = ({ dispatch, commit }, podName) => {
+ commit(types.SET_CURRENT_POD_NAME, podName);
+ dispatch('fetchLogs');
+};
+
+export const setSearch = ({ dispatch, commit }, searchQuery) => {
+ commit(types.SET_SEARCH, searchQuery);
+ dispatch('fetchLogs');
+};
+
+export const setTimeRange = ({ dispatch, commit }, timeRange) => {
+ commit(types.SET_TIME_RANGE, timeRange);
+ dispatch('fetchLogs');
+};
+
+export const showEnvironment = ({ dispatch, commit }, environmentName) => {
+ commit(types.SET_PROJECT_ENVIRONMENT, environmentName);
+ commit(types.SET_CURRENT_POD_NAME, null);
+ dispatch('fetchLogs');
+};
+
+export const fetchEnvironments = ({ commit, dispatch }, environmentsPath) => {
+ commit(types.REQUEST_ENVIRONMENTS_DATA);
+
+ axios
+ .get(environmentsPath)
+ .then(({ data }) => {
+ commit(types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS, data.environments);
+ dispatch('fetchLogs');
+ })
+ .catch(() => {
+ commit(types.RECEIVE_ENVIRONMENTS_DATA_ERROR);
+ flash(s__('Metrics|There was an error fetching the environments data, please try again'));
+ });
+};
+
+export const fetchLogs = ({ commit, state }) => {
+ const params = {
+ environment: state.environments.options.find(({ name }) => name === state.environments.current),
+ podName: state.pods.current,
+ search: state.search,
+ };
+
+ if (state.timeRange.current) {
+ try {
+ const { start, end } = convertToFixedRange(state.timeRange.current);
+ params.start = start;
+ params.end = end;
+ } catch {
+ flashTimeRangeWarning();
+ }
+ }
+
+ commit(types.REQUEST_PODS_DATA);
+ commit(types.REQUEST_LOGS_DATA);
+
+ return requestLogsUntilData(params)
+ .then(({ data }) => {
+ const { pod_name, pods, logs } = data;
+ commit(types.SET_CURRENT_POD_NAME, pod_name);
+
+ commit(types.RECEIVE_PODS_DATA_SUCCESS, pods);
+ commit(types.RECEIVE_LOGS_DATA_SUCCESS, logs);
+ })
+ .catch(() => {
+ commit(types.RECEIVE_PODS_DATA_ERROR);
+ commit(types.RECEIVE_LOGS_DATA_ERROR);
+ flashLogsError();
+ });
+};
+
+// prevent babel-plugin-rewire from generating an invalid default during karma tests
+export default () => {};
diff --git a/app/assets/javascripts/logs/stores/getters.js b/app/assets/javascripts/logs/stores/getters.js
new file mode 100644
index 00000000000..c7dbb72ce3d
--- /dev/null
+++ b/app/assets/javascripts/logs/stores/getters.js
@@ -0,0 +1,9 @@
+import dateFormat from 'dateformat';
+
+export const trace = state =>
+ state.logs.lines
+ .map(item => [dateFormat(item.timestamp, 'UTC:mmm dd HH:MM:ss.l"Z"'), item.message].join(' | '))
+ .join('\n');
+
+// prevent babel-plugin-rewire from generating an invalid default during karma tests
+export default () => {};
diff --git a/app/assets/javascripts/logs/stores/index.js b/app/assets/javascripts/logs/stores/index.js
new file mode 100644
index 00000000000..d16941ddf93
--- /dev/null
+++ b/app/assets/javascripts/logs/stores/index.js
@@ -0,0 +1,23 @@
+import Vue from 'vue';
+import Vuex from 'vuex';
+import * as actions from './actions';
+import * as getters from './getters';
+import mutations from './mutations';
+import state from './state';
+
+Vue.use(Vuex);
+
+export const createStore = () =>
+ new Vuex.Store({
+ modules: {
+ environmentLogs: {
+ namespaced: true,
+ actions,
+ mutations,
+ state: state(),
+ getters,
+ },
+ },
+ });
+
+export default createStore;
diff --git a/app/assets/javascripts/logs/stores/mutation_types.js b/app/assets/javascripts/logs/stores/mutation_types.js
new file mode 100644
index 00000000000..b8e70f95d92
--- /dev/null
+++ b/app/assets/javascripts/logs/stores/mutation_types.js
@@ -0,0 +1,16 @@
+export const SET_PROJECT_ENVIRONMENT = 'SET_PROJECT_ENVIRONMENT';
+export const SET_SEARCH = 'SET_SEARCH';
+export const SET_TIME_RANGE = 'SET_TIME_RANGE';
+export const SET_CURRENT_POD_NAME = 'SET_CURRENT_POD_NAME';
+
+export const REQUEST_ENVIRONMENTS_DATA = 'REQUEST_ENVIRONMENTS_DATA';
+export const RECEIVE_ENVIRONMENTS_DATA_SUCCESS = 'RECEIVE_ENVIRONMENTS_DATA_SUCCESS';
+export const RECEIVE_ENVIRONMENTS_DATA_ERROR = 'RECEIVE_ENVIRONMENTS_DATA_ERROR';
+
+export const REQUEST_LOGS_DATA = 'REQUEST_LOGS_DATA';
+export const RECEIVE_LOGS_DATA_SUCCESS = 'RECEIVE_LOGS_DATA_SUCCESS';
+export const RECEIVE_LOGS_DATA_ERROR = 'RECEIVE_LOGS_DATA_ERROR';
+
+export const REQUEST_PODS_DATA = 'REQUEST_PODS_DATA';
+export const RECEIVE_PODS_DATA_SUCCESS = 'RECEIVE_PODS_DATA_SUCCESS';
+export const RECEIVE_PODS_DATA_ERROR = 'RECEIVE_PODS_DATA_ERROR';
diff --git a/app/assets/javascripts/logs/stores/mutations.js b/app/assets/javascripts/logs/stores/mutations.js
new file mode 100644
index 00000000000..ca31dd3bc20
--- /dev/null
+++ b/app/assets/javascripts/logs/stores/mutations.js
@@ -0,0 +1,61 @@
+import * as types from './mutation_types';
+
+export default {
+ /** Search data */
+ [types.SET_SEARCH](state, searchQuery) {
+ state.search = searchQuery;
+ },
+
+ /** Time Range data */
+ [types.SET_TIME_RANGE](state, timeRange) {
+ state.timeRange.current = timeRange;
+ },
+
+ /** Environments data */
+ [types.SET_PROJECT_ENVIRONMENT](state, environmentName) {
+ state.environments.current = environmentName;
+ },
+ [types.REQUEST_ENVIRONMENTS_DATA](state) {
+ state.environments.options = [];
+ state.environments.isLoading = true;
+ },
+ [types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS](state, environmentOptions) {
+ state.environments.options = environmentOptions;
+ state.environments.isLoading = false;
+ },
+ [types.RECEIVE_ENVIRONMENTS_DATA_ERROR](state) {
+ state.environments.options = [];
+ state.environments.isLoading = false;
+ },
+
+ /** Logs data */
+ [types.REQUEST_LOGS_DATA](state) {
+ state.logs.lines = [];
+ state.logs.isLoading = true;
+ state.logs.isComplete = false;
+ },
+ [types.RECEIVE_LOGS_DATA_SUCCESS](state, lines) {
+ state.logs.lines = lines;
+ state.logs.isLoading = false;
+ state.logs.isComplete = true;
+ },
+ [types.RECEIVE_LOGS_DATA_ERROR](state) {
+ state.logs.lines = [];
+ state.logs.isLoading = false;
+ state.logs.isComplete = true;
+ },
+
+ /** Pods data */
+ [types.SET_CURRENT_POD_NAME](state, podName) {
+ state.pods.current = podName;
+ },
+ [types.REQUEST_PODS_DATA](state) {
+ state.pods.options = [];
+ },
+ [types.RECEIVE_PODS_DATA_SUCCESS](state, podOptions) {
+ state.pods.options = podOptions;
+ },
+ [types.RECEIVE_PODS_DATA_ERROR](state) {
+ state.pods.options = [];
+ },
+};
diff --git a/app/assets/javascripts/logs/stores/state.js b/app/assets/javascripts/logs/stores/state.js
new file mode 100644
index 00000000000..eaf1b1bdd93
--- /dev/null
+++ b/app/assets/javascripts/logs/stores/state.js
@@ -0,0 +1,42 @@
+import { timeRanges, defaultTimeRange } from '~/monitoring/constants';
+
+export default () => ({
+ /**
+ * Full text search
+ */
+ search: '',
+
+ /**
+ * Time range (Show last)
+ */
+ timeRange: {
+ options: timeRanges,
+ current: defaultTimeRange,
+ },
+
+ /**
+ * Environments list information
+ */
+ environments: {
+ options: [],
+ isLoading: false,
+ current: null,
+ },
+
+ /**
+ * Logs including trace
+ */
+ logs: {
+ lines: [],
+ isLoading: false,
+ isComplete: true,
+ },
+
+ /**
+ * Pods list information
+ */
+ pods: {
+ options: [],
+ current: null,
+ },
+});
diff --git a/app/assets/javascripts/logs/utils.js b/app/assets/javascripts/logs/utils.js
new file mode 100644
index 00000000000..668efee74e8
--- /dev/null
+++ b/app/assets/javascripts/logs/utils.js
@@ -0,0 +1,23 @@
+import { secondsToMilliseconds } from '~/lib/utils/datetime_utility';
+
+/**
+ * Returns a time range (`start`, `end`) where `start` is the
+ * current time minus a given number of seconds and `end`
+ * is the current time (`now()`).
+ *
+ * @param {Number} seconds Seconds duration, defaults to 0.
+ * @returns {Object} range Time range
+ * @returns {String} range.start ISO String of current time minus given seconds
+ * @returns {String} range.end ISO String of current time
+ */
+export const getTimeRange = (seconds = 0) => {
+ const end = Math.floor(Date.now() / 1000); // convert milliseconds to seconds
+ const start = end - seconds;
+
+ return {
+ start: new Date(secondsToMilliseconds(start)).toISOString(),
+ end: new Date(secondsToMilliseconds(end)).toISOString(),
+ };
+};
+
+export default {};
diff --git a/app/assets/javascripts/pages/projects/logs/index.js b/app/assets/javascripts/pages/projects/logs/index.js
new file mode 100644
index 00000000000..36747069ebb
--- /dev/null
+++ b/app/assets/javascripts/pages/projects/logs/index.js
@@ -0,0 +1,3 @@
+import logsBundle from '~/logs';
+
+document.addEventListener('DOMContentLoaded', logsBundle);
diff --git a/app/assets/stylesheets/framework/common.scss b/app/assets/stylesheets/framework/common.scss
index 20846502e85..4d8ae8a5652 100644
--- a/app/assets/stylesheets/framework/common.scss
+++ b/app/assets/stylesheets/framework/common.scss
@@ -524,6 +524,8 @@ img.emoji {
cursor: pointer;
}
+.cursor-not-allowed { cursor: not-allowed; }
+
// this needs to use "!important" due to some very specific styles
// around buttons
.cursor-default {
diff --git a/app/assets/stylesheets/pages/builds.scss b/app/assets/stylesheets/pages/builds.scss
index 0db90fc88fc..59266af96b4 100644
--- a/app/assets/stylesheets/pages/builds.scss
+++ b/app/assets/stylesheets/pages/builds.scss
@@ -357,3 +357,42 @@
}
}
}
+
+.build-page-pod-logs {
+ .build-trace-container {
+ position: relative;
+ }
+
+ .build-trace {
+ @include build-trace();
+ }
+
+ .top-bar {
+ @include build-trace-top-bar($gl-line-height * 5);
+
+ .dropdown-menu-toggle {
+ width: 200px;
+
+ @include media-breakpoint-up(sm) {
+ width: 300px;
+ }
+ }
+
+ .controllers {
+ @include build-controllers(16px, flex-end, true, 2);
+ }
+
+ .refresh-control {
+ @include build-controllers(16px, flex-end, true, 0);
+ margin-left: 2px;
+ }
+ }
+
+ .btn-refresh svg {
+ top: 0;
+ }
+
+ .build-loader-animation {
+ @include build-loader-animation;
+ }
+}
diff --git a/app/controllers/projects/logs_controller.rb b/app/controllers/projects/logs_controller.rb
new file mode 100644
index 00000000000..1b0fdf2a337
--- /dev/null
+++ b/app/controllers/projects/logs_controller.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+module Projects
+ class LogsController < Projects::ApplicationController
+ before_action :authorize_read_pod_logs!
+ before_action :environment
+ before_action :ensure_deployments, only: %i(k8s elasticsearch)
+
+ def index
+ if environment.nil?
+ render :empty_logs
+ else
+ render :index
+ end
+ end
+
+ def k8s
+ render_logs(::PodLogs::KubernetesService, k8s_params)
+ end
+
+ def elasticsearch
+ render_logs(::PodLogs::ElasticsearchService, elasticsearch_params)
+ end
+
+ private
+
+ def render_logs(service, permitted_params)
+ ::Gitlab::UsageCounters::PodLogs.increment(project.id)
+ ::Gitlab::PollingInterval.set_header(response, interval: 3_000)
+
+ result = service.new(cluster, namespace, params: permitted_params).execute
+
+ if result.nil?
+ head :accepted
+ elsif result[:status] == :success
+ render json: result
+ else
+ render status: :bad_request, json: result
+ end
+ end
+
+ def index_params
+ params.permit(:environment_name)
+ end
+
+ def k8s_params
+ params.permit(:container_name, :pod_name)
+ end
+
+ def elasticsearch_params
+ params.permit(:container_name, :pod_name, :search, :start, :end)
+ end
+
+ def environment
+ @environment ||= if index_params.key?(:environment_name)
+ EnvironmentsFinder.new(project, current_user, name: index_params[:environment_name]).find.first
+ else
+ project.default_environment
+ end
+ end
+
+ def cluster
+ environment.deployment_platform&.cluster
+ end
+
+ def namespace
+ environment.deployment_namespace
+ end
+
+ def ensure_deployments
+ return if cluster && namespace.present?
+
+ render status: :bad_request, json: {
+ status: :error,
+ message: _('Environment does not have deployments')
+ }
+ end
+ end
+end
diff --git a/app/helpers/environments_helper.rb b/app/helpers/environments_helper.rb
index fd330d4efd9..6bf920448a5 100644
--- a/app/helpers/environments_helper.rb
+++ b/app/helpers/environments_helper.rb
@@ -41,4 +41,13 @@ module EnvironmentsHelper
"external-dashboard-url" => project.metrics_setting_external_dashboard_url
}
end
+
+ def environment_logs_data(project, environment)
+ {
+ "environment-name": environment.name,
+ "environments-path": project_environments_path(project, format: :json),
+ "environment-id": environment.id,
+ "cluster-applications-documentation-path" => help_page_path('user/clusters/applications.md', anchor: 'elastic-stack')
+ }
+ end
end
diff --git a/app/models/environment.rb b/app/models/environment.rb
index 0e2962b893a..3f9247b1544 100644
--- a/app/models/environment.rb
+++ b/app/models/environment.rb
@@ -330,6 +330,10 @@ class Environment < ApplicationRecord
self.auto_stop_at = parsed_result.seconds.from_now
end
+ def elastic_stack_available?
+ !!deployment_platform&.cluster&.application_elastic_stack&.available?
+ end
+
private
def has_metrics_and_can_query?
diff --git a/app/models/snippet_repository.rb b/app/models/snippet_repository.rb
index 70f26001b5f..f879f58b5a3 100644
--- a/app/models/snippet_repository.rb
+++ b/app/models/snippet_repository.rb
@@ -18,12 +18,6 @@ class SnippetRepository < ApplicationRecord
end
end
- def create_file(user, path, content, **options)
- options[:actions] = transform_file_entries([{ file_path: path, content: content }])
-
- capture_git_error { repository.multi_action(user, **options) }
- end
-
def multi_files_action(user, files = [], **options)
return if files.nil? || files.empty?
diff --git a/app/policies/project_policy.rb b/app/policies/project_policy.rb
index 15d60fe9cd8..95b92d4c108 100644
--- a/app/policies/project_policy.rb
+++ b/app/policies/project_policy.rb
@@ -314,6 +314,7 @@ class ProjectPolicy < BasePolicy
enable :admin_operations
enable :read_deploy_token
enable :create_deploy_token
+ enable :read_pod_logs
end
rule { (mirror_available & can?(:admin_project)) | admin }.enable :admin_remote_mirror
diff --git a/app/serializers/environment_entity.rb b/app/serializers/environment_entity.rb
index 74d6806e83f..d9af7af8a8b 100644
--- a/app/serializers/environment_entity.rb
+++ b/app/serializers/environment_entity.rb
@@ -47,6 +47,22 @@ class EnvironmentEntity < Grape::Entity
environment.available? && can?(current_user, :stop_environment, environment)
end
+ expose :logs_path, if: -> (*) { can_read_pod_logs? } do |environment|
+ project_logs_path(environment.project, environment_name: environment.name)
+ end
+
+ expose :logs_api_path, if: -> (*) { can_read_pod_logs? } do |environment|
+ if environment.elastic_stack_available?
+ elasticsearch_project_logs_path(environment.project, environment_name: environment.name, format: :json)
+ else
+ k8s_project_logs_path(environment.project, environment_name: environment.name, format: :json)
+ end
+ end
+
+ expose :enable_advanced_logs_querying, if: -> (*) { can_read_pod_logs? } do |environment|
+ environment.elastic_stack_available?
+ end
+
private
alias_method :environment, :object
@@ -63,6 +79,10 @@ class EnvironmentEntity < Grape::Entity
can?(current_user, :update_environment, environment)
end
+ def can_read_pod_logs?
+ can?(current_user, :read_pod_logs, environment.project)
+ end
+
def cluster_platform_kubernetes?
deployment_platform && deployment_platform.is_a?(Clusters::Platforms::Kubernetes)
end
diff --git a/app/services/pod_logs/base_service.rb b/app/services/pod_logs/base_service.rb
new file mode 100644
index 00000000000..668ee6b88a8
--- /dev/null
+++ b/app/services/pod_logs/base_service.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+module PodLogs
+ class BaseService < ::BaseService
+ include ReactiveCaching
+ include Stepable
+
+ attr_reader :cluster, :namespace, :params
+
+ CACHE_KEY_GET_POD_LOG = 'get_pod_log'
+ K8S_NAME_MAX_LENGTH = 253
+
+ SUCCESS_RETURN_KEYS = %i(status logs pod_name container_name pods).freeze
+
+ def id
+ cluster.id
+ end
+
+ def initialize(cluster, namespace, params: {})
+ @cluster = cluster
+ @namespace = namespace
+ @params = filter_params(params.dup.stringify_keys).to_hash
+ end
+
+ def execute
+ with_reactive_cache(
+ CACHE_KEY_GET_POD_LOG,
+ namespace,
+ params
+ ) do |result|
+ result
+ end
+ end
+
+ def calculate_reactive_cache(request, _namespace, _params)
+ case request
+ when CACHE_KEY_GET_POD_LOG
+ execute_steps
+ else
+ exception = StandardError.new('Unknown reactive cache request')
+ Gitlab::ErrorTracking.track_and_raise_for_dev_exception(exception, request: request)
+ error(_('Unknown cache key'))
+ end
+ end
+
+ private
+
+ def valid_params
+ %w(pod_name container_name)
+ end
+
+ def check_arguments(result)
+ return error(_('Cluster does not exist')) if cluster.nil?
+ return error(_('Namespace is empty')) if namespace.blank?
+
+ success(result)
+ end
+
+ def check_param_lengths(_result)
+ pod_name = params['pod_name'].presence
+ container_name = params['container_name'].presence
+
+ if pod_name&.length.to_i > K8S_NAME_MAX_LENGTH
+ return error(_('pod_name cannot be larger than %{max_length}'\
+ ' chars' % { max_length: K8S_NAME_MAX_LENGTH }))
+ elsif container_name&.length.to_i > K8S_NAME_MAX_LENGTH
+ return error(_('container_name cannot be larger than'\
+ ' %{max_length} chars' % { max_length: K8S_NAME_MAX_LENGTH }))
+ end
+
+ success(pod_name: pod_name, container_name: container_name)
+ end
+
+ def get_raw_pods(result)
+ result[:raw_pods] = cluster.kubeclient.get_pods(namespace: namespace)
+
+ success(result)
+ end
+
+ def get_pod_names(result)
+ result[:pods] = result[:raw_pods].map(&:metadata).map(&:name)
+
+ success(result)
+ end
+
+ def check_pod_name(result)
+ # If pod_name is not received as parameter, get the pod logs of the first
+ # pod of this namespace.
+ result[:pod_name] ||= result[:pods].first
+
+ unless result[:pod_name]
+ return error(_('No pods available'))
+ end
+
+ unless result[:pods].include?(result[:pod_name])
+ return error(_('Pod does not exist'))
+ end
+
+ success(result)
+ end
+
+    # Step: resolves and validates the container name for the selected pod.
+    # Defaults to the pod's first container when none was requested.
+    def check_container_name(result)
+      # BUG FIX: Array#first silently ignores a block, so the original
+      # `raw_pods.first { ... }` always returned the first pod regardless of
+      # the requested pod_name. `find` performs the intended lookup.
+      pod_details = result[:raw_pods].find { |p| p.metadata.name == result[:pod_name] }
+      containers = pod_details.spec.containers.map(&:name)
+
+      # select first container if not specified
+      result[:container_name] ||= containers.first
+
+      unless result[:container_name]
+        return error(_('No containers available'))
+      end
+
+      unless containers.include?(result[:container_name])
+        return error(_('Container does not exist'))
+      end
+
+      success(result)
+    end
+
+ def pod_logs(result)
+ raise NotImplementedError
+ end
+
+ def filter_return_keys(result)
+ result.slice(*SUCCESS_RETURN_KEYS)
+ end
+
+ def filter_params(params)
+ params.slice(*valid_params)
+ end
+ end
+end
diff --git a/app/services/pod_logs/elasticsearch_service.rb b/app/services/pod_logs/elasticsearch_service.rb
new file mode 100644
index 00000000000..7524bf7ce10
--- /dev/null
+++ b/app/services/pod_logs/elasticsearch_service.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+module PodLogs
+ class ElasticsearchService < BaseService
+ steps :check_arguments,
+ :check_param_lengths,
+ :get_raw_pods,
+ :get_pod_names,
+ :check_pod_name,
+ :check_container_name,
+ :check_times,
+ :check_search,
+ :pod_logs,
+ :filter_return_keys
+
+ self.reactive_cache_worker_finder = ->(id, _cache_key, namespace, params) { new(::Clusters::Cluster.find(id), namespace, params: params) }
+
+ private
+
+ def valid_params
+ %w(pod_name container_name search start end)
+ end
+
+ def check_times(result)
+ result[:start] = params['start'] if params.key?('start') && Time.iso8601(params['start'])
+ result[:end] = params['end'] if params.key?('end') && Time.iso8601(params['end'])
+
+ success(result)
+ rescue ArgumentError
+ error(_('Invalid start or end time format'))
+ end
+
+ def check_search(result)
+ result[:search] = params['search'] if params.key?('search')
+
+ success(result)
+ end
+
+ def pod_logs(result)
+ client = cluster&.application_elastic_stack&.elasticsearch_client
+ return error(_('Unable to connect to Elasticsearch')) unless client
+
+ result[:logs] = ::Gitlab::Elasticsearch::Logs.new(client).pod_logs(
+ namespace,
+ result[:pod_name],
+ result[:container_name],
+ result[:search],
+ result[:start],
+ result[:end]
+ )
+
+ success(result)
+ rescue Elasticsearch::Transport::Transport::ServerError => e
+ ::Gitlab::ErrorTracking.track_exception(e)
+
+ error(_('Elasticsearch returned status code: %{status_code}') % {
+ # ServerError is the parent class of exceptions named after HTTP status codes, eg: "Elasticsearch::Transport::Transport::Errors::NotFound"
+ # there is no method on the exception other than the class name to determine the type of error encountered.
+ status_code: e.class.name.split('::').last
+ })
+ end
+ end
+end
diff --git a/app/services/pod_logs/kubernetes_service.rb b/app/services/pod_logs/kubernetes_service.rb
new file mode 100644
index 00000000000..8f12b364e73
--- /dev/null
+++ b/app/services/pod_logs/kubernetes_service.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+module PodLogs
+ class KubernetesService < BaseService
+    # Maximum number of log lines fetched per request (tail_lines).
+    # NOTE: `.freeze` removed — Integer literals are immutable, so freezing
+    # is a no-op (RuboCop Style/RedundantFreeze).
+    LOGS_LIMIT = 500
+    # Unicode replacement character used when re-encoding logs to UTF-8.
+    REPLACEMENT_CHAR = "\u{FFFD}"
+
+    # Raised when Kubernetes log output cannot be converted to UTF-8.
+    EncodingHelperError = Class.new(StandardError)
+
+ steps :check_arguments,
+ :check_param_lengths,
+ :get_raw_pods,
+ :get_pod_names,
+ :check_pod_name,
+ :check_container_name,
+ :pod_logs,
+ :encode_logs_to_utf8,
+ :split_logs,
+ :filter_return_keys
+
+ self.reactive_cache_worker_finder = ->(id, _cache_key, namespace, params) { new(::Clusters::Cluster.find(id), namespace, params: params) }
+
+ private
+
+ def pod_logs(result)
+ result[:logs] = cluster.kubeclient.get_pod_log(
+ result[:pod_name],
+ namespace,
+ container: result[:container_name],
+ tail_lines: LOGS_LIMIT,
+ timestamps: true
+ ).body
+
+ success(result)
+ rescue Kubeclient::ResourceNotFoundError
+ error(_('Pod not found'))
+ rescue Kubeclient::HttpError => e
+ ::Gitlab::ErrorTracking.track_exception(e)
+
+ error(_('Kubernetes API returned status code: %{error_code}') % {
+ error_code: e.error_code
+ })
+ end
+
+ # Check https://gitlab.com/gitlab-org/gitlab/issues/34965#note_292261879
+ # for more details on why this is necessary.
+ def encode_logs_to_utf8(result)
+ return success(result) if result[:logs].nil?
+ return success(result) if result[:logs].encoding == Encoding::UTF_8
+
+ result[:logs] = encode_utf8(result[:logs])
+
+ success(result)
+ rescue EncodingHelperError
+ error(_('Unable to convert Kubernetes logs encoding to UTF-8'))
+ end
+
+ def split_logs(result)
+ result[:logs] = result[:logs].strip.lines(chomp: true).map do |line|
+ # message contains a RFC3339Nano timestamp, then a space, then the log line.
+ # resolution of the nanoseconds can vary, so we split on the first space
+ values = line.split(' ', 2)
+ {
+ timestamp: values[0],
+ message: values[1]
+ }
+ end
+
+ success(result)
+ end
+
+ def encode_utf8(logs)
+ utf8_logs = Gitlab::EncodingHelper.encode_utf8(logs.dup, replace: REPLACEMENT_CHAR)
+
+ # Gitlab::EncodingHelper.encode_utf8 can return '' or nil if an exception
+ # is raised while encoding. We prefer to return an error rather than wrongly
+ # display blank logs.
+ no_utf8_logs = logs.present? && utf8_logs.blank?
+ unexpected_encoding = utf8_logs&.encoding != Encoding::UTF_8
+
+ if no_utf8_logs || unexpected_encoding
+ raise EncodingHelperError, 'Could not convert Kubernetes logs to UTF-8'
+ end
+
+ utf8_logs
+ end
+ end
+end
diff --git a/app/views/layouts/nav/sidebar/_project.html.haml b/app/views/layouts/nav/sidebar/_project.html.haml
index 5afe43d6636..aef9532fd46 100644
--- a/app/views/layouts/nav/sidebar/_project.html.haml
+++ b/app/views/layouts/nav/sidebar/_project.html.haml
@@ -263,7 +263,11 @@
%span
= _('Serverless')
- = render_if_exists 'layouts/nav/sidebar/pod_logs_link' # EE-specific
+ - if project_nav_tab?(:environments) && can?(current_user, :read_pod_logs, @project)
+ = nav_link(controller: :logs, action: [:index]) do
+ = link_to project_logs_path(@project), title: _('Logs') do
+ %span
+ = _('Logs')
- if project_nav_tab? :clusters
- show_cluster_hint = show_gke_cluster_integration_callout?(@project)
diff --git a/app/views/projects/logs/empty_logs.html.haml b/app/views/projects/logs/empty_logs.html.haml
new file mode 100644
index 00000000000..52598e0be8d
--- /dev/null
+++ b/app/views/projects/logs/empty_logs.html.haml
@@ -0,0 +1,14 @@
+- page_title _('Logs')
+
+.row.empty-state
+ .col-sm-12
+ .svg-content
+ = image_tag 'illustrations/operations_log_pods_empty.svg'
+ .col-12
+ .text-content
+ %h4.text-center
+ = s_('Environments|No deployed environments')
+ %p.state-description.text-center
+ = s_('Logs|To see the logs, deploy your code to an environment.')
+ .text-center
+ = link_to s_('Environments|Learn about environments'), help_page_path('ci/environments'), class: 'btn btn-success'
diff --git a/app/views/projects/logs/index.html.haml b/app/views/projects/logs/index.html.haml
new file mode 100644
index 00000000000..1f74eb52fd9
--- /dev/null
+++ b/app/views/projects/logs/index.html.haml
@@ -0,0 +1 @@
+#environment-logs{ data: environment_logs_data(@project, @environment) }
diff --git a/changelogs/unreleased/ak-move-logs-to-core.yml b/changelogs/unreleased/ak-move-logs-to-core.yml
new file mode 100644
index 00000000000..488059161ad
--- /dev/null
+++ b/changelogs/unreleased/ak-move-logs-to-core.yml
@@ -0,0 +1,5 @@
+---
+title: Move pod logs to core
+merge_request: 25455
+author:
+type: changed
diff --git a/changelogs/unreleased/mo-fix-capybara-screenshots-rails.yml b/changelogs/unreleased/mo-fix-capybara-screenshots-rails.yml
new file mode 100644
index 00000000000..ae01f5503d8
--- /dev/null
+++ b/changelogs/unreleased/mo-fix-capybara-screenshots-rails.yml
@@ -0,0 +1,5 @@
+---
+title: Fix capybara screenshots path name for rails configuration
+merge_request: 27002
+author:
+type: fixed
diff --git a/config/gitlab.yml.example b/config/gitlab.yml.example
index ce9df6b6024..760688d8088 100644
--- a/config/gitlab.yml.example
+++ b/config/gitlab.yml.example
@@ -1227,7 +1227,7 @@ test:
client_path: tmp/tests/gitaly
token: secret
workhorse:
- secret_file: tmp/tests/gitlab_workhorse_secret
+ secret_file: tmp/gitlab_workhorse_test_secret
backup:
path: tmp/tests/backups
pseudonymizer:
diff --git a/config/routes/project.rb b/config/routes/project.rb
index 809c1386f2c..83575580321 100644
--- a/config/routes/project.rb
+++ b/config/routes/project.rb
@@ -175,6 +175,13 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
end
end
+ resources :logs, only: [:index] do
+ collection do
+ get :k8s
+ get :elasticsearch
+ end
+ end
+
resources :starrers, only: [:index]
resources :forks, only: [:index, :new, :create]
resources :group_links, only: [:index, :create, :update, :destroy], constraints: { id: /\d+/ }
diff --git a/doc/development/i18n/proofreader.md b/doc/development/i18n/proofreader.md
index 3cd8bf20e13..5535011d8c1 100644
--- a/doc/development/i18n/proofreader.md
+++ b/doc/development/i18n/proofreader.md
@@ -56,6 +56,7 @@ are very appreciative of the work done by translators and proofreaders!
- Adi Ferdian - [GitLab](https://gitlab.com/adiferd), [Crowdin](https://crowdin.com/profile/adiferd)
- Ahmad Naufal Mukhtar - [GitLab](https://gitlab.com/anaufalm), [Crowdin](https://crowdin.com/profile/anaufalm)
- Italian
+ - Massimiliano Cuttini - [GitLab](https://gitlab.com/maxcuttins), [Crowdin](https://crowdin.com/profile/maxcuttins)
- Paolo Falomo - [GitLab](https://gitlab.com/paolofalomo), [Crowdin](https://crowdin.com/profile/paolo.falomo)
- Japanese
- Hiroyuki Sato - [GitLab](https://gitlab.com/hiroponz), [Crowdin](https://crowdin.com/profile/hiroponz)
diff --git a/doc/user/admin_area/settings/continuous_integration.md b/doc/user/admin_area/settings/continuous_integration.md
index a77baf5d46c..bdb5125e5aa 100644
--- a/doc/user/admin_area/settings/continuous_integration.md
+++ b/doc/user/admin_area/settings/continuous_integration.md
@@ -191,3 +191,16 @@ To set required pipeline configuration:
1. Click **Save changes**.
![Required pipeline](img/admin_required_pipeline.png)
+
+## Package Registry configuration **(PREMIUM ONLY)**
+
+GitLab administrators can disable the forwarding of NPM requests to [npmjs.com](https://www.npmjs.com/).
+
+To disable it:
+
+1. Go to **Admin Area > Settings > CI/CD**.
+1. Expand the **Package Registry** section.
+1. Uncheck **Enable forwarding of NPM package requests to npmjs.org**.
+1. Click **Save changes**.
+
+![NPM package requests forwarding](img/admin_package_registry_npm_package_requests_forward.png)
diff --git a/doc/user/admin_area/settings/img/admin_package_registry_npm_package_requests_forward.png b/doc/user/admin_area/settings/img/admin_package_registry_npm_package_requests_forward.png
new file mode 100644
index 00000000000..3cd2e1adc29
--- /dev/null
+++ b/doc/user/admin_area/settings/img/admin_package_registry_npm_package_requests_forward.png
Binary files differ
diff --git a/doc/user/incident_management/index.md b/doc/user/incident_management/index.md
index 249dc8c8ad8..21dd3bf4d9a 100644
--- a/doc/user/incident_management/index.md
+++ b/doc/user/incident_management/index.md
@@ -88,12 +88,13 @@ dropdown box above the upper right corner of the panel:
The options are:
-- [View logs](#view-logs-ultimate) **(ULTIMATE)**
+- [View logs](#view-logs)
- [Download CSV](#download-csv)
-##### View logs **(ULTIMATE)**
+##### View logs
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/201846) in GitLab Ultimate 12.8.
+> [Moved](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25455) to [GitLab Core](https://about.gitlab.com/pricing/) 12.9.
This can be useful if you are triaging an application incident and need to
[explore logs](../project/integrations/prometheus.md#view-logs-ultimate)
diff --git a/doc/user/packages/npm_registry/index.md b/doc/user/packages/npm_registry/index.md
index ac21459d137..5801a30cc4e 100644
--- a/doc/user/packages/npm_registry/index.md
+++ b/doc/user/packages/npm_registry/index.md
@@ -269,6 +269,14 @@ Or if you're using Yarn:
yarn add @my-project-scope/my-package
```
+### Forwarding requests to npmjs.org
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/55344) in [GitLab Premium](https://about.gitlab.com/pricing/) 12.9.
+
+By default, when an NPM package is not found in the GitLab NPM Registry, the request will be forwarded to [npmjs.com](https://www.npmjs.com/).
+
+Administrators can disable this behavior in the [Continuous Integration settings](../../admin_area/settings/continuous_integration.md).
+
## Removing a package
In the packages view of your project page, you can delete packages by clicking
diff --git a/doc/user/project/clusters/index.md b/doc/user/project/clusters/index.md
index 9087653145b..c9d7d723eb5 100644
--- a/doc/user/project/clusters/index.md
+++ b/doc/user/project/clusters/index.md
@@ -27,7 +27,7 @@ Using the GitLab project Kubernetes integration, you can:
- Use [Web terminals](#web-terminals).
- Use [Deploy Boards](#deploy-boards-premium). **(PREMIUM)**
- Use [Canary Deployments](#canary-deployments-premium). **(PREMIUM)**
-- View [Logs](#logs-ultimate). **(ULTIMATE)**
+- View [Logs](#logs).
- Run serverless workloads on [Kubernetes with Knative](serverless/index.md).
### Deploy Boards **(PREMIUM)**
@@ -48,7 +48,7 @@ the need to leave GitLab.
[Read more about Canary Deployments](../canary_deployments.md)
-### Logs **(ULTIMATE)**
+### Logs
GitLab makes it easy to view the logs of running pods in connected Kubernetes clusters. By displaying the logs directly in GitLab, developers can avoid having to manage console tools or jump to a different interface.
diff --git a/doc/user/project/clusters/kubernetes_pod_logs.md b/doc/user/project/clusters/kubernetes_pod_logs.md
index 7fb3e797fc7..f1899c7b8d9 100644
--- a/doc/user/project/clusters/kubernetes_pod_logs.md
+++ b/doc/user/project/clusters/kubernetes_pod_logs.md
@@ -1,6 +1,7 @@
-# Kubernetes Logs **(ULTIMATE)**
+# Kubernetes Logs
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/4752) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.0.
+> [Moved](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25455) to [GitLab Core](https://about.gitlab.com/pricing/) 12.9.
GitLab makes it easy to view the logs of running pods in [connected Kubernetes clusters](index.md).
By displaying the logs directly in GitLab, developers can avoid having to manage console tools or jump to a different interface.
diff --git a/lib/gitlab/background_migration/backfill_snippet_repositories.rb b/lib/gitlab/background_migration/backfill_snippet_repositories.rb
new file mode 100644
index 00000000000..fa6453abefb
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_snippet_repositories.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+    # Background migration that creates a repository and an initial commit
+    # for snippets that are missing one (backfills snippet repositories).
+ class BackfillSnippetRepositories
+ MAX_RETRIES = 2
+
+ def perform(start_id, stop_id)
+ Snippet.includes(:author, snippet_repository: :shard).where(id: start_id..stop_id).find_each do |snippet|
+ # We need to expire the exists? value for the cached method in case it was cached
+ snippet.repository.expire_exists_cache
+
+ next if repository_present?(snippet)
+
+ retry_index = 0
+
+ begin
+ create_repository_and_files(snippet)
+
+ logger.info(message: 'Snippet Migration: repository created and migrated', snippet: snippet.id)
+ rescue => e
+ retry_index += 1
+
+ retry if retry_index < MAX_RETRIES
+
+ logger.error(message: "Snippet Migration: error migrating snippet. Reason: #{e.message}", snippet: snippet.id)
+
+ destroy_snippet_repository(snippet)
+ delete_repository(snippet)
+ end
+ end
+ end
+
+ private
+
+ def repository_present?(snippet)
+ snippet.snippet_repository && !snippet.empty_repo?
+ end
+
+ def create_repository_and_files(snippet)
+ snippet.create_repository
+ create_commit(snippet)
+ end
+
+ def destroy_snippet_repository(snippet)
+ # Removing the db record
+ snippet.snippet_repository&.destroy
+ rescue => e
+ logger.error(message: "Snippet Migration: error destroying snippet repository. Reason: #{e.message}", snippet: snippet.id)
+ end
+
+ def delete_repository(snippet)
+ # Removing the repository in disk
+ snippet.repository.remove if snippet.repository_exists?
+ rescue => e
+ logger.error(message: "Snippet Migration: error deleting repository. Reason: #{e.message}", snippet: snippet.id)
+ end
+
+ def logger
+ @logger ||= Gitlab::BackgroundMigration::Logger.build
+ end
+
+ def snippet_action(snippet)
+ # We don't need the previous_path param
+ # Because we're not updating any existing file
+ [{ file_path: filename(snippet),
+ content: snippet.content }]
+ end
+
+ def filename(snippet)
+ snippet.file_name.presence || empty_file_name
+ end
+
+ def empty_file_name
+ @empty_file_name ||= "#{SnippetRepository::DEFAULT_EMPTY_FILE_NAME}1.txt"
+ end
+
+ def commit_attrs
+ @commit_attrs ||= { branch_name: 'master', message: 'Initial commit' }
+ end
+
+ def create_commit(snippet)
+ snippet.snippet_repository.multi_files_action(snippet.author, snippet_action(snippet), commit_attrs)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/elasticsearch/logs.rb b/lib/gitlab/elasticsearch/logs.rb
new file mode 100644
index 00000000000..eca8b71dd7d
--- /dev/null
+++ b/lib/gitlab/elasticsearch/logs.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Elasticsearch
+ class Logs
+ # How many log lines to fetch in a query
+ LOGS_LIMIT = 500
+
+ def initialize(client)
+ @client = client
+ end
+
+ def pod_logs(namespace, pod_name, container_name = nil, search = nil, start_time = nil, end_time = nil)
+ query = { bool: { must: [] } }.tap do |q|
+ filter_pod_name(q, pod_name)
+ filter_namespace(q, namespace)
+ filter_container_name(q, container_name)
+ filter_search(q, search)
+ filter_times(q, start_time, end_time)
+ end
+
+ body = build_body(query)
+ response = @client.search body: body
+
+ format_response(response)
+ end
+
+ private
+
+ def build_body(query)
+ {
+ query: query,
+ # reverse order so we can query N-most recent records
+ sort: [
+ { "@timestamp": { order: :desc } },
+ { "offset": { order: :desc } }
+ ],
+ # only return these fields in the response
+ _source: ["@timestamp", "message"],
+ # fixed limit for now, we should support paginated queries
+ size: ::Gitlab::Elasticsearch::Logs::LOGS_LIMIT
+ }
+ end
+
+ def filter_pod_name(query, pod_name)
+ query[:bool][:must] << {
+ match_phrase: {
+ "kubernetes.pod.name" => {
+ query: pod_name
+ }
+ }
+ }
+ end
+
+ def filter_namespace(query, namespace)
+ query[:bool][:must] << {
+ match_phrase: {
+ "kubernetes.namespace" => {
+ query: namespace
+ }
+ }
+ }
+ end
+
+ def filter_container_name(query, container_name)
+ # A pod can contain multiple containers.
+ # By default we return logs from every container
+ return if container_name.nil?
+
+ query[:bool][:must] << {
+ match_phrase: {
+ "kubernetes.container.name" => {
+ query: container_name
+ }
+ }
+ }
+ end
+
+ def filter_search(query, search)
+ return if search.nil?
+
+ query[:bool][:must] << {
+ simple_query_string: {
+ query: search,
+ fields: [:message],
+ default_operator: :and
+ }
+ }
+ end
+
+ def filter_times(query, start_time, end_time)
+ return unless start_time || end_time
+
+ time_range = { range: { :@timestamp => {} } }.tap do |tr|
+ tr[:range][:@timestamp][:gte] = start_time if start_time
+ tr[:range][:@timestamp][:lt] = end_time if end_time
+ end
+
+ query[:bool][:filter] = [time_range]
+ end
+
+ def format_response(response)
+ result = response.fetch("hits", {}).fetch("hits", []).map do |hit|
+ {
+ timestamp: hit["_source"]["@timestamp"],
+ message: hit["_source"]["message"]
+ }
+ end
+
+ # we queried for the N-most recent records but we want them ordered oldest to newest
+ result.reverse
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/usage_counters/common.rb b/lib/gitlab/usage_counters/common.rb
new file mode 100644
index 00000000000..a5bdac430f4
--- /dev/null
+++ b/lib/gitlab/usage_counters/common.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module UsageCounters
+ class Common
+ class << self
+ def increment(project_id)
+ Gitlab::Redis::SharedState.with { |redis| redis.hincrby(base_key, project_id, 1) }
+ end
+
+ def usage_totals
+ Gitlab::Redis::SharedState.with do |redis|
+ total_sum = 0
+
+ totals = redis.hgetall(base_key).each_with_object({}) do |(project_id, count), result|
+ total_sum += result[project_id.to_i] = count.to_i
+ end
+
+ totals[:total] = total_sum
+ totals
+ end
+ end
+
+ def base_key
+ raise NotImplementedError
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/usage_counters/pod_logs.rb b/lib/gitlab/usage_counters/pod_logs.rb
new file mode 100644
index 00000000000..94e29d2fad7
--- /dev/null
+++ b/lib/gitlab/usage_counters/pod_logs.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module UsageCounters
+ class PodLogs < Common
+ def self.base_key
+ 'POD_LOGS_USAGE_COUNTS'
+ end
+ end
+ end
+end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index edff8701c58..06eb51252f3 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -6110,6 +6110,9 @@ msgstr ""
msgid "CycleAnalyticsStage|should be under a group"
msgstr ""
+msgid "CycleAnalytics|%{selectedLabelsCount} selected (%{maxLabels} max)"
+msgstr ""
+
msgid "CycleAnalytics|%{stageCount} stages selected"
msgstr ""
@@ -6131,6 +6134,9 @@ msgstr ""
msgid "CycleAnalytics|Number of tasks"
msgstr ""
+msgid "CycleAnalytics|Only %{maxLabels} labels can be selected at this time"
+msgstr ""
+
msgid "CycleAnalytics|Project selected"
msgid_plural "CycleAnalytics|%d projects selected"
msgstr[0] ""
@@ -13170,6 +13176,9 @@ msgstr ""
msgid "No licenses found."
msgstr ""
+msgid "No matching labels"
+msgstr ""
+
msgid "No matching results"
msgstr ""
@@ -19906,6 +19915,9 @@ msgstr ""
msgid "There was an error fetching the environments information."
msgstr ""
+msgid "There was an error fetching the top labels for the selected group"
+msgstr ""
+
msgid "There was an error fetching the variables."
msgstr ""
@@ -20864,6 +20876,9 @@ msgstr ""
msgid "Total artifacts size: %{total_size}"
msgstr ""
+msgid "Total cores (vCPUs)"
+msgstr ""
+
msgid "Total issues"
msgstr ""
diff --git a/package.json b/package.json
index 6f44ec9924a..44577a5aa6d 100644
--- a/package.json
+++ b/package.json
@@ -40,7 +40,7 @@
"@babel/preset-env": "^7.8.4",
"@gitlab/at.js": "^1.5.5",
"@gitlab/svgs": "^1.110.0",
- "@gitlab/ui": "^9.23.0",
+ "@gitlab/ui": "^9.23.1",
"@gitlab/visual-review-tools": "1.5.1",
"@sentry/browser": "^5.10.2",
"@sourcegraph/code-host-integration": "0.0.31",
@@ -215,4 +215,4 @@
"node": ">=10.13.0",
"yarn": "^1.10.0"
}
-}
+} \ No newline at end of file
diff --git a/qa/qa/resource/ssh_key.rb b/qa/qa/resource/ssh_key.rb
index 22bdea424ca..3e130aef9e4 100644
--- a/qa/qa/resource/ssh_key.rb
+++ b/qa/qa/resource/ssh_key.rb
@@ -7,6 +7,8 @@ module QA
attr_accessor :title
+ attribute :id
+
def_delegators :key, :private_key, :public_key, :md5_fingerprint
def key
@@ -21,6 +23,35 @@ module QA
profile_page.add_key(public_key, title)
end
end
+
+ def fabricate_via_api!
+ api_post
+ end
+
+ def api_delete
+ QA::Runtime::Logger.debug("Deleting SSH key with title '#{title}' and fingerprint '#{md5_fingerprint}'")
+
+ super
+ end
+
+ def api_get_path
+ "/user/keys/#{id}"
+ end
+
+ def api_post_path
+ '/user/keys'
+ end
+
+ def api_post_body
+ {
+ title: title,
+ key: public_key
+ }
+ end
+
+ def api_delete_path
+ "/user/keys/#{id}"
+ end
end
end
end
diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/add_ssh_key_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/add_ssh_key_spec.rb
index c3379d41ff2..25866e12185 100644
--- a/qa/qa/specs/features/browser_ui/3_create/repository/add_ssh_key_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/repository/add_ssh_key_spec.rb
@@ -8,7 +8,7 @@ module QA
it 'user adds and then removes an SSH key', :smoke do
Flow::Login.sign_in
- key = Resource::SSHKey.fabricate! do |resource|
+ key = Resource::SSHKey.fabricate_via_browser_ui! do |resource|
resource.title = key_title
end
diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb
index a9e9380cac4..e845c3ca8ea 100644
--- a/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/repository/protocol_v2_push_ssh_spec.rb
@@ -11,29 +11,24 @@ module QA
let(:key_title) { "key for ssh tests #{Time.now.to_f}" }
let(:ssh_key) do
- Resource::SSHKey.fabricate! do |resource|
+ Resource::SSHKey.fabricate_via_api! do |resource|
resource.title = key_title
end
end
around do |example|
- # Create an SSH key to be used with Git
+ # Create an SSH key to be used with Git, then remove it after the test
Flow::Login.sign_in
ssh_key
example.run
- # Remove the SSH key
- Flow::Login.sign_in
- Page::Main::Menu.perform(&:click_settings_link)
- Page::Profile::Menu.perform(&:click_ssh_keys)
- Page::Profile::SSHKeys.perform do |ssh_keys|
- ssh_keys.remove_key(key_title)
- end
+ ssh_key.remove_via_api!
+
+ Page::Main::Menu.perform(&:sign_out_if_signed_in)
end
it 'user pushes to the repository' do
- # Create a project to push to
project = Resource::Project.fabricate_via_api! do |project|
project.name = 'git-protocol-project'
end
@@ -68,11 +63,8 @@ module QA
project.visit!
project.wait_for_push_new_branch
- # Check that the push worked
expect(page).to have_content(file_name)
expect(page).to have_content(file_content)
-
- # And check that the correct Git protocol was used
expect(git_protocol_reported).to eq(git_protocol)
end
end
diff --git a/qa/qa/specs/features/browser_ui/3_create/repository/use_ssh_key_spec.rb b/qa/qa/specs/features/browser_ui/3_create/repository/use_ssh_key_spec.rb
index ab60ee33c1e..1a3c6d03098 100644
--- a/qa/qa/specs/features/browser_ui/3_create/repository/use_ssh_key_spec.rb
+++ b/qa/qa/specs/features/browser_ui/3_create/repository/use_ssh_key_spec.rb
@@ -11,7 +11,7 @@ module QA
it 'user adds an ssh key and pushes code to the repository' do
Flow::Login.sign_in
- key = Resource::SSHKey.fabricate! do |resource|
+ key = Resource::SSHKey.fabricate_via_api! do |resource|
resource.title = key_title
end
diff --git a/spec/controllers/projects/logs_controller_spec.rb b/spec/controllers/projects/logs_controller_spec.rb
new file mode 100644
index 00000000000..ea71dbe45aa
--- /dev/null
+++ b/spec/controllers/projects/logs_controller_spec.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::LogsController do
+ include KubernetesHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let_it_be(:environment) do
+ create(:environment, name: 'production', project: project)
+ end
+
+ let(:pod_name) { "foo" }
+ let(:container) { 'container-1' }
+
+ before do
+ project.add_maintainer(user)
+
+ sign_in(user)
+ end
+
+ describe 'GET #index' do
+ let(:empty_project) { create(:project) }
+
+ it 'renders empty logs page if no environment exists' do
+ empty_project.add_maintainer(user)
+ get :index, params: { namespace_id: empty_project.namespace, project_id: empty_project }
+
+ expect(response).to be_ok
+ expect(response).to render_template 'empty_logs'
+ end
+
+ it 'renders index template' do
+ get :index, params: environment_params
+
+ expect(response).to be_ok
+ expect(response).to render_template 'index'
+ end
+ end
+
+ shared_examples 'pod logs service' do |endpoint, service|
+ let(:service_result) do
+ {
+ status: :success,
+ logs: ['Log 1', 'Log 2', 'Log 3'],
+ pods: [pod_name],
+ pod_name: pod_name,
+ container_name: container
+ }
+ end
+ let(:service_result_json) { JSON.parse(service_result.to_json) }
+
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*', projects: [project]) }
+
+ before do
+ allow_next_instance_of(service) do |instance|
+ allow(instance).to receive(:execute).and_return(service_result)
+ end
+ end
+
+ it 'returns the service result' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response).to eq(service_result_json)
+ end
+
+ it 'registers a usage of the endpoint' do
+ expect(::Gitlab::UsageCounters::PodLogs).to receive(:increment).with(project.id)
+
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+
+ it 'sets the polling header' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(response.headers['Poll-Interval']).to eq('3000')
+ end
+
+ context 'when service is processing' do
+ let(:service_result) { nil }
+
+ it 'returns a 202' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+ end
+
+ shared_examples 'unsuccessful execution response' do |message|
+ let(:service_result) do
+ {
+ status: :error,
+ message: message
+ }
+ end
+
+ it 'returns the error' do
+ get endpoint, params: environment_params(pod_name: pod_name, format: :json)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response).to eq(service_result_json)
+ end
+ end
+
+ context 'when service is failing' do
+ it_behaves_like 'unsuccessful execution response', 'some error'
+ end
+
+ context 'when cluster is nil' do
+ let!(:cluster) { nil }
+
+ it_behaves_like 'unsuccessful execution response', 'Environment does not have deployments'
+ end
+
+ context 'when namespace is empty' do
+ before do
+ allow(environment).to receive(:deployment_namespace).and_return('')
+ end
+
+ it_behaves_like 'unsuccessful execution response', 'Environment does not have deployments'
+ end
+ end
+
+ describe 'GET #k8s' do
+ it_behaves_like 'pod logs service', :k8s, PodLogs::KubernetesService
+ end
+
+ describe 'GET #elasticsearch' do
+ it_behaves_like 'pod logs service', :elasticsearch, PodLogs::ElasticsearchService
+ end
+
+ def environment_params(opts = {})
+ opts.reverse_merge(namespace_id: project.namespace,
+ project_id: project,
+ environment_name: environment.name)
+ end
+end
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index dabb2b2dbf2..10958db299b 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -70,6 +70,7 @@ describe 'Project navbar' do
_('Environments'),
_('Error Tracking'),
_('Serverless'),
+ _('Logs'),
_('Kubernetes')
]
},
diff --git a/spec/fixtures/api/schemas/environment.json b/spec/fixtures/api/schemas/environment.json
index 7e7e5ce37e3..84217a2a01c 100644
--- a/spec/fixtures/api/schemas/environment.json
+++ b/spec/fixtures/api/schemas/environment.json
@@ -26,6 +26,9 @@
"stop_path": { "type": "string" },
"cancel_auto_stop_path": { "type": "string" },
"folder_path": { "type": "string" },
+ "logs_path": { "type": "string" },
+ "logs_api_path": { "type": "string" },
+ "enable_advanced_logs_querying": { "type": "boolean" },
"created_at": { "type": "string", "format": "date-time" },
"updated_at": { "type": "string", "format": "date-time" },
"auto_stop_at": { "type": "string", "format": "date-time" },
diff --git a/spec/fixtures/lib/elasticsearch/logs_response.json b/spec/fixtures/lib/elasticsearch/logs_response.json
new file mode 100644
index 00000000000..7a733882089
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/logs_response.json
@@ -0,0 +1,73 @@
+{
+ "took": 7087,
+ "timed_out": false,
+ "_shards": {
+ "total": 151,
+ "successful": 151,
+ "skipped": 0,
+ "failed": 0,
+ "failures": []
+ },
+ "hits": {
+ "total": 486924,
+ "max_score": null,
+ "hits": [
+ {
+ "_index": "filebeat-6.7.0-2019.10.25",
+ "_type": "doc",
+ "_id": "SkbxAW4BWzhswgK-C5-R",
+ "_score": null,
+ "_source": {
+ "message": "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13",
+ "@timestamp": "2019-12-13T14:35:34.034Z"
+ },
+ "sort": [
+ 9999998,
+ 1571990602947
+ ]
+ },
+ {
+ "_index": "filebeat-6.7.0-2019.10.27",
+ "_type": "doc",
+ "_id": "wEigD24BWzhswgK-WUU2",
+ "_score": null,
+ "_source": {
+ "message": "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13",
+ "@timestamp": "2019-12-13T14:35:35.034Z"
+ },
+ "sort": [
+ 9999949,
+ 1572220194500
+ ]
+ },
+ {
+ "_index": "filebeat-6.7.0-2019.11.04",
+ "_type": "doc",
+ "_id": "gE6uOG4BWzhswgK-M0x2",
+ "_score": null,
+ "_source": {
+ "message": "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13",
+ "@timestamp": "2019-12-13T14:35:36.034Z"
+ },
+ "sort": [
+ 9999944,
+ 1572908964497
+ ]
+ },
+ {
+ "_index": "filebeat-6.7.0-2019.10.30",
+ "_type": "doc",
+ "_id": "0klPHW4BWzhswgK-nfCF",
+ "_score": null,
+ "_source": {
+ "message": "- -\u003e /",
+ "@timestamp": "2019-12-13T14:35:37.034Z"
+ },
+ "sort": [
+ 9999934,
+ 1572449784442
+ ]
+ }
+ ]
+ }
+}
diff --git a/spec/fixtures/lib/elasticsearch/query.json b/spec/fixtures/lib/elasticsearch/query.json
new file mode 100644
index 00000000000..565c871b1c7
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query.json
@@ -0,0 +1,39 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_container.json b/spec/fixtures/lib/elasticsearch/query_with_container.json
new file mode 100644
index 00000000000..21eac5d7dbe
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_container.json
@@ -0,0 +1,46 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.container.name": {
+ "query": "auto-deploy-app"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_end_time.json b/spec/fixtures/lib/elasticsearch/query_with_end_time.json
new file mode 100644
index 00000000000..2859e6427d4
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_end_time.json
@@ -0,0 +1,48 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ],
+ "filter": [
+ {
+ "range": {
+ "@timestamp": {
+ "lt": "2019-12-13T14:35:34.034Z"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_search.json b/spec/fixtures/lib/elasticsearch/query_with_search.json
new file mode 100644
index 00000000000..3c9bed047fa
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_search.json
@@ -0,0 +1,48 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ },
+ {
+ "simple_query_string": {
+ "query": "foo +bar ",
+ "fields": [
+ "message"
+ ],
+ "default_operator": "and"
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_start_time.json b/spec/fixtures/lib/elasticsearch/query_with_start_time.json
new file mode 100644
index 00000000000..0c5cfca42f7
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_start_time.json
@@ -0,0 +1,48 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ],
+ "filter": [
+ {
+ "range": {
+ "@timestamp": {
+ "gte": "2019-12-13T14:35:34.034Z"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/fixtures/lib/elasticsearch/query_with_times.json b/spec/fixtures/lib/elasticsearch/query_with_times.json
new file mode 100644
index 00000000000..7108d42217e
--- /dev/null
+++ b/spec/fixtures/lib/elasticsearch/query_with_times.json
@@ -0,0 +1,49 @@
+{
+ "query": {
+ "bool": {
+ "must": [
+ {
+ "match_phrase": {
+ "kubernetes.pod.name": {
+ "query": "production-6866bc8974-m4sk4"
+ }
+ }
+ },
+ {
+ "match_phrase": {
+ "kubernetes.namespace": {
+ "query": "autodevops-deploy-9-production"
+ }
+ }
+ }
+ ],
+ "filter": [
+ {
+ "range": {
+ "@timestamp": {
+ "gte": "2019-12-13T14:35:34.034Z",
+ "lt": "2019-12-13T14:35:34.034Z"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "sort": [
+ {
+ "@timestamp": {
+ "order": "desc"
+ }
+ },
+ {
+ "offset": {
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": [
+ "@timestamp",
+ "message"
+ ],
+ "size": 500
+}
diff --git a/spec/frontend/blob/sketch/index_spec.js b/spec/frontend/blob/sketch/index_spec.js
new file mode 100644
index 00000000000..f5e9da21b2a
--- /dev/null
+++ b/spec/frontend/blob/sketch/index_spec.js
@@ -0,0 +1,92 @@
+import JSZip from 'jszip';
+import SketchLoader from '~/blob/sketch';
+
+jest.mock('jszip');
+
+describe('Sketch viewer', () => {
+ preloadFixtures('static/sketch_viewer.html');
+
+ beforeEach(() => {
+ loadFixtures('static/sketch_viewer.html');
+ window.URL = {
+ createObjectURL: jest.fn(() => 'http://foo/bar'),
+ };
+ });
+
+ afterEach(() => {
+ window.URL = {};
+ });
+
+ describe('with error message', () => {
+ beforeEach(done => {
+ jest.spyOn(SketchLoader.prototype, 'getZipFile').mockImplementation(
+ () =>
+ new Promise((resolve, reject) => {
+ reject();
+ done();
+ }),
+ );
+
+ return new SketchLoader(document.getElementById('js-sketch-viewer'));
+ });
+
+ it('renders error message', () => {
+ expect(document.querySelector('#js-sketch-viewer p')).not.toBeNull();
+
+ expect(document.querySelector('#js-sketch-viewer p').textContent.trim()).toContain(
+ 'Cannot show preview.',
+ );
+ });
+
+ it('removes the loading icon', () => {
+ expect(document.querySelector('.js-loading-icon')).toBeNull();
+ });
+ });
+
+ describe('success', () => {
+ beforeEach(done => {
+ const loadAsyncMock = {
+ files: {
+ 'previews/preview.png': {
+ async: jest.fn(),
+ },
+ },
+ };
+
+ loadAsyncMock.files['previews/preview.png'].async.mockImplementation(
+ () =>
+ new Promise(resolve => {
+ resolve('foo');
+ done();
+ }),
+ );
+
+ jest.spyOn(SketchLoader.prototype, 'getZipFile').mockResolvedValue();
+ jest.spyOn(JSZip, 'loadAsync').mockResolvedValue(loadAsyncMock);
+ return new SketchLoader(document.getElementById('js-sketch-viewer'));
+ });
+
+ it('does not render error message', () => {
+ expect(document.querySelector('#js-sketch-viewer p')).toBeNull();
+ });
+
+ it('removes the loading icon', () => {
+ expect(document.querySelector('.js-loading-icon')).toBeNull();
+ });
+
+ it('renders preview img', () => {
+ const img = document.querySelector('#js-sketch-viewer img');
+
+ expect(img).not.toBeNull();
+ expect(img.classList.contains('img-fluid')).toBeTruthy();
+ });
+
+ it('renders link to image', () => {
+ const img = document.querySelector('#js-sketch-viewer img');
+ const link = document.querySelector('#js-sketch-viewer a');
+
+ expect(link.href).toBe(img.src);
+ expect(link.target).toBe('_blank');
+ });
+ });
+});
diff --git a/spec/frontend/clusters_list/mock_data.js b/spec/frontend/clusters_list/mock_data.js
index 1812bf9b03f..5398975d81c 100644
--- a/spec/frontend/clusters_list/mock_data.js
+++ b/spec/frontend/clusters_list/mock_data.js
@@ -5,6 +5,7 @@ export default [
size: '3',
clusterType: 'group_type',
status: 'disabled',
+ cpu: '6 (100% free)',
memory: '22.50 (30% free)',
},
{
@@ -13,6 +14,7 @@ export default [
size: '12',
clusterType: 'project_type',
status: 'unreachable',
+ cpu: '3 (50% free)',
memory: '11 (60% free)',
},
{
@@ -21,6 +23,7 @@ export default [
size: '12',
clusterType: 'project_type',
status: 'authentication_failure',
+ cpu: '1 (0% free)',
memory: '22 (33% free)',
},
{
@@ -29,6 +32,7 @@ export default [
size: '12',
clusterType: 'project_type',
status: 'deleting',
+ cpu: '6 (100% free)',
memory: '45 (15% free)',
},
{
@@ -37,6 +41,7 @@ export default [
size: '12',
clusterType: 'project_type',
status: 'connected',
+ cpu: '6 (100% free)',
memory: '20.12 (35% free)',
},
];
diff --git a/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js b/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
index 292b8694fbc..14f2a527dfb 100644
--- a/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
+++ b/spec/frontend/create_cluster/components/cluster_form_dropdown_spec.js
@@ -7,22 +7,22 @@ import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue
import DropdownSearchInput from '~/vue_shared/components/dropdown/dropdown_search_input.vue';
describe('ClusterFormDropdown', () => {
- let vm;
+ let wrapper;
const firstItem = { name: 'item 1', value: 1 };
const secondItem = { name: 'item 2', value: 2 };
const items = [firstItem, secondItem, { name: 'item 3', value: 3 }];
beforeEach(() => {
- vm = shallowMount(ClusterFormDropdown);
+ wrapper = shallowMount(ClusterFormDropdown);
});
- afterEach(() => vm.destroy());
+ afterEach(() => wrapper.destroy());
describe('when initial value is provided', () => {
it('sets selectedItem to initial value', () => {
- vm.setProps({ items, value: secondItem.value });
+ wrapper.setProps({ items, value: secondItem.value });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(secondItem.name);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(secondItem.name);
});
});
});
@@ -31,28 +31,29 @@ describe('ClusterFormDropdown', () => {
it('displays placeholder text', () => {
const placeholder = 'placeholder';
- vm.setProps({ placeholder });
+ wrapper.setProps({ placeholder });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(placeholder);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(placeholder);
});
});
});
describe('when an item is selected', () => {
beforeEach(() => {
- vm.setProps({ items });
+ wrapper.setProps({ items });
- return vm.vm.$nextTick().then(() => {
- vm.findAll('.js-dropdown-item')
+ return wrapper.vm.$nextTick().then(() => {
+ wrapper
+ .findAll('.js-dropdown-item')
.at(1)
.trigger('click');
- return vm.vm.$nextTick();
+ return wrapper.vm.$nextTick();
});
});
it('emits input event with selected item', () => {
- expect(vm.emitted('input')[0]).toEqual([secondItem.value]);
+ expect(wrapper.emitted('input')[0]).toEqual([secondItem.value]);
});
});
@@ -60,37 +61,54 @@ describe('ClusterFormDropdown', () => {
const value = [1];
beforeEach(() => {
- vm.setProps({ items, multiple: true, value });
- return vm.vm
+ wrapper.setProps({ items, multiple: true, value });
+ return wrapper.vm
.$nextTick()
.then(() => {
- vm.findAll('.js-dropdown-item')
+ wrapper
+ .findAll('.js-dropdown-item')
.at(0)
.trigger('click');
- return vm.vm.$nextTick();
+ return wrapper.vm.$nextTick();
})
.then(() => {
- vm.findAll('.js-dropdown-item')
+ wrapper
+ .findAll('.js-dropdown-item')
.at(1)
.trigger('click');
- return vm.vm.$nextTick();
+ return wrapper.vm.$nextTick();
});
});
it('emits input event with an array of selected items', () => {
- expect(vm.emitted('input')[1]).toEqual([[firstItem.value, secondItem.value]]);
+ expect(wrapper.emitted('input')[1]).toEqual([[firstItem.value, secondItem.value]]);
});
});
describe('when multiple items can be selected', () => {
beforeEach(() => {
- vm.setProps({ items, multiple: true, value: firstItem.value });
- return vm.vm.$nextTick();
+ wrapper.setProps({ items, multiple: true, value: firstItem.value });
+ return wrapper.vm.$nextTick();
});
it('displays a checked GlIcon next to the item', () => {
- expect(vm.find(GlIcon).is('.invisible')).toBe(false);
- expect(vm.find(GlIcon).props('name')).toBe('mobile-issue-close');
+ expect(wrapper.find(GlIcon).is('.invisible')).toBe(false);
+ expect(wrapper.find(GlIcon).props('name')).toBe('mobile-issue-close');
+ });
+ });
+
+ describe('when multiple values can be selected and initial value is null', () => {
+ it('emits input event with an array of a single selected item', () => {
+ wrapper.setProps({ items, multiple: true, value: null });
+
+ return wrapper.vm.$nextTick().then(() => {
+ wrapper
+ .findAll('.js-dropdown-item')
+ .at(0)
+ .trigger('click');
+
+ expect(wrapper.emitted('input')[0]).toEqual([[firstItem.value]]);
+ });
});
});
@@ -101,20 +119,20 @@ describe('ClusterFormDropdown', () => {
const currentValue = 1;
const customLabelItems = [{ [labelProperty]: label, value: currentValue }];
- vm.setProps({ labelProperty, items: customLabelItems, value: currentValue });
+ wrapper.setProps({ labelProperty, items: customLabelItems, value: currentValue });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(label);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(label);
});
});
});
describe('when loading', () => {
it('dropdown button isLoading', () => {
- vm.setProps({ loading: true });
+ wrapper.setProps({ loading: true });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('isLoading')).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('isLoading')).toBe(true);
});
});
});
@@ -123,20 +141,20 @@ describe('ClusterFormDropdown', () => {
it('uses loading text as toggle button text', () => {
const loadingText = 'loading text';
- vm.setProps({ loading: true, loadingText });
+ wrapper.setProps({ loading: true, loadingText });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toEqual(loadingText);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toEqual(loadingText);
});
});
});
describe('when disabled', () => {
it('dropdown button isDisabled', () => {
- vm.setProps({ disabled: true });
+ wrapper.setProps({ disabled: true });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('isDisabled')).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('isDisabled')).toBe(true);
});
});
});
@@ -145,20 +163,20 @@ describe('ClusterFormDropdown', () => {
it('uses disabled text as toggle button text', () => {
const disabledText = 'disabled text';
- vm.setProps({ disabled: true, disabledText });
+ wrapper.setProps({ disabled: true, disabledText });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).props('toggleText')).toBe(disabledText);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).props('toggleText')).toBe(disabledText);
});
});
});
describe('when has errors', () => {
it('sets border-danger class selector to dropdown toggle', () => {
- vm.setProps({ hasErrors: true });
+ wrapper.setProps({ hasErrors: true });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownButton).classes('border-danger')).toBe(true);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownButton).classes('border-danger')).toBe(true);
});
});
});
@@ -167,10 +185,10 @@ describe('ClusterFormDropdown', () => {
it('displays error message', () => {
const errorMessage = 'error message';
- vm.setProps({ hasErrors: true, errorMessage });
+ wrapper.setProps({ hasErrors: true, errorMessage });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find('.js-eks-dropdown-error-message').text()).toEqual(errorMessage);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('.js-eks-dropdown-error-message').text()).toEqual(errorMessage);
});
});
});
@@ -179,10 +197,10 @@ describe('ClusterFormDropdown', () => {
it('displays empty text', () => {
const emptyText = 'error message';
- vm.setProps({ items: [], emptyText });
+ wrapper.setProps({ items: [], emptyText });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find('.js-empty-text').text()).toEqual(emptyText);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find('.js-empty-text').text()).toEqual(emptyText);
});
});
});
@@ -190,34 +208,36 @@ describe('ClusterFormDropdown', () => {
it('displays search field placeholder', () => {
const searchFieldPlaceholder = 'Placeholder';
- vm.setProps({ searchFieldPlaceholder });
+ wrapper.setProps({ searchFieldPlaceholder });
- return vm.vm.$nextTick().then(() => {
- expect(vm.find(DropdownSearchInput).props('placeholderText')).toEqual(searchFieldPlaceholder);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.find(DropdownSearchInput).props('placeholderText')).toEqual(
+ searchFieldPlaceholder,
+ );
});
});
it('it filters results by search query', () => {
const searchQuery = secondItem.name;
- vm.setProps({ items });
- vm.setData({ searchQuery });
+ wrapper.setProps({ items });
+ wrapper.setData({ searchQuery });
- return vm.vm.$nextTick().then(() => {
- expect(vm.findAll('.js-dropdown-item').length).toEqual(1);
- expect(vm.find('.js-dropdown-item').text()).toEqual(secondItem.name);
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.findAll('.js-dropdown-item').length).toEqual(1);
+ expect(wrapper.find('.js-dropdown-item').text()).toEqual(secondItem.name);
});
});
it('focuses dropdown search input when dropdown is displayed', () => {
- const dropdownEl = vm.find('.dropdown').element;
+ const dropdownEl = wrapper.find('.dropdown').element;
- expect(vm.find(DropdownSearchInput).props('focused')).toBe(false);
+ expect(wrapper.find(DropdownSearchInput).props('focused')).toBe(false);
$(dropdownEl).trigger('shown.bs.dropdown');
- return vm.vm.$nextTick(() => {
- expect(vm.find(DropdownSearchInput).props('focused')).toBe(true);
+ return wrapper.vm.$nextTick(() => {
+ expect(wrapper.find(DropdownSearchInput).props('focused')).toBe(true);
});
});
});
diff --git a/spec/frontend/logs/components/environment_logs_spec.js b/spec/frontend/logs/components/environment_logs_spec.js
new file mode 100644
index 00000000000..26542c3d046
--- /dev/null
+++ b/spec/frontend/logs/components/environment_logs_spec.js
@@ -0,0 +1,334 @@
+import Vue from 'vue';
+import { GlDropdown, GlDropdownItem, GlSearchBoxByClick } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
+import EnvironmentLogs from '~/logs/components/environment_logs.vue';
+
+import { createStore } from '~/logs/stores';
+import { scrollDown } from '~/lib/utils/scroll_utils';
+import {
+ mockEnvName,
+ mockEnvironments,
+ mockPods,
+ mockLogsResult,
+ mockTrace,
+ mockPodName,
+ mockSearch,
+ mockEnvironmentsEndpoint,
+ mockDocumentationPath,
+} from '../mock_data';
+
+jest.mock('~/lib/utils/scroll_utils');
+
+describe('EnvironmentLogs', () => {
+ let EnvironmentLogsComponent;
+ let store;
+ let wrapper;
+ let state;
+
+ const propsData = {
+ environmentName: mockEnvName,
+ environmentsPath: mockEnvironmentsEndpoint,
+ clusterApplicationsDocumentationPath: mockDocumentationPath,
+ };
+
+ const actionMocks = {
+ setInitData: jest.fn(),
+ setSearch: jest.fn(),
+ showPodLogs: jest.fn(),
+ showEnvironment: jest.fn(),
+ fetchEnvironments: jest.fn(),
+ };
+
+ const updateControlBtnsMock = jest.fn();
+
+ const findEnvironmentsDropdown = () => wrapper.find('.js-environments-dropdown');
+ const findPodsDropdown = () => wrapper.find('.js-pods-dropdown');
+ const findSearchBar = () => wrapper.find('.js-logs-search');
+ const findTimeRangePicker = () => wrapper.find({ ref: 'dateTimePicker' });
+ const findInfoAlert = () => wrapper.find('.js-elasticsearch-alert');
+
+ const findLogControlButtons = () => wrapper.find({ name: 'log-control-buttons-stub' });
+ const findLogTrace = () => wrapper.find('.js-log-trace');
+
+ const mockSetInitData = () => {
+ state.pods.options = mockPods;
+ state.environments.current = mockEnvName;
+ [state.pods.current] = state.pods.options;
+
+ state.logs.isComplete = false;
+ state.logs.lines = mockLogsResult;
+ };
+
+ const mockShowPodLogs = podName => {
+ state.pods.options = mockPods;
+ [state.pods.current] = podName;
+
+ state.logs.isComplete = false;
+ state.logs.lines = mockLogsResult;
+ };
+
+ const mockFetchEnvs = () => {
+ state.environments.options = mockEnvironments;
+ };
+
+ const initWrapper = () => {
+ wrapper = shallowMount(EnvironmentLogsComponent, {
+ propsData,
+ store,
+ stubs: {
+ LogControlButtons: {
+ name: 'log-control-buttons-stub',
+ template: '<div/>',
+ methods: {
+ update: updateControlBtnsMock,
+ },
+ },
+ },
+ methods: {
+ ...actionMocks,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ store = createStore();
+ state = store.state.environmentLogs;
+ EnvironmentLogsComponent = Vue.extend(EnvironmentLogs);
+ });
+
+ afterEach(() => {
+ actionMocks.setInitData.mockReset();
+ actionMocks.showPodLogs.mockReset();
+ actionMocks.fetchEnvironments.mockReset();
+
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ it('displays UI elements', () => {
+ initWrapper();
+
+ expect(wrapper.isVueInstance()).toBe(true);
+ expect(wrapper.isEmpty()).toBe(false);
+
+ // top bar
+ expect(findEnvironmentsDropdown().is(GlDropdown)).toBe(true);
+ expect(findPodsDropdown().is(GlDropdown)).toBe(true);
+ expect(findLogControlButtons().exists()).toBe(true);
+
+ expect(findSearchBar().exists()).toBe(true);
+ expect(findSearchBar().is(GlSearchBoxByClick)).toBe(true);
+ expect(findTimeRangePicker().exists()).toBe(true);
+ expect(findTimeRangePicker().is(DateTimePicker)).toBe(true);
+
+ // log trace
+ expect(findLogTrace().isEmpty()).toBe(false);
+ });
+
+ it('mounted inits data', () => {
+ initWrapper();
+
+ expect(actionMocks.setInitData).toHaveBeenCalledTimes(1);
+ expect(actionMocks.setInitData).toHaveBeenLastCalledWith({
+ timeRange: expect.objectContaining({
+ default: true,
+ }),
+ environmentName: mockEnvName,
+ podName: null,
+ });
+
+ expect(actionMocks.fetchEnvironments).toHaveBeenCalledTimes(1);
+ expect(actionMocks.fetchEnvironments).toHaveBeenLastCalledWith(mockEnvironmentsEndpoint);
+ });
+
+ describe('loading state', () => {
+ beforeEach(() => {
+ state.pods.options = [];
+
+ state.logs = {
+ lines: [],
+ isLoading: true,
+ };
+
+ state.environments = {
+ options: [],
+ isLoading: true,
+ };
+
+ initWrapper();
+ });
+
+ it('displays a disabled environments dropdown', () => {
+ expect(findEnvironmentsDropdown().attributes('disabled')).toBe('true');
+ expect(findEnvironmentsDropdown().findAll(GlDropdownItem).length).toBe(0);
+ });
+
+ it('displays a disabled pods dropdown', () => {
+ expect(findPodsDropdown().attributes('disabled')).toBe('true');
+ expect(findPodsDropdown().findAll(GlDropdownItem).length).toBe(0);
+ });
+
+ it('displays a disabled search bar', () => {
+ expect(findSearchBar().exists()).toBe(true);
+ expect(findSearchBar().attributes('disabled')).toBe('true');
+ });
+
+ it('displays a disabled time window dropdown', () => {
+ expect(findTimeRangePicker().attributes('disabled')).toBe('true');
+ });
+
+ it('does not update buttons state', () => {
+ expect(updateControlBtnsMock).not.toHaveBeenCalled();
+ });
+
+ it('shows a logs trace', () => {
+ expect(findLogTrace().text()).toBe('');
+ expect(
+ findLogTrace()
+ .find('.js-build-loader-animation')
+ .isVisible(),
+ ).toBe(true);
+ });
+ });
+
+ describe('legacy environment', () => {
+ beforeEach(() => {
+ state.pods.options = [];
+
+ state.logs = {
+ lines: [],
+ isLoading: false,
+ };
+
+ state.environments = {
+ options: mockEnvironments,
+ current: 'staging',
+ isLoading: false,
+ };
+
+ initWrapper();
+ });
+
+ it('displays a disabled time window dropdown', () => {
+ expect(findTimeRangePicker().attributes('disabled')).toBe('true');
+ });
+
+ it('displays a disabled search bar', () => {
+ expect(findSearchBar().attributes('disabled')).toBe('true');
+ });
+
+ it('displays an alert to upgrade to ES', () => {
+ expect(findInfoAlert().exists()).toBe(true);
+ });
+ });
+
+ describe('state with data', () => {
+ beforeEach(() => {
+ actionMocks.setInitData.mockImplementation(mockSetInitData);
+ actionMocks.showPodLogs.mockImplementation(mockShowPodLogs);
+ actionMocks.fetchEnvironments.mockImplementation(mockFetchEnvs);
+
+ initWrapper();
+ });
+
+ afterEach(() => {
+ scrollDown.mockReset();
+ updateControlBtnsMock.mockReset();
+
+ actionMocks.setInitData.mockReset();
+ actionMocks.showPodLogs.mockReset();
+ actionMocks.fetchEnvironments.mockReset();
+ });
+
+ it('displays an enabled search bar', () => {
+ expect(findSearchBar().attributes('disabled')).toBeFalsy();
+
+ // input a query and click `search`
+ findSearchBar().vm.$emit('input', mockSearch);
+ findSearchBar().vm.$emit('submit');
+
+ expect(actionMocks.setSearch).toHaveBeenCalledTimes(1);
+ expect(actionMocks.setSearch).toHaveBeenCalledWith(mockSearch);
+ });
+
+ it('displays an enabled time window dropdown', () => {
+ expect(findTimeRangePicker().attributes('disabled')).toBeFalsy();
+ });
+
+ it('does not display an alert to upgrade to ES', () => {
+ expect(findInfoAlert().exists()).toBe(false);
+ });
+
+ it('populates environments dropdown', () => {
+ const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
+ expect(findEnvironmentsDropdown().props('text')).toBe(mockEnvName);
+ expect(items.length).toBe(mockEnvironments.length);
+ mockEnvironments.forEach((env, i) => {
+ const item = items.at(i);
+ expect(item.text()).toBe(env.name);
+ });
+ });
+
+ it('populates pods dropdown', () => {
+ const items = findPodsDropdown().findAll(GlDropdownItem);
+
+ expect(findPodsDropdown().props('text')).toBe(mockPodName);
+ expect(items.length).toBe(mockPods.length);
+ mockPods.forEach((pod, i) => {
+ const item = items.at(i);
+ expect(item.text()).toBe(pod);
+ });
+ });
+
+ it('populates logs trace', () => {
+ const trace = findLogTrace();
+ expect(trace.text().split('\n').length).toBe(mockTrace.length);
+ expect(trace.text().split('\n')).toEqual(mockTrace);
+ });
+
+ it('update control buttons state', () => {
+ expect(updateControlBtnsMock).toHaveBeenCalledTimes(1);
+ });
+
+ it('scrolls to bottom when loaded', () => {
+ expect(scrollDown).toHaveBeenCalledTimes(1);
+ });
+
+ describe('when user clicks', () => {
+ it('environment name, trace is refreshed', () => {
+ const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
+ const index = 1; // any env
+
+ expect(actionMocks.showEnvironment).toHaveBeenCalledTimes(0);
+
+ items.at(index).vm.$emit('click');
+
+ expect(actionMocks.showEnvironment).toHaveBeenCalledTimes(1);
+ expect(actionMocks.showEnvironment).toHaveBeenLastCalledWith(mockEnvironments[index].name);
+ });
+
+ it('pod name, trace is refreshed', () => {
+ const items = findPodsDropdown().findAll(GlDropdownItem);
+ const index = 2; // any pod
+
+ expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(0);
+
+ items.at(index).vm.$emit('click');
+
+ expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(1);
+ expect(actionMocks.showPodLogs).toHaveBeenLastCalledWith(mockPods[index]);
+ });
+
+ it('refresh button, trace is refreshed', () => {
+ expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(0);
+
+ findLogControlButtons().vm.$emit('refresh');
+
+ expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(1);
+ expect(actionMocks.showPodLogs).toHaveBeenLastCalledWith(mockPodName);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/logs/components/log_control_buttons_spec.js b/spec/frontend/logs/components/log_control_buttons_spec.js
new file mode 100644
index 00000000000..f344e8189c3
--- /dev/null
+++ b/spec/frontend/logs/components/log_control_buttons_spec.js
@@ -0,0 +1,108 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlButton } from '@gitlab/ui';
+import LogControlButtons from '~/logs/components/log_control_buttons.vue';
+import {
+ canScroll,
+ isScrolledToTop,
+ isScrolledToBottom,
+ scrollDown,
+ scrollUp,
+} from '~/lib/utils/scroll_utils';
+
+jest.mock('~/lib/utils/scroll_utils');
+
+describe('LogControlButtons', () => {
+ let wrapper;
+
+ const findScrollToTop = () => wrapper.find('.js-scroll-to-top');
+ const findScrollToBottom = () => wrapper.find('.js-scroll-to-bottom');
+ const findRefreshBtn = () => wrapper.find('.js-refresh-log');
+
+ const initWrapper = () => {
+ wrapper = shallowMount(LogControlButtons);
+ };
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ it('displays UI elements', () => {
+ initWrapper();
+
+ expect(wrapper.isVueInstance()).toBe(true);
+ expect(wrapper.isEmpty()).toBe(false);
+
+ expect(findScrollToTop().is(GlButton)).toBe(true);
+ expect(findScrollToBottom().is(GlButton)).toBe(true);
+ expect(findRefreshBtn().is(GlButton)).toBe(true);
+ });
+
+ it('emits a `refresh` event on click on `refresh` button', () => {
+ initWrapper();
+
+ // An `undefined` value means no event was emitted
+ expect(wrapper.emitted('refresh')).toBe(undefined);
+
+ findRefreshBtn().vm.$emit('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.emitted('refresh')).toHaveLength(1);
+ });
+ });
+
+ describe('when scrolling actions are enabled', () => {
+ beforeEach(() => {
+ // mock scrolled to the middle of a long page
+ canScroll.mockReturnValue(true);
+ isScrolledToBottom.mockReturnValue(false);
+ isScrolledToTop.mockReturnValue(false);
+
+ initWrapper();
+ wrapper.vm.update();
+ return wrapper.vm.$nextTick();
+ });
+
+ afterEach(() => {
+ canScroll.mockReset();
+ isScrolledToTop.mockReset();
+ isScrolledToBottom.mockReset();
+ });
+
+ it('click on "scroll to top" scrolls up', () => {
+ expect(findScrollToTop().is('[disabled]')).toBe(false);
+
+ findScrollToTop().vm.$emit('click');
+
+ expect(scrollUp).toHaveBeenCalledTimes(1);
+ });
+
+ it('click on "scroll to bottom" scrolls down', () => {
+ expect(findScrollToBottom().is('[disabled]')).toBe(false);
+
+ findScrollToBottom().vm.$emit('click');
+
+ expect(scrollDown).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('when scrolling actions are disabled', () => {
+ beforeEach(() => {
+ // mock a short page without a scrollbar
+ canScroll.mockReturnValue(false);
+ isScrolledToBottom.mockReturnValue(true);
+ isScrolledToTop.mockReturnValue(true);
+
+ initWrapper();
+ });
+
+ it('buttons are disabled', () => {
+ wrapper.vm.update();
+ return wrapper.vm.$nextTick(() => {
+ expect(findScrollToTop().is('[disabled]')).toBe(true);
+ expect(findScrollToBottom().is('[disabled]')).toBe(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/logs/mock_data.js b/spec/frontend/logs/mock_data.js
new file mode 100644
index 00000000000..4c092a84b36
--- /dev/null
+++ b/spec/frontend/logs/mock_data.js
@@ -0,0 +1,85 @@
+export const mockProjectPath = 'root/autodevops-deploy';
+export const mockEnvName = 'production';
+export const mockEnvironmentsEndpoint = `${mockProjectPath}/environments.json`;
+export const mockEnvId = '99';
+export const mockDocumentationPath = '/documentation.md';
+
+const makeMockEnvironment = (id, name, advancedQuerying) => ({
+ id,
+ project_path: mockProjectPath,
+ name,
+ logs_api_path: '/dummy_logs_path.json',
+ enable_advanced_logs_querying: advancedQuerying,
+});
+
+export const mockEnvironment = makeMockEnvironment(mockEnvId, mockEnvName, true);
+export const mockEnvironments = [
+ mockEnvironment,
+ makeMockEnvironment(101, 'staging', false),
+ makeMockEnvironment(102, 'review/a-feature', false),
+];
+
+export const mockPodName = 'production-764c58d697-aaaaa';
+export const mockPods = [
+ mockPodName,
+ 'production-764c58d697-bbbbb',
+ 'production-764c58d697-ccccc',
+ 'production-764c58d697-ddddd',
+];
+
+export const mockLogsResult = [
+ {
+ timestamp: '2019-12-13T13:43:18.2760123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:18.2760123Z', message: '- -> /' },
+ {
+ timestamp: '2019-12-13T13:43:26.8420123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:26.8420123Z', message: '- -> /' },
+ {
+ timestamp: '2019-12-13T13:43:28.3710123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:28.3710123Z', message: '- -> /' },
+ {
+ timestamp: '2019-12-13T13:43:36.8860123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:36.8860123Z', message: '- -> /' },
+ {
+ timestamp: '2019-12-13T13:43:38.4000123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:38.4000123Z', message: '- -> /' },
+ {
+ timestamp: '2019-12-13T13:43:46.8420123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:46.8430123Z', message: '- -> /' },
+ {
+ timestamp: '2019-12-13T13:43:48.3240123Z',
+ message: '10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
+ },
+ { timestamp: '2019-12-13T13:43:48.3250123Z', message: '- -> /' },
+];
+
+export const mockTrace = [
+ 'Dec 13 13:43:18.276Z | 10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:18.276Z | - -> /',
+ 'Dec 13 13:43:26.842Z | 10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:26.842Z | - -> /',
+ 'Dec 13 13:43:28.371Z | 10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:28.371Z | - -> /',
+ 'Dec 13 13:43:36.886Z | 10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:36.886Z | - -> /',
+ 'Dec 13 13:43:38.400Z | 10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:38.400Z | - -> /',
+ 'Dec 13 13:43:46.842Z | 10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:46.843Z | - -> /',
+ 'Dec 13 13:43:48.324Z | 10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
+ 'Dec 13 13:43:48.325Z | - -> /',
+];
+
+export const mockSearch = 'foo +bar';
diff --git a/spec/frontend/logs/stores/actions_spec.js b/spec/frontend/logs/stores/actions_spec.js
new file mode 100644
index 00000000000..6309126159e
--- /dev/null
+++ b/spec/frontend/logs/stores/actions_spec.js
@@ -0,0 +1,324 @@
+import MockAdapter from 'axios-mock-adapter';
+
+import testAction from 'helpers/vuex_action_helper';
+import * as types from '~/logs/stores/mutation_types';
+import { convertToFixedRange } from '~/lib/utils/datetime_range';
+import logsPageState from '~/logs/stores/state';
+import {
+ setInitData,
+ setSearch,
+ showPodLogs,
+ fetchEnvironments,
+ fetchLogs,
+} from '~/logs/stores/actions';
+
+import { defaultTimeRange } from '~/monitoring/constants';
+
+import axios from '~/lib/utils/axios_utils';
+import flash from '~/flash';
+
+import {
+ mockProjectPath,
+ mockPodName,
+ mockEnvironmentsEndpoint,
+ mockEnvironments,
+ mockPods,
+ mockLogsResult,
+ mockEnvName,
+ mockSearch,
+} from '../mock_data';
+
+jest.mock('~/flash');
+jest.mock('~/lib/utils/datetime_range');
+jest.mock('~/logs/utils');
+
+const mockDefaultRange = {
+ start: '2020-01-10T18:00:00.000Z',
+ end: '2020-01-10T10:00:00.000Z',
+};
+const mockFixedRange = {
+ start: '2020-01-09T18:06:20.000Z',
+ end: '2020-01-09T18:36:20.000Z',
+};
+const mockRollingRange = {
+ duration: 120,
+};
+const mockRollingRangeAsFixed = {
+ start: '2020-01-10T18:00:00.000Z',
+ end: '2020-01-10T17:58:00.000Z',
+};
+
+describe('Logs Store actions', () => {
+ let state;
+ let mock;
+
+ convertToFixedRange.mockImplementation(range => {
+ if (range === defaultTimeRange) {
+ return { ...mockDefaultRange };
+ }
+ if (range === mockFixedRange) {
+ return { ...mockFixedRange };
+ }
+ if (range === mockRollingRange) {
+ return { ...mockRollingRangeAsFixed };
+ }
+ throw new Error('Invalid time range');
+ });
+
+ beforeEach(() => {
+ state = logsPageState();
+ });
+
+ afterEach(() => {
+ flash.mockClear();
+ });
+
+ describe('setInitData', () => {
+ it('should commit environment and pod name mutation', () =>
+ testAction(setInitData, { environmentName: mockEnvName, podName: mockPodName }, state, [
+ { type: types.SET_PROJECT_ENVIRONMENT, payload: mockEnvName },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ ]));
+ });
+
+ describe('setSearch', () => {
+ it('should commit search mutation', () =>
+ testAction(
+ setSearch,
+ mockSearch,
+ state,
+ [{ type: types.SET_SEARCH, payload: mockSearch }],
+ [{ type: 'fetchLogs' }],
+ ));
+ });
+
+ describe('showPodLogs', () => {
+ it('should commit pod name', () =>
+ testAction(
+ showPodLogs,
+ mockPodName,
+ state,
+ [{ type: types.SET_CURRENT_POD_NAME, payload: mockPodName }],
+ [{ type: 'fetchLogs' }],
+ ));
+ });
+
+ describe('fetchEnvironments', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ it('should commit RECEIVE_ENVIRONMENTS_DATA_SUCCESS mutation on correct data', () => {
+ mock.onGet(mockEnvironmentsEndpoint).replyOnce(200, { environments: mockEnvironments });
+ return testAction(
+ fetchEnvironments,
+ mockEnvironmentsEndpoint,
+ state,
+ [
+ { type: types.REQUEST_ENVIRONMENTS_DATA },
+ { type: types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS, payload: mockEnvironments },
+ ],
+ [{ type: 'fetchLogs' }],
+ );
+ });
+
+ it('should commit RECEIVE_ENVIRONMENTS_DATA_ERROR on wrong data', () => {
+ mock.onGet(mockEnvironmentsEndpoint).replyOnce(500);
+ return testAction(
+ fetchEnvironments,
+ mockEnvironmentsEndpoint,
+ state,
+ [
+ { type: types.REQUEST_ENVIRONMENTS_DATA },
+ { type: types.RECEIVE_ENVIRONMENTS_DATA_ERROR },
+ ],
+ [],
+ () => {
+ expect(flash).toHaveBeenCalledTimes(1);
+ },
+ );
+ });
+ });
+
+ describe('fetchLogs', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.reset();
+ });
+
+ it('should commit logs and pod data when there is pod name defined', () => {
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+ state.pods.current = mockPodName;
+
+ const endpoint = '/dummy_logs_path.json';
+
+ mock
+ .onGet(endpoint, {
+ params: {
+ pod_name: mockPodName,
+ ...mockDefaultRange,
+ },
+ })
+ .reply(200, {
+ pod_name: mockPodName,
+ pods: mockPods,
+ logs: mockLogsResult,
+ });
+
+ mock.onGet(endpoint).replyOnce(202); // mock reactive cache
+
+ return testAction(
+ fetchLogs,
+ null,
+ state,
+ [
+ { type: types.REQUEST_PODS_DATA },
+ { type: types.REQUEST_LOGS_DATA },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
+ { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
+ ],
+ [],
+ );
+ });
+
+ it('should commit logs and pod data when there is pod name defined and a non-default date range', () => {
+ state.projectPath = mockProjectPath;
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+ state.pods.current = mockPodName;
+ state.timeRange.current = mockFixedRange;
+
+ const endpoint = '/dummy_logs_path.json';
+
+ mock
+ .onGet(endpoint, {
+ params: {
+ pod_name: mockPodName,
+ start: mockFixedRange.start,
+ end: mockFixedRange.end,
+ },
+ })
+ .reply(200, {
+ pod_name: mockPodName,
+ pods: mockPods,
+ logs: mockLogsResult,
+ });
+
+ return testAction(
+ fetchLogs,
+ null,
+ state,
+ [
+ { type: types.REQUEST_PODS_DATA },
+ { type: types.REQUEST_LOGS_DATA },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
+ { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
+ ],
+ [],
+ );
+ });
+
+ it('should commit logs and pod data when there is pod name and search and a faulty date range', () => {
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+ state.pods.current = mockPodName;
+ state.search = mockSearch;
+ state.timeRange.current = 'INVALID_TIME_RANGE';
+
+ const endpoint = '/dummy_logs_path.json';
+
+ mock
+ .onGet(endpoint, {
+ params: {
+ pod_name: mockPodName,
+ search: mockSearch,
+ },
+ })
+ .reply(200, {
+ pod_name: mockPodName,
+ pods: mockPods,
+ logs: mockLogsResult,
+ });
+
+ mock.onGet(endpoint).replyOnce(202); // mock reactive cache
+
+ return testAction(
+ fetchLogs,
+ null,
+ state,
+ [
+ { type: types.REQUEST_PODS_DATA },
+ { type: types.REQUEST_LOGS_DATA },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
+ { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
+ ],
+ [],
+ () => {
+ // Warning about time ranges was issued
+ expect(flash).toHaveBeenCalledTimes(1);
+ expect(flash).toHaveBeenCalledWith(expect.any(String), 'warning');
+ },
+ );
+ });
+
+ it('should commit logs and pod data when no pod name defined', done => {
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+
+ const endpoint = '/dummy_logs_path.json';
+
+ mock.onGet(endpoint, { params: { ...mockDefaultRange } }).reply(200, {
+ pod_name: mockPodName,
+ pods: mockPods,
+ logs: mockLogsResult,
+ });
+ mock.onGet(endpoint).replyOnce(202); // mock reactive cache
+
+ testAction(
+ fetchLogs,
+ null,
+ state,
+ [
+ { type: types.REQUEST_PODS_DATA },
+ { type: types.REQUEST_LOGS_DATA },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
+ { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
+ ],
+ [],
+ done,
+ );
+ });
+
+ it('should commit logs and pod errors when backend fails', () => {
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+
+ const endpoint = `/${mockProjectPath}/-/logs/elasticsearch.json?environment_name=${mockEnvName}`;
+ mock.onGet(endpoint).replyOnce(500);
+
+ return testAction(
+ fetchLogs,
+ null,
+ state,
+ [
+ { type: types.REQUEST_PODS_DATA },
+ { type: types.REQUEST_LOGS_DATA },
+ { type: types.RECEIVE_PODS_DATA_ERROR },
+ { type: types.RECEIVE_LOGS_DATA_ERROR },
+ ],
+ [],
+ () => {
+ expect(flash).toHaveBeenCalledTimes(1);
+ },
+ );
+ });
+ });
+});
diff --git a/spec/frontend/logs/stores/getters_spec.js b/spec/frontend/logs/stores/getters_spec.js
new file mode 100644
index 00000000000..fdce575fa97
--- /dev/null
+++ b/spec/frontend/logs/stores/getters_spec.js
@@ -0,0 +1,40 @@
+import * as getters from '~/logs/stores/getters';
+import logsPageState from '~/logs/stores/state';
+
+import { mockLogsResult, mockTrace } from '../mock_data';
+
+describe('Logs Store getters', () => {
+ let state;
+
+ beforeEach(() => {
+ state = logsPageState();
+ });
+
+ describe('trace', () => {
+ describe('when state is initialized', () => {
+ it('returns an empty string', () => {
+ expect(getters.trace(state)).toEqual('');
+ });
+ });
+
+ describe('when state logs are empty', () => {
+ beforeEach(() => {
+ state.logs.lines = [];
+ });
+
+ it('returns an empty string', () => {
+ expect(getters.trace(state)).toEqual('');
+ });
+ });
+
+ describe('when state logs are set', () => {
+ beforeEach(() => {
+ state.logs.lines = mockLogsResult;
+ });
+
+ it('returns the logs trace as a joined string', () => {
+ expect(getters.trace(state)).toEqual(mockTrace.join('\n'));
+ });
+ });
+ });
+});
diff --git a/spec/frontend/logs/stores/mutations_spec.js b/spec/frontend/logs/stores/mutations_spec.js
new file mode 100644
index 00000000000..dcb358c7d5b
--- /dev/null
+++ b/spec/frontend/logs/stores/mutations_spec.js
@@ -0,0 +1,171 @@
+import mutations from '~/logs/stores/mutations';
+import * as types from '~/logs/stores/mutation_types';
+
+import logsPageState from '~/logs/stores/state';
+import {
+ mockEnvName,
+ mockEnvironments,
+ mockPods,
+ mockPodName,
+ mockLogsResult,
+ mockSearch,
+} from '../mock_data';
+
+describe('Logs Store Mutations', () => {
+ let state;
+
+ beforeEach(() => {
+ state = logsPageState();
+ });
+
+ it('ensures mutation types are correctly named', () => {
+ Object.keys(types).forEach(k => {
+ expect(k).toEqual(types[k]);
+ });
+ });
+
+ describe('SET_PROJECT_ENVIRONMENT', () => {
+ it('sets the environment', () => {
+ mutations[types.SET_PROJECT_ENVIRONMENT](state, mockEnvName);
+ expect(state.environments.current).toEqual(mockEnvName);
+ });
+ });
+
+ describe('SET_SEARCH', () => {
+ it('sets the search', () => {
+ mutations[types.SET_SEARCH](state, mockSearch);
+ expect(state.search).toEqual(mockSearch);
+ });
+ });
+
+ describe('REQUEST_ENVIRONMENTS_DATA', () => {
+ it('inits data', () => {
+ mutations[types.REQUEST_ENVIRONMENTS_DATA](state);
+ expect(state.environments.options).toEqual([]);
+ expect(state.environments.isLoading).toEqual(true);
+ });
+ });
+
+ describe('RECEIVE_ENVIRONMENTS_DATA_SUCCESS', () => {
+ it('receives environments data and stores it as options', () => {
+ expect(state.environments.options).toEqual([]);
+
+ mutations[types.RECEIVE_ENVIRONMENTS_DATA_SUCCESS](state, mockEnvironments);
+
+ expect(state.environments.options).toEqual(mockEnvironments);
+ expect(state.environments.isLoading).toEqual(false);
+ });
+ });
+
+ describe('RECEIVE_ENVIRONMENTS_DATA_ERROR', () => {
+ it('captures an error loading environments', () => {
+ mutations[types.RECEIVE_ENVIRONMENTS_DATA_ERROR](state);
+
+ expect(state.environments).toEqual({
+ options: [],
+ isLoading: false,
+ current: null,
+ });
+ });
+ });
+
+ describe('REQUEST_LOGS_DATA', () => {
+ it('starts loading for logs', () => {
+ mutations[types.REQUEST_LOGS_DATA](state);
+
+ expect(state.logs).toEqual(
+ expect.objectContaining({
+ lines: [],
+ isLoading: true,
+ isComplete: false,
+ }),
+ );
+ });
+ });
+
+ describe('RECEIVE_LOGS_DATA_SUCCESS', () => {
+ it('receives logs lines', () => {
+ mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, mockLogsResult);
+
+ expect(state.logs).toEqual(
+ expect.objectContaining({
+ lines: mockLogsResult,
+ isLoading: false,
+ isComplete: true,
+ }),
+ );
+ });
+ });
+
+ describe('RECEIVE_LOGS_DATA_ERROR', () => {
+ it('receives log data error and stops loading', () => {
+ mutations[types.RECEIVE_LOGS_DATA_ERROR](state);
+
+ expect(state.logs).toEqual(
+ expect.objectContaining({
+ lines: [],
+ isLoading: false,
+ isComplete: true,
+ }),
+ );
+ });
+ });
+
+ describe('SET_CURRENT_POD_NAME', () => {
+ it('set current pod name', () => {
+ mutations[types.SET_CURRENT_POD_NAME](state, mockPodName);
+
+ expect(state.pods.current).toEqual(mockPodName);
+ });
+ });
+
+ describe('SET_TIME_RANGE', () => {
+ it('sets a default range', () => {
+ expect(state.timeRange.current).toEqual(expect.any(Object));
+ });
+
+ it('sets a time range', () => {
+ const mockRange = {
+ start: '2020-01-10T18:00:00.000Z',
+ end: '2020-01-10T10:00:00.000Z',
+ };
+ mutations[types.SET_TIME_RANGE](state, mockRange);
+
+ expect(state.timeRange.current).toEqual(mockRange);
+ });
+ });
+
+ describe('REQUEST_PODS_DATA', () => {
+ it('requests pods data and resets the options', () => {
+ mutations[types.REQUEST_PODS_DATA](state);
+
+ expect(state.pods).toEqual(
+ expect.objectContaining({
+ options: [],
+ }),
+ );
+ });
+ });
+ describe('RECEIVE_PODS_DATA_SUCCESS', () => {
+ it('receives pods data success', () => {
+ mutations[types.RECEIVE_PODS_DATA_SUCCESS](state, mockPods);
+
+ expect(state.pods).toEqual(
+ expect.objectContaining({
+ options: mockPods,
+ }),
+ );
+ });
+ });
+ describe('RECEIVE_PODS_DATA_ERROR', () => {
+ it('receives pods data error', () => {
+ mutations[types.RECEIVE_PODS_DATA_ERROR](state);
+
+ expect(state.pods).toEqual(
+ expect.objectContaining({
+ options: [],
+ }),
+ );
+ });
+ });
+});
diff --git a/spec/frontend/logs/utils_spec.js b/spec/frontend/logs/utils_spec.js
new file mode 100644
index 00000000000..986fe320363
--- /dev/null
+++ b/spec/frontend/logs/utils_spec.js
@@ -0,0 +1,38 @@
+import { getTimeRange } from '~/logs/utils';
+
+describe('logs/utils', () => {
+ describe('getTimeRange', () => {
+ const nowTimestamp = 1577836800000;
+ const nowString = '2020-01-01T00:00:00.000Z';
+
+ beforeEach(() => {
+ jest.spyOn(Date, 'now').mockImplementation(() => nowTimestamp);
+ });
+
+ afterEach(() => {
+ Date.now.mockRestore();
+ });
+
+ it('returns the right values', () => {
+ expect(getTimeRange(0)).toEqual({
+ start: '2020-01-01T00:00:00.000Z',
+ end: nowString,
+ });
+
+ expect(getTimeRange(60 * 30)).toEqual({
+ start: '2019-12-31T23:30:00.000Z',
+ end: nowString,
+ });
+
+ expect(getTimeRange(60 * 60 * 24 * 7 * 1)).toEqual({
+ start: '2019-12-25T00:00:00.000Z',
+ end: nowString,
+ });
+
+ expect(getTimeRange(60 * 60 * 24 * 7 * 4)).toEqual({
+ start: '2019-12-04T00:00:00.000Z',
+ end: nowString,
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js b/spec/javascripts/blob/balsamiq/balsamiq_viewer_browser_spec.js
index 0c2b7b7392d..4e06e5c12fc 100644
--- a/spec/javascripts/blob/balsamiq/balsamiq_viewer_integration_spec.js
+++ b/spec/javascripts/blob/balsamiq/balsamiq_viewer_browser_spec.js
@@ -1,3 +1,5 @@
+// this file can't be migrated to jest because it relies on the browser to perform integration tests:
+// see: https://gitlab.com/gitlab-org/gitlab/-/issues/194207#note_301878738
import { FIXTURES_PATH } from 'spec/test_constants';
import BalsamiqViewer from '~/blob/balsamiq/balsamiq_viewer';
diff --git a/spec/javascripts/blob/sketch/index_spec.js b/spec/javascripts/blob/sketch/index_spec.js
deleted file mode 100644
index 3d3129e10da..00000000000
--- a/spec/javascripts/blob/sketch/index_spec.js
+++ /dev/null
@@ -1,120 +0,0 @@
-/* eslint-disable no-new, promise/catch-or-return */
-import JSZip from 'jszip';
-import SketchLoader from '~/blob/sketch';
-
-describe('Sketch viewer', () => {
- const generateZipFileArrayBuffer = (zipFile, resolve, done) => {
- zipFile.generateAsync({ type: 'arrayBuffer' }).then(content => {
- resolve(content);
-
- setTimeout(() => {
- done();
- }, 100);
- });
- };
-
- preloadFixtures('static/sketch_viewer.html');
-
- beforeEach(() => {
- loadFixtures('static/sketch_viewer.html');
- });
-
- describe('with error message', () => {
- beforeEach(done => {
- spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(
- () =>
- new Promise((resolve, reject) => {
- reject();
-
- setTimeout(() => {
- done();
- });
- }),
- );
-
- new SketchLoader(document.getElementById('js-sketch-viewer'));
- });
-
- it('renders error message', () => {
- expect(document.querySelector('#js-sketch-viewer p')).not.toBeNull();
-
- expect(document.querySelector('#js-sketch-viewer p').textContent.trim()).toContain(
- 'Cannot show preview.',
- );
- });
-
- it('removes render the loading icon', () => {
- expect(document.querySelector('.js-loading-icon')).toBeNull();
- });
- });
-
- describe('success', () => {
- beforeEach(done => {
- spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(
- () =>
- new Promise(resolve => {
- const zipFile = new JSZip();
- zipFile
- .folder('previews')
- .file(
- 'preview.png',
- 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAMAAAAoyzS7AAAAA1BMVEUAAACnej3aAAAAAXRSTlMAQObYZgAAAA1JREFUeNoBAgD9/wAAAAIAAVMrnDAAAAAASUVORK5CYII=',
- {
- base64: true,
- },
- );
-
- generateZipFileArrayBuffer(zipFile, resolve, done);
- }),
- );
-
- new SketchLoader(document.getElementById('js-sketch-viewer'));
- });
-
- it('does not render error message', () => {
- expect(document.querySelector('#js-sketch-viewer p')).toBeNull();
- });
-
- it('removes render the loading icon', () => {
- expect(document.querySelector('.js-loading-icon')).toBeNull();
- });
-
- it('renders preview img', () => {
- const img = document.querySelector('#js-sketch-viewer img');
-
- expect(img).not.toBeNull();
- expect(img.classList.contains('img-fluid')).toBeTruthy();
- });
-
- it('renders link to image', () => {
- const img = document.querySelector('#js-sketch-viewer img');
- const link = document.querySelector('#js-sketch-viewer a');
-
- expect(link.href).toBe(img.src);
- expect(link.target).toBe('_blank');
- });
- });
-
- describe('incorrect file', () => {
- beforeEach(done => {
- spyOn(SketchLoader.prototype, 'getZipFile').and.callFake(
- () =>
- new Promise(resolve => {
- const zipFile = new JSZip();
-
- generateZipFileArrayBuffer(zipFile, resolve, done);
- }),
- );
-
- new SketchLoader(document.getElementById('js-sketch-viewer'));
- });
-
- it('renders error message', () => {
- expect(document.querySelector('#js-sketch-viewer p')).not.toBeNull();
-
- expect(document.querySelector('#js-sketch-viewer p').textContent.trim()).toContain(
- 'Cannot show preview.',
- );
- });
- });
-});
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
new file mode 100644
index 00000000000..08d3b7bec6a
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -0,0 +1,154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 2020_02_26_162723 do
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:users) { table(:users) }
+ let(:snippets) { table(:snippets) }
+ let(:snippet_repositories) { table(:snippet_repositories) }
+
+ let(:user) { users.create(id: 1, email: 'user@example.com', projects_limit: 10, username: 'test', name: 'Test') }
+ let!(:snippet_with_repo) { snippets.create(id: 1, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
+ let!(:snippet_with_empty_repo) { snippets.create(id: 2, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
+ let!(:snippet_without_repo) { snippets.create(id: 3, type: 'PersonalSnippet', author_id: user.id, file_name: file_name, content: content) }
+
+ let(:file_name) { 'file_name.rb' }
+ let(:content) { 'content' }
+ let(:ids) { snippets.pluck('MIN(id)', 'MAX(id)').first }
+ let(:service) { described_class.new }
+
+ subject { service.perform(*ids) }
+
+ before do
+ allow(snippet_with_repo).to receive(:disk_path).and_return(disk_path(snippet_with_repo))
+
+ TestEnv.copy_repo(snippet_with_repo,
+ bare_repo: TestEnv.factory_repo_path_bare,
+ refs: TestEnv::BRANCH_SHA)
+
+ raw_repository(snippet_with_empty_repo).create_repository
+ end
+
+ after do
+ raw_repository(snippet_with_repo).remove
+ raw_repository(snippet_without_repo).remove
+ raw_repository(snippet_with_empty_repo).remove
+ end
+
+ describe '#perform' do
+ it 'logs successful migrated snippets' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
+ expect(instance).to receive(:info).exactly(3).times
+ end
+
+ subject
+ end
+
+ context 'when snippet has a non empty repository' do
+ it 'does not perform any action' do
+ expect(service).not_to receive(:create_repository_and_files).with(snippet_with_repo)
+
+ subject
+ end
+ end
+
+ shared_examples 'commits the file to the repository' do
+ it do
+ subject
+
+ blob = blob_at(snippet, file_name)
+
+ aggregate_failures do
+ expect(blob).to be
+ expect(blob.data).to eq content
+ end
+ end
+ end
+
+ context 'when snippet has an empty repo' do
+ before do
+ expect(repository_exists?(snippet_with_empty_repo)).to be_truthy
+ end
+
+ it_behaves_like 'commits the file to the repository' do
+ let(:snippet) { snippet_with_empty_repo }
+ end
+ end
+
+ context 'when snippet does not have a repository' do
+ it 'creates the repository' do
+ expect { subject }.to change { repository_exists?(snippet_without_repo) }.from(false).to(true)
+ end
+
+ it_behaves_like 'commits the file to the repository' do
+ let(:snippet) { snippet_without_repo }
+ end
+ end
+
+ context 'when an error is raised' do
+ before do
+ allow(service).to receive(:create_commit).and_raise(StandardError)
+ end
+
+ it 'logs errors' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |instance|
+ expect(instance).to receive(:error).exactly(3).times
+ end
+
+ subject
+ end
+
+ it "retries #{described_class::MAX_RETRIES} times the operation if it fails" do
+ expect(service).to receive(:create_commit).exactly(snippets.count * described_class::MAX_RETRIES).times
+
+ subject
+ end
+
+ it 'destroys the snippet repository' do
+ expect(service).to receive(:destroy_snippet_repository).exactly(3).times.and_call_original
+
+ subject
+
+ expect(snippet_repositories.count).to eq 0
+ end
+
+ it 'deletes the repository on disk' do
+ subject
+
+ aggregate_failures do
+ expect(repository_exists?(snippet_with_repo)).to be_falsey
+ expect(repository_exists?(snippet_without_repo)).to be_falsey
+ expect(repository_exists?(snippet_with_empty_repo)).to be_falsey
+ end
+ end
+ end
+ end
+
+ def blob_at(snippet, path)
+ raw_repository(snippet).blob_at('master', path)
+ end
+
+ def repository_exists?(snippet)
+ gitlab_shell.repository_exists?('default', "#{disk_path(snippet)}.git")
+ end
+
+ def raw_repository(snippet)
+ Gitlab::Git::Repository.new('default',
+ "#{disk_path(snippet)}.git",
+ Gitlab::GlRepository::SNIPPET.identifier_for_container(snippet),
+ "@snippets/#{snippet.id}")
+ end
+
+ def hashed_repository(snippet)
+ Storage::Hashed.new(snippet, prefix: '@snippets')
+ end
+
+ def disk_path(snippet)
+ hashed_repository(snippet).disk_path
+ end
+
+ def ls_files(snippet)
+ raw_repository(snippet).ls_files(nil)
+ end
+end
diff --git a/spec/lib/gitlab/elasticsearch/logs_spec.rb b/spec/lib/gitlab/elasticsearch/logs_spec.rb
new file mode 100644
index 00000000000..b2f23e30465
--- /dev/null
+++ b/spec/lib/gitlab/elasticsearch/logs_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::Elasticsearch::Logs do
+ let(:client) { Elasticsearch::Transport::Client }
+
+ let(:es_message_1) { { timestamp: "2019-12-13T14:35:34.034Z", message: "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" } }
+ let(:es_message_2) { { timestamp: "2019-12-13T14:35:35.034Z", message: "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13" } }
+ let(:es_message_3) { { timestamp: "2019-12-13T14:35:36.034Z", message: "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13" } }
+ let(:es_message_4) { { timestamp: "2019-12-13T14:35:37.034Z", message: "- -\u003e /" } }
+
+ let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/logs_response.json')) }
+
+ subject { described_class.new(client) }
+
+ let(:namespace) { "autodevops-deploy-9-production" }
+ let(:pod_name) { "production-6866bc8974-m4sk4" }
+ let(:container_name) { "auto-deploy-app" }
+ let(:search) { "foo +bar "}
+ let(:start_time) { "2019-12-13T14:35:34.034Z" }
+ let(:end_time) { "2019-12-13T14:35:34.034Z" }
+
+ let(:body) { JSON.parse(fixture_file('lib/elasticsearch/query.json')) }
+ let(:body_with_container) { JSON.parse(fixture_file('lib/elasticsearch/query_with_container.json')) }
+ let(:body_with_search) { JSON.parse(fixture_file('lib/elasticsearch/query_with_search.json')) }
+ let(:body_with_times) { JSON.parse(fixture_file('lib/elasticsearch/query_with_times.json')) }
+ let(:body_with_start_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_start_time.json')) }
+ let(:body_with_end_time) { JSON.parse(fixture_file('lib/elasticsearch/query_with_end_time.json')) }
+
+ RSpec::Matchers.define :a_hash_equal_to_json do |expected|
+ match do |actual|
+ actual.as_json == expected
+ end
+ end
+
+ describe '#pod_logs' do
+ it 'returns the logs as an array' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name)
+ expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
+ end
+
+ it 'can further filter the logs by container name' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_container)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, container_name)
+ expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
+ end
+
+ it 'can further filter the logs by search' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_search)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, nil, search)
+ expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
+ end
+
+ it 'can further filter the logs by start_time and end_time' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_times)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, nil, nil, start_time, end_time)
+ expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
+ end
+
+ it 'can further filter the logs by only start_time' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_start_time)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, nil, nil, start_time)
+ expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
+ end
+
+ it 'can further filter the logs by only end_time' do
+ expect(client).to receive(:search).with(body: a_hash_equal_to_json(body_with_end_time)).and_return(es_response)
+
+ result = subject.pod_logs(namespace, pod_name, nil, nil, nil, end_time)
+ expect(result).to eq([es_message_4, es_message_3, es_message_2, es_message_1])
+ end
+ end
+end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 03aef7aea5c..6020db09ccf 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -1266,4 +1266,39 @@ describe Environment, :use_clean_rails_memory_store_caching do
expect(env).to be_persisted
end
end
+
+ describe '#elastic_stack_available?' do
+ let!(:cluster) { create(:cluster, :project, :provided_by_user, projects: [project]) }
+ let!(:deployment) { create(:deployment, :success, environment: environment, project: project, cluster: cluster) }
+
+ context 'when app does not exist' do
+ it 'returns false' do
+ expect(environment.elastic_stack_available?).to be(false)
+ end
+ end
+
+ context 'when app exists' do
+ let!(:application) { create(:clusters_applications_elastic_stack, cluster: cluster) }
+
+ it 'returns false' do
+ expect(environment.elastic_stack_available?).to be(false)
+ end
+ end
+
+ context 'when app is installed' do
+ let!(:application) { create(:clusters_applications_elastic_stack, :installed, cluster: cluster) }
+
+ it 'returns true' do
+ expect(environment.elastic_stack_available?).to be(true)
+ end
+ end
+
+ context 'when app is updated' do
+ let!(:application) { create(:clusters_applications_elastic_stack, :updated, cluster: cluster) }
+
+ it 'returns true' do
+ expect(environment.elastic_stack_available?).to be(true)
+ end
+ end
+ end
end
diff --git a/spec/models/snippet_repository_spec.rb b/spec/models/snippet_repository_spec.rb
index 088d37725aa..6861e03282a 100644
--- a/spec/models/snippet_repository_spec.rb
+++ b/spec/models/snippet_repository_spec.rb
@@ -26,44 +26,6 @@ describe SnippetRepository do
end
end
- describe '#create_file' do
- let(:snippet) { create(:personal_snippet, :empty_repo, author: user) }
-
- it 'creates the file' do
- snippet_repository.create_file(user, 'foo', 'bar', commit_opts)
- blob = first_blob(snippet)
-
- aggregate_failures do
- expect(blob).not_to be_nil
- expect(blob.path).to eq 'foo'
- expect(blob.data).to eq 'bar'
- end
- end
-
- it 'fills the file path if empty' do
- snippet_repository.create_file(user, nil, 'bar', commit_opts)
- blob = first_blob(snippet)
-
- aggregate_failures do
- expect(blob).not_to be_nil
- expect(blob.path).to eq 'snippetfile1.txt'
- expect(blob.data).to eq 'bar'
- end
- end
-
- context 'when the file exists' do
- let(:snippet) { create(:personal_snippet, :repository, author: user) }
-
- it 'captures the git exception and raises a SnippetRepository::CommitError' do
- existing_blob = first_blob(snippet)
-
- expect do
- snippet_repository.create_file(user, existing_blob.path, existing_blob.data, commit_opts)
- end.to raise_error described_class::CommitError
- end
- end
- end
-
describe '#multi_files_action' do
let(:new_file) { { file_path: 'new_file_test', content: 'bar' } }
let(:move_file) { { previous_path: 'CHANGELOG', file_path: 'CHANGELOG_new', content: 'bar' } }
diff --git a/spec/serializers/environment_entity_spec.rb b/spec/serializers/environment_entity_spec.rb
index f392ecea959..b4ea90d2141 100644
--- a/spec/serializers/environment_entity_spec.rb
+++ b/spec/serializers/environment_entity_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
describe EnvironmentEntity do
+ include Gitlab::Routing.url_helpers
+
let(:request) { double('request') }
let(:entity) do
described_class.new(environment, request: spy('request'))
@@ -71,4 +73,22 @@ describe EnvironmentEntity do
expect(subject).to include(:cancel_auto_stop_path, :auto_stop_at)
end
end
+
+ context 'pod_logs' do
+ it 'exposes logs keys' do
+ expect(subject).to include(:logs_path)
+ expect(subject).to include(:logs_api_path)
+ expect(subject).to include(:enable_advanced_logs_querying)
+ end
+
+ it 'uses k8s api when ES is not available' do
+ expect(subject[:logs_api_path]).to eq(k8s_project_logs_path(environment.project, environment_name: environment.name, format: :json))
+ end
+
+ it 'uses ES api when ES is available' do
+ allow(environment).to receive(:elastic_stack_available?).and_return(true)
+
+ expect(subject[:logs_api_path]).to eq(elasticsearch_project_logs_path(environment.project, environment_name: environment.name, format: :json))
+ end
+ end
end
diff --git a/spec/services/pod_logs/base_service_spec.rb b/spec/services/pod_logs/base_service_spec.rb
new file mode 100644
index 00000000000..a18fda544df
--- /dev/null
+++ b/spec/services/pod_logs/base_service_spec.rb
@@ -0,0 +1,229 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::PodLogs::BaseService do
+ include KubernetesHelpers
+
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
+ let(:namespace) { 'autodevops-deploy-9-production' }
+
+ let(:pod_name) { 'pod-1' }
+ let(:container_name) { 'container-0' }
+ let(:params) { {} }
+ let(:raw_pods) do
+ JSON.parse([
+ kube_pod(name: pod_name)
+ ].to_json, object_class: OpenStruct)
+ end
+
+ subject { described_class.new(cluster, namespace, params: params) }
+
+ describe '#initialize' do
+ let(:params) do
+ {
+ 'container_name' => container_name,
+ 'another_param' => 'foo'
+ }
+ end
+
+ it 'filters the parameters' do
+ expect(subject.cluster).to eq(cluster)
+ expect(subject.namespace).to eq(namespace)
+ expect(subject.params).to eq({
+ 'container_name' => container_name
+ })
+ expect(subject.params.equal?(params)).to be(false)
+ end
+ end
+
+ describe '#check_arguments' do
+ context 'when cluster and namespace are provided' do
+ it 'returns success' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:success)
+ end
+ end
+
+ context 'when cluster is nil' do
+ let(:cluster) { nil }
+
+ it 'returns an error' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Cluster does not exist')
+ end
+ end
+
+ context 'when namespace is nil' do
+ let(:namespace) { nil }
+
+ it 'returns an error' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Namespace is empty')
+ end
+ end
+
+ context 'when namespace is empty' do
+ let(:namespace) { '' }
+
+ it 'returns an error' do
+ result = subject.send(:check_arguments, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Namespace is empty')
+ end
+ end
+ end
+
+ describe '#check_param_lengths' do
+ context 'when pod_name and container_name are provided' do
+ let(:params) do
+ {
+ 'pod_name' => pod_name,
+ 'container_name' => container_name
+ }
+ end
+
+ it 'returns success' do
+ result = subject.send(:check_param_lengths, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:pod_name]).to eq(pod_name)
+ expect(result[:container_name]).to eq(container_name)
+ end
+ end
+
+ context 'when pod_name is too long' do
+ let(:params) do
+ {
+ 'pod_name' => "a very long string." * 15
+ }
+ end
+
+ it 'returns an error' do
+ result = subject.send(:check_param_lengths, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('pod_name cannot be larger than 253 chars')
+ end
+ end
+
+ context 'when container_name is too long' do
+ let(:params) do
+ {
+ 'container_name' => "a very long string." * 15
+ }
+ end
+
+ it 'returns an error' do
+ result = subject.send(:check_param_lengths, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('container_name cannot be larger than 253 chars')
+ end
+ end
+ end
+
+ describe '#get_raw_pods' do
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+
+ it 'returns success with passthrough k8s response' do
+ stub_kubeclient_pods(namespace)
+
+ result = subject.send(:get_raw_pods, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:raw_pods].first).to be_a(Kubeclient::Resource)
+ end
+ end
+
+ describe '#get_pod_names' do
+ it 'returns success with a list of pods' do
+ result = subject.send(:get_pod_names, raw_pods: raw_pods)
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:pods]).to eq([pod_name])
+ end
+ end
+
+ describe '#check_pod_name' do
+ it 'returns success if pod_name was specified' do
+ result = subject.send(:check_pod_name, pod_name: pod_name, pods: [pod_name])
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:pod_name]).to eq(pod_name)
+ end
+
+ it 'returns success if pod_name was not specified but there are pods' do
+ result = subject.send(:check_pod_name, pod_name: nil, pods: [pod_name])
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:pod_name]).to eq(pod_name)
+ end
+
+ it 'returns error if pod_name was not specified and there are no pods' do
+ result = subject.send(:check_pod_name, pod_name: nil, pods: [])
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('No pods available')
+ end
+
+ it 'returns error if pod_name was specified but does not exist' do
+ result = subject.send(:check_pod_name, pod_name: 'another_pod', pods: [pod_name])
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Pod does not exist')
+ end
+ end
+
+ describe '#check_container_name' do
+ it 'returns success if container_name was specified' do
+ result = subject.send(:check_container_name,
+ container_name: container_name,
+ pod_name: pod_name,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:container_name]).to eq(container_name)
+ end
+
+ it 'returns success if container_name was not specified and there are containers' do
+ result = subject.send(:check_container_name,
+ pod_name: pod_name,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:container_name]).to eq(container_name)
+ end
+
+ it 'returns error if container_name was not specified and there are no containers on the pod' do
+ raw_pods.first.spec.containers = []
+
+ result = subject.send(:check_container_name,
+ pod_name: pod_name,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('No containers available')
+ end
+
+ it 'returns error if container_name was specified but does not exist' do
+ result = subject.send(:check_container_name,
+ container_name: 'foo',
+ pod_name: pod_name,
+ raw_pods: raw_pods
+ )
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Container does not exist')
+ end
+ end
+end
diff --git a/spec/services/pod_logs/elasticsearch_service_spec.rb b/spec/services/pod_logs/elasticsearch_service_spec.rb
new file mode 100644
index 00000000000..0f0c36da56a
--- /dev/null
+++ b/spec/services/pod_logs/elasticsearch_service_spec.rb
@@ -0,0 +1,174 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::PodLogs::ElasticsearchService do
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
+ let(:namespace) { 'autodevops-deploy-9-production' }
+
+ let(:pod_name) { 'pod-1' }
+ let(:container_name) { 'container-1' }
+ let(:search) { 'foo -bar' }
+ let(:start_time) { '2019-01-02T12:13:14+02:00' }
+ let(:end_time) { '2019-01-03T12:13:14+02:00' }
+ let(:params) { {} }
+ let(:expected_logs) do
+ [
+ { message: "Log 1", timestamp: "2019-12-13T14:04:22.123456Z" },
+ { message: "Log 2", timestamp: "2019-12-13T14:04:23.123456Z" },
+ { message: "Log 3", timestamp: "2019-12-13T14:04:24.123456Z" }
+ ]
+ end
+
+ subject { described_class.new(cluster, namespace, params: params) }
+
+ describe '#check_times' do
+ context 'with start and end provided and valid' do
+ let(:params) do
+ {
+ 'start' => start_time,
+ 'end' => end_time
+ }
+ end
+
+ it 'returns success with times' do
+ result = subject.send(:check_times, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:start]).to eq(start_time)
+ expect(result[:end]).to eq(end_time)
+ end
+ end
+
+ context 'with start and end not provided' do
+ let(:params) do
+ {}
+ end
+
+ it 'returns success with nothing else' do
+ result = subject.send(:check_times, {})
+
+ expect(result.keys.length).to eq(1)
+ expect(result[:status]).to eq(:success)
+ end
+ end
+
+ context 'with start valid and end invalid' do
+ let(:params) do
+ {
+ 'start' => start_time,
+ 'end' => 'invalid date'
+ }
+ end
+
+ it 'returns error' do
+ result = subject.send(:check_times, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid start or end time format')
+ end
+ end
+
+ context 'with start invalid and end valid' do
+ let(:params) do
+ {
+ 'start' => 'invalid date',
+ 'end' => end_time
+ }
+ end
+
+ it 'returns error' do
+ result = subject.send(:check_times, {})
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Invalid start or end time format')
+ end
+ end
+ end
+
+ describe '#check_search' do
+ context 'with search provided and valid' do
+ let(:params) do
+ {
+ 'search' => search
+ }
+ end
+
+ it 'returns success with search' do
+ result = subject.send(:check_search, {})
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:search]).to eq(search)
+ end
+ end
+
+ context 'with search not provided' do
+ let(:params) do
+ {}
+ end
+
+ it 'returns success with nothing else' do
+ result = subject.send(:check_search, {})
+
+ expect(result.keys.length).to eq(1)
+ expect(result[:status]).to eq(:success)
+ end
+ end
+ end
+
+ describe '#pod_logs' do
+ let(:result_arg) do
+ {
+ pod_name: pod_name,
+ container_name: container_name,
+ search: search,
+ start: start_time,
+ end: end_time
+ }
+ end
+
+ before do
+ create(:clusters_applications_elastic_stack, :installed, cluster: cluster)
+ end
+
+ it 'returns the logs' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ .to receive(:pod_logs)
+ .with(namespace, pod_name, container_name, search, start_time, end_time)
+ .and_return(expected_logs)
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+
+ it 'returns an error when ES is unreachable' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(nil)
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to connect to Elasticsearch')
+ end
+
+ it 'handles server errors from elasticsearch' do
+ allow_any_instance_of(::Clusters::Applications::ElasticStack)
+ .to receive(:elasticsearch_client)
+ .and_return(Elasticsearch::Transport::Client.new)
+ allow_any_instance_of(::Gitlab::Elasticsearch::Logs)
+ .to receive(:pod_logs)
+ .and_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable.new)
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Elasticsearch returned status code: ServiceUnavailable')
+ end
+ end
+end
diff --git a/spec/services/pod_logs/kubernetes_service_spec.rb b/spec/services/pod_logs/kubernetes_service_spec.rb
new file mode 100644
index 00000000000..9fab88a14f6
--- /dev/null
+++ b/spec/services/pod_logs/kubernetes_service_spec.rb
@@ -0,0 +1,166 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe ::PodLogs::KubernetesService do
+ include KubernetesHelpers
+
+ let_it_be(:cluster) { create(:cluster, :provided_by_gcp, environment_scope: '*') }
+ let(:namespace) { 'autodevops-deploy-9-production' }
+
+ let(:pod_name) { 'pod-1' }
+ let(:container_name) { 'container-1' }
+ let(:params) { {} }
+
+ let(:raw_logs) do
+ "2019-12-13T14:04:22.123456Z Log 1\n2019-12-13T14:04:23.123456Z Log 2\n" \
+ "2019-12-13T14:04:24.123456Z Log 3"
+ end
+
+ subject { described_class.new(cluster, namespace, params: params) }
+
+ describe '#pod_logs' do
+ let(:result_arg) do
+ {
+ pod_name: pod_name,
+ container_name: container_name
+ }
+ end
+
+ let(:expected_logs) { raw_logs }
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+
+ it 'returns the logs' do
+ stub_kubeclient_logs(pod_name, namespace, container: container_name)
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+
+ it 'handles Not Found errors from k8s' do
+ allow_any_instance_of(Gitlab::Kubernetes::KubeClient)
+ .to receive(:get_pod_log)
+ .with(any_args)
+ .and_raise(Kubeclient::ResourceNotFoundError.new(404, 'Not Found', {}))
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Pod not found')
+ end
+
+ it 'handles HTTP errors from k8s' do
+ allow_any_instance_of(Gitlab::Kubernetes::KubeClient)
+ .to receive(:get_pod_log)
+ .with(any_args)
+ .and_raise(Kubeclient::HttpError.new(500, 'Error', {}))
+
+ result = subject.send(:pod_logs, result_arg)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Kubernetes API returned status code: 500')
+ end
+ end
+
+ describe '#encode_logs_to_utf8', :aggregate_failures do
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+ let(:expected_logs) { '2019-12-13T14:04:22.123456Z ✔ Started logging errors to Sentry' }
+ let(:raw_logs) { expected_logs.dup.force_encoding(Encoding::ASCII_8BIT) }
+ let(:result) { subject.send(:encode_logs_to_utf8, result_arg) }
+
+ let(:result_arg) do
+ {
+ pod_name: pod_name,
+ container_name: container_name,
+ logs: raw_logs
+ }
+ end
+
+ it 'converts logs to utf-8' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+
+ it 'returns error if output of encoding helper is blank' do
+ allow(Gitlab::EncodingHelper).to receive(:encode_utf8).and_return('')
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
+ end
+
+ it 'returns error if output of encoding helper is nil' do
+ allow(Gitlab::EncodingHelper).to receive(:encode_utf8).and_return(nil)
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
+ end
+
+ it 'returns error if output of encoding helper is not UTF-8' do
+ allow(Gitlab::EncodingHelper).to receive(:encode_utf8)
+ .and_return(expected_logs.encode(Encoding::UTF_16BE))
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('Unable to convert Kubernetes logs encoding to UTF-8')
+ end
+
+ context 'when logs are nil' do
+ let(:raw_logs) { nil }
+ let(:expected_logs) { nil }
+
+ it 'returns nil' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+ end
+
+ context 'when logs are blank' do
+ let(:raw_logs) { (+'').force_encoding(Encoding::ASCII_8BIT) }
+ let(:expected_logs) { '' }
+
+ it 'returns blank string' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+ end
+
+ context 'when logs are already in utf-8' do
+ let(:raw_logs) { expected_logs }
+
+ it 'does not fail' do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+ end
+ end
+
+ describe '#split_logs' do
+ let(:service) { create(:cluster_platform_kubernetes, :configured) }
+
+ let(:expected_logs) do
+ [
+ { message: "Log 1", timestamp: "2019-12-13T14:04:22.123456Z" },
+ { message: "Log 2", timestamp: "2019-12-13T14:04:23.123456Z" },
+ { message: "Log 3", timestamp: "2019-12-13T14:04:24.123456Z" }
+ ]
+ end
+
+ let(:result_arg) do
+ {
+ pod_name: pod_name,
+ container_name: container_name,
+ logs: raw_logs
+ }
+ end
+
+ it 'returns the logs' do
+ result = subject.send(:split_logs, result_arg)
+
+ aggregate_failures do
+ expect(result[:status]).to eq(:success)
+ expect(result[:logs]).to eq(expected_logs)
+ end
+ end
+ end
+end
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index 9ac7d0df737..5d8779ec782 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -82,7 +82,7 @@ Capybara.enable_aria_label = true
Capybara::Screenshot.append_timestamp = false
Capybara::Screenshot.register_filename_prefix_formatter(:rspec) do |example|
- ::File.join(QA::Runtime::Namespace.name, example.full_description.downcase.parameterize(separator: "_")[0..99])
+ example.full_description.downcase.parameterize(separator: "_")[0..99]
end
# Keep only the screenshots generated from the last failing test suite
Capybara::Screenshot.prune_strategy = :keep_last_run
diff --git a/yarn.lock b/yarn.lock
index 83ec5ec912d..b6691bf0a86 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -801,10 +801,10 @@
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.110.0.tgz#3c4f5f0e78fcf616ec63a265754158b84ed80af8"
integrity sha512-bLVUW9Hj6j7zTdeoQELO3Bls5xDKr6AoSEU8gZbEZKLK9PV81hxRl/lJPJUo1qt4E7eJGapCTlH73tTIL4OZ3A==
-"@gitlab/ui@^9.23.0":
- version "9.23.0"
- resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-9.23.0.tgz#0ad0232c529d1f8a386c8e86159e273111a55686"
- integrity sha512-1VOob5tNPB3zjLHeTuMbQBMG3q6LF36iCq6XqH5eeYzpAI42zj/WhY5T47RKrfvlkflWRSUPTarGo97pQqIKzg==
+"@gitlab/ui@^9.23.1":
+ version "9.23.1"
+ resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-9.23.1.tgz#791d0c8a6762b1dd73ed686326c1dfb3f0c7b987"
+ integrity sha512-7bGcV2W6qh/KK423W/vasv+S6myWJMD1tyMr5MBz1WQRg/B3eUlpr4HbjQXmtALRWiWkag8GMI/HSy0rby4WrA==
dependencies:
"@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.3.0"