author    GitLab Bot <gitlab-bot@gitlab.com>  2020-08-17 00:10:02 +0000
committer GitLab Bot <gitlab-bot@gitlab.com>  2020-08-17 00:10:02 +0000
commit    d17008f99ed08e7c231b7e8e65cba1be51e2e747 (patch)
tree      bd3089c50b6b05b9921d1ea48b0de1382cde0655
parent    f5617fbd782cf1465572a524a8885f3eccf9ca1c (diff)
download  gitlab-ce-d17008f99ed08e7c231b7e8e65cba1be51e2e747.tar.gz
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  app/serializers/import/bitbucket_server_provider_repo_entity.rb         4
-rw-r--r--  changelogs/unreleased/xanf-fix-bitbucket-server-importer-key.yml         5
-rw-r--r--  doc/administration/object_storage.md                                    58
-rw-r--r--  doc/development/telemetry/event_dictionary.md                           25
-rw-r--r--  doc/development/telemetry/index.md                                     117
-rw-r--r--  doc/development/telemetry/usage_ping.md                                  2
-rw-r--r--  spec/controllers/import/bitbucket_server_controller_spec.rb             6
-rw-r--r--  spec/serializers/import/bitbucket_server_provider_repo_entity_spec.rb   6
-rw-r--r--  spec/services/merge_requests/push_options_handler_service_spec.rb      33
9 files changed, 141 insertions, 115 deletions
diff --git a/app/serializers/import/bitbucket_server_provider_repo_entity.rb b/app/serializers/import/bitbucket_server_provider_repo_entity.rb
index d818cac46cd..7c619cf4ebe 100644
--- a/app/serializers/import/bitbucket_server_provider_repo_entity.rb
+++ b/app/serializers/import/bitbucket_server_provider_repo_entity.rb
@@ -1,6 +1,10 @@
# frozen_string_literal: true
class Import::BitbucketServerProviderRepoEntity < Import::BitbucketProviderRepoEntity
+ expose :id, override: true do |repo|
+ "#{repo.project_key}/#{repo.slug}"
+ end
+
expose :provider_link, override: true do |repo, options|
repo.browse_url
end
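For readers skimming the change, here is a minimal sketch of what the overridden `id` now yields, assuming only that the Bitbucket Server repo object responds to `project_key` and `slug`; the `OpenStruct` stand-in below is purely illustrative and not part of the change:

```ruby
require 'ostruct'

# Hypothetical stand-in for the Bitbucket Server repo object; only the
# attributes the entity reads are modelled here.
repo = OpenStruct.new(project_key: 'DEM', slug: 'TEST',
                      name: 'test', full_name: 'demo/test')

# The id is now derived from the stable project key and slug rather than
# the display name, so the import no longer fails when the slug differs
# from the project name.
id = "#{repo.project_key}/#{repo.slug}"
puts id # => "DEM/TEST"
```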
diff --git a/changelogs/unreleased/xanf-fix-bitbucket-server-importer-key.yml b/changelogs/unreleased/xanf-fix-bitbucket-server-importer-key.yml
new file mode 100644
index 00000000000..b44f115236e
--- /dev/null
+++ b/changelogs/unreleased/xanf-fix-bitbucket-server-importer-key.yml
@@ -0,0 +1,5 @@
+---
+title: Fix failing bitbucket server import when project slug differs from name
+merge_request: 39433
+author:
+type: fixed
diff --git a/doc/administration/object_storage.md b/doc/administration/object_storage.md
index 4b9f331860e..49716883310 100644
--- a/doc/administration/object_storage.md
+++ b/doc/administration/object_storage.md
@@ -21,6 +21,12 @@ GitLab has been tested on a number of object storage providers:
- On-premises hardware and appliances from various storage vendors.
- MinIO. We have [a guide to deploying this](https://docs.gitlab.com/charts/advanced/external-object-storage/minio.html) within our Helm Chart documentation.
+### Known compatibility issues
+
+- Dell EMC ECS: Prior to GitLab 13.3, there is a [known bug in GitLab Workhorse that prevents
+ HTTP Range Requests from working with CI job artifacts](https://gitlab.com/gitlab-org/gitlab/-/issues/223806).
+ Be sure to upgrade to GitLab v13.3.0 or above if you use S3 storage with this hardware.
+
## Configuration guides
There are two ways of specifying object storage configuration in GitLab:
@@ -88,6 +94,11 @@ See the section on [ETag mismatch errors](#etag-mismatch) for more details.
'aws_access_key_id' => '<AWS_ACCESS_KEY_ID>',
'aws_secret_access_key' => '<AWS_SECRET_ACCESS_KEY>'
}
+ # OPTIONAL: The following lines are only needed if server side encryption is required
+ gitlab_rails['object_store']['storage_options'] = {
+ 'server_side_encryption' => '<AES256 or aws:kms>',
+ 'server_side_encryption_kms_key_id' => '<arn:s3:aws:xxx>'
+ }
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = '<artifacts>'
gitlab_rails['object_store']['objects']['external_diffs']['bucket'] = '<external-diffs>'
gitlab_rails['object_store']['objects']['lfs']['bucket'] = '<lfs-objects>'
@@ -123,6 +134,9 @@ See the section on [ETag mismatch errors](#etag-mismatch) for more details.
aws_access_key_id: <AWS_ACCESS_KEY_ID>
aws_secret_access_key: <AWS_SECRET_ACCESS_KEY>
region: <eu-central-1>
+ storage_options:
+ server_side_encryption: <AES256 or aws:kms>
+     server_side_encryption_kms_key_id: <arn:s3:aws:xxx>
objects:
artifacts:
bucket: <artifacts>
@@ -188,7 +202,8 @@ gitlab_rails['object_store']['connection'] = {
|---------|-------------|
| `enabled` | Enable/disable object storage |
| `proxy_download` | Set to `true` to [enable proxying all files served](#proxy-download). This option allows you to reduce egress traffic, as it lets clients download directly from remote storage instead of proxying all data |
-| `connection` | Various connection options described below |
+| `connection` | Various [connection options](#connection-settings) described below |
+| `storage_options` | Options to use when saving new objects, such as [server side encryption](#server-side-encryption-headers). Introduced in GitLab 13.3 |
| `objects` | [Object-specific configuration](#object-specific-configuration)
### Connection settings
@@ -588,21 +603,46 @@ configuration.
#### Encrypted S3 buckets
-> - Introduced in [GitLab 13.1](https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/466) for instance profiles only.
-> - Introduced in [GitLab 13.2](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/34460) for static credentials when [consolidated object storage configuration](#consolidated-object-storage-configuration) is used.
+> - Introduced in [GitLab 13.1](https://gitlab.com/gitlab-org/gitlab-workhorse/-/merge_requests/466) for instance profiles only and [S3 default encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-encryption.html).
+> - Introduced in [GitLab 13.2](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/34460) for static credentials when [consolidated object storage configuration](#consolidated-object-storage-configuration) and [S3 default encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-encryption.html) are used.
When configured either with an instance profile or with the consolidated
-object configuration, GitLab Workhorse properly uploads files to S3 buckets
-that have [SSE-S3 or SSE-KMS encryption enabled by
+object configuration, GitLab Workhorse properly uploads files to S3
+buckets that have [SSE-S3 or SSE-KMS encryption enabled by
default](https://docs.aws.amazon.com/kms/latest/developerguide/services-s3.html).
-Note that customer master keys (CMKs) and
-SSE-C encryption are [not yet supported since this requires supplying
-keys to the GitLab configuration](https://gitlab.com/gitlab-org/gitlab/-/issues/226006).
+Note that customer master keys (CMKs) and SSE-C encryption are [not
+supported since this requires sending the encryption keys in every request](https://gitlab.com/gitlab-org/gitlab/-/issues/226006).
+
+##### Server-side encryption headers
+
+> Introduced in [GitLab 13.3](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/38240).
+
+Setting a default encryption on an S3 bucket is the easiest way to
+enable encryption, but you may want to [set a bucket policy to ensure
+only encrypted objects are uploaded](https://aws.amazon.com/premiumsupport/knowledge-center/s3-bucket-store-kms-encrypted-objects/).
+To do this, you must configure GitLab to send the proper encryption headers
+in the `storage_options` configuration section:
+
+| Setting | Description |
+|-------------------------------------|-------------|
+| `server_side_encryption` | Encryption mode (AES256 or aws:kms) |
+| `server_side_encryption_kms_key_id` | Amazon Resource Name. Only needed when `aws:kms` is used in `server_side_encryption`. See the [Amazon documentation on using KMS encryption](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingKMSEncryption.html) |
+
+As with the case for default encryption, these options only work when
+the Workhorse S3 client is enabled. One of the following two conditions
+must be fulfilled:
+
+- `use_iam_profile` is `true` in the connection settings.
+- Consolidated object storage settings are in use.
+
+[ETag mismatch errors](#etag-mismatch) will occur if server side
+encryption headers are used without enabling the Workhorse S3 client.
##### Disabling the feature
The Workhorse S3 client is enabled by default when the
-[`use_iam_profile` configuration option](#iam-permissions) is set to `true`.
+[`use_iam_profile` configuration option](#iam-permissions) is set to `true` or consolidated
+object storage settings are configured.
The feature can be disabled using the `:use_workhorse_s3_client` feature flag. To disable the
feature, ask a GitLab administrator with
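To tie the `storage_options` pieces above together, here is a hedged sketch of an Omnibus `/etc/gitlab/gitlab.rb` excerpt that combines the consolidated object storage settings with SSE-KMS headers; the region, bucket name, and KMS key ARN are placeholders rather than values taken from this change:

```ruby
# Illustrative /etc/gitlab/gitlab.rb excerpt (all values are placeholders).
gitlab_rails['object_store']['enabled'] = true
gitlab_rails['object_store']['connection'] = {
  'provider' => 'AWS',
  'region' => 'eu-central-1',
  'use_iam_profile' => true # one way to enable the Workhorse S3 client
}
# Optional: only needed when objects must be written with encryption headers.
gitlab_rails['object_store']['storage_options'] = {
  'server_side_encryption' => 'aws:kms',
  'server_side_encryption_kms_key_id' =>
    'arn:aws:kms:eu-central-1:123456789012:key/EXAMPLE-KEY-ID'
}
gitlab_rails['object_store']['objects']['artifacts']['bucket'] = 'gitlab-artifacts'
```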
diff --git a/doc/development/telemetry/event_dictionary.md b/doc/development/telemetry/event_dictionary.md
index 12017da8cf5..d8cc32ea8d0 100644
--- a/doc/development/telemetry/event_dictionary.md
+++ b/doc/development/telemetry/event_dictionary.md
@@ -6,8 +6,27 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Event Dictionary
-An event dictionary is a single source of truth that outlines what events and properties you track, why you're tracking them, and where they are tracked. This is a living document that is updated any time a new event is planned or implemented.
+**Note: We've temporarily moved the Event Dictionary to a [Google Sheet](https://docs.google.com/spreadsheets/d/1VzE8R72Px_Y_LlE3Z05LxUlG_dumWe3vl-HeUo70TPw/edit?usp=sharing)**. The previous Markdown table exceeded 600 rows making it difficult to manage. In the future, our intention is to move this back into our docs using a [YAML file](https://gitlab.com/gitlab-org/gitlab-docs/-/issues/823).
-## Usage Ping
+The event dictionary is a single source of truth for the metrics and events we collect for product usage data. The Event Dictionary lists all the metrics and events we track, why we're tracking them, and where they are tracked.
-Please see this [Google Sheet](https://docs.google.com/spreadsheets/d/1VzE8R72Px_Y_LlE3Z05LxUlG_dumWe3vl-HeUo70TPw/edit?usp=sharing). We've temporarily moved this table as the table exceeded 600 rows making it difficult to manage.
+This is a living document that is updated any time a new event is planned or implemented. It includes the following information.
+
+- Section, stage, or group
+- Description
+- Implementation status
+- Availability by plan type
+- Code path
+
+We're currently focusing our Event Dictionary on [Usage Ping](usage_ping.md). In the future, we will also include [Snowplow](snowplow.md). We currently have an initiative across the entire product organization to complete the [Event Dictionary for Usage Ping](https://gitlab.com/groups/gitlab-org/-/epics/4174).
+
+## Instructions
+
+1. Open the Event Dictionary and fill in all the **PM to edit** columns highlighted in yellow.
+1. Check that all the metrics and events are assigned to the correct section, stage, or group. If a metric is used across many groups, assign it to the stage. If a metric is used across many stages, assign it to the section. If a metric is incorrectly assigned to another section, stage, or group, let the PM know you have reassigned it. If your group has no assigned metrics and events, check that your metrics and events are not incorrectly assigned to another PM.
+1. Add descriptions of what your metrics and events are tracking. Work with your Engineering team or the Telemetry team if you need help understanding this.
+1. Add what plans this metric is available on. Work with your Engineering team or the Telemetry team if you need help understanding this.
+
+## Planned metrics and events
+
+For future metrics and events you plan to track, please add them to the Event Dictionary and note the status as `Planned`, `In Progress`, or `Implemented`. Once you have confirmed that the metric has been implemented and that the metric data is in our data warehouse, change the status to **Data Available**.
diff --git a/doc/development/telemetry/index.md b/doc/development/telemetry/index.md
index 501f6661250..b5032ce3730 100644
--- a/doc/development/telemetry/index.md
+++ b/doc/development/telemetry/index.md
@@ -6,7 +6,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Telemetry Guide
-At GitLab, we collect telemetry for the purpose of helping us build a better product. Data helps GitLab understand which parts of the product need improvement and which features we should build next. Telemetry also helps our team better understand the reasons why people use GitLab. With this knowledge we are able to make better product decisions.
+At GitLab, we collect product usage data for the purpose of helping us build a better product. Data helps GitLab understand which parts of the product need improvement and which features we should build next. Product usage data also helps our team better understand the reasons why people use GitLab. With this knowledge we are able to make better product decisions.
We encourage users to enable tracking, and we embrace full transparency with our tracking approach so it can be easily understood and trusted.
@@ -17,12 +17,11 @@ By enabling tracking, users can:
## Our tracking tools
-We use several different technologies to gather product usage data:
+We use three methods to gather product usage data:
- [Snowplow](#snowplow)
- [Usage Ping](#usage-ping)
- [Database import](#database-import)
-- [Log system](#log-system)
### Snowplow
@@ -47,23 +46,33 @@ For more details, read the [Usage Ping](usage_ping.md) guide.
Database imports are full imports of data into GitLab's data warehouse. For GitLab.com, the PostgreSQL database is loaded into the Snowflake data warehouse every 6 hours. For more details, see the [data team handbook](https://about.gitlab.com/handbook/business-ops/data-team/platform/#extract-and-load).
-### Log system
-
-System logs are the application logs generated from running the GitLab Rails application. For more details, see the [log system](../../administration/logs.md) and [logging infrastructure](https://gitlab.com/gitlab-com/runbooks/tree/master/logging/doc#logging-infrastructure-overview).
-
## What data can be tracked
Our different tracking tools allow us to track different types of events. The event types and examples of what data can be tracked are outlined below.
-| Event Type | Snowplow JS (Frontend) | Snowplow Ruby (Backend) | Usage Ping | Database import | Log system |
-|---------------------|------------------------|-------------------------|---------------------|---------------------|---------------------|
-| Pageview events | **{check-circle}** | **{check-circle}** | **{dotted-circle}** | **{dotted-circle}** | **{dotted-circle}** |
-| UI events | **{check-circle}** | **{dotted-circle}** | **{dotted-circle}** | **{dotted-circle}** | **{dotted-circle}** |
-| CRUD and API events | **{dotted-circle}** | **{check-circle}** | **{dotted-circle}** | **{dotted-circle}** | **{dotted-circle}** |
-| Database records | **{dotted-circle}** | **{dotted-circle}** | **{check-circle}** | **{check-circle}** | **{dotted-circle}** |
-| Instance logs | **{dotted-circle}** | **{dotted-circle}** | **{dotted-circle}** | **{dotted-circle}** | **{check-circle}** |
-| Instance settings | **{dotted-circle}** | **{dotted-circle}** | **{check-circle}** | **{dotted-circle}** | **{dotted-circle}** |
-| Instance integrations | **{dotted-circle}** | **{dotted-circle}** | **{check-circle}** | **{dotted-circle}** | **{dotted-circle}** |
+The availability of event types and their tracking tools varies by segment. For example, for Self-Managed Users, we only have reporting using Database records via Usage Ping.
+
+| Event Types | SaaS Instance | SaaS Plan | SaaS Group | SaaS Session | SaaS User | SM Instance | SM Plan | SM Group | SM Session | SM User |
+|----------------------------------------|---------------|-----------|------------|--------------|-----------|-------------|---------|----------|------------|---------|
+| Snowplow (JS Pageview events) | ✅ | 📅 | 📅 | ✅ | 📅 | 📅 | 📅 | 📅 | 📅 | 📅 |
+| Snowplow (JS UI events) | ✅ | 📅 | 📅 | ✅ | 📅 | 📅 | 📅 | 📅 | 📅 | 📅 |
+| Snowplow (Ruby Pageview events) | ✅ | 📅 | 📅 | ✅ | 📅 | 📅 | 📅 | 📅 | 📅 | 📅 |
+| Snowplow (Ruby CRUD / API events) | ✅ | 📅 | 📅 | ✅ | 📅 | 📅 | 📅 | 📅 | 📅 | 📅 |
+| Usage Ping (Redis UI counters) | 🔄 | 🔄 | 🔄 | ✖️ | 🔄 | 🔄 | 🔄 | 🔄 | ✖️ | 🔄 |
+| Usage Ping (Redis Pageview counters) | 🔄 | 🔄 | 🔄 | ✖️ | 🔄 | 🔄 | 🔄 | 🔄 | ✖️ | 🔄 |
+| Usage Ping (Redis CRUD / API counters) | 🔄 | 🔄 | 🔄 | ✖️ | 🔄 | 🔄 | 🔄 | 🔄 | ✖️ | 🔄 |
+| Usage Ping (Database counters) | ✅ | 🔄 | 📅 | ✖️ | ✅ | ✅ | ✅ | ✅ | ✖️ | ✅ |
+| Usage Ping (Instance settings) | ✅ | 🔄 | 📅 | ✖️ | ✅ | ✅ | ✅ | ✅ | ✖️ | ✅ |
+| Usage Ping (Integration settings) | ✅ | 🔄 | 📅 | ✖️ | ✅ | ✅ | ✅ | ✅ | ✖️ | ✅ |
+| Database import (Database records) | ✅ | ✅ | ✅ | ✖️ | ✅ | ✖️ | ✖️ | ✖️ | ✖️ | ✖️ |
+
+[Source file](https://docs.google.com/spreadsheets/d/1e8Afo41Ar8x3JxAXJF3nL83UxVZ3hPIyXdt243VnNuE/edit?usp=sharing)
+
+**Legend**
+
+✅ Available, 🔄 In Progress, 📅 Planned, ✖️ Not Possible
+
+SaaS = GitLab.com. SM = Self-Managed instance
### Pageview events
@@ -88,62 +97,51 @@ These are backend events that include the creation, read, update, deletion of re
These are raw database records which can be explored using business intelligence tools like Sisense. The full list of available tables can be found in [structure.sql](https://gitlab.com/gitlab-org/gitlab/-/blob/master/db/structure.sql).
-### Instance logs
-
-These are raw logs such as the [Production logs](../../administration/logs.md#production_jsonlog), [API logs](../../administration/logs.md#api_jsonlog), or [Sidekiq logs](../../administration/logs.md#sidekiqlog). See the [overview of Logging Infrastructure](https://gitlab.com/gitlab-com/runbooks/tree/master/logging/doc#logging-infrastructure-overview) for more details.
-
### Instance settings
These are settings of your instance such as the instance's Git version and if certain features are enabled such as `container_registry_enabled`.
-### Instance integrations
+### Integration settings
These are integrations your GitLab instance interacts with such as an [external storage provider](../../administration/static_objects_external_storage.md) or an [external container registry](../../administration/packages/container_registry.md#use-an-external-container-registry-with-gitlab-as-an-auth-endpoint). These services must be able to send data back into a GitLab instance for data to be tracked.
-## Reporting level by segment
+## Reporting level
Our reporting level (aggregate or individual) varies by segment. For example, for Self-Managed Users, we can report at an aggregate user level using Usage Ping but not at an individual user level.
-| Reporting level | SaaS Instance | SaaS Group | SaaS Session | SaaS User | Self-Managed Instance | Self-Managed Group | Self-Managed Session | Self-Managed User |
-|-----------------|---------------|------------|--------------|-----------|-----------------------|--------------------|----------------------|-------------------|
-| Aggregate | ✅ | 📅 | ✅ | ✅ | ✅ | 📅 | ✅ | ✅ |
-| Individual | ✅ | 📅 | ✅ | 🔄 | ✅ | ✖️ | ✖️ | ✖️ |
+| Aggregated Reporting | SaaS Instance | SaaS Plan | SaaS Group | SaaS Session | SaaS User | SM Instance | SM Plan | SM Group | SM Session | SM User |
+|----------------------|---------------|-----------|------------|--------------|-----------|-------------|---------|----------|------------|---------|
+| Snowplow | ✅ | 📅 | 📅 | ✅ | 📅 | ✅ | 📅 | 📅 | ✅ | 📅 |
+| Usage Ping | ✅ | 🔄 | 📅 | 📅 | ✅ | ✅ | ✅ | ✅ | 📅 | ✅ |
+| Database import | ✅ | ✅ | ✅ | ✖️ | ✅ | ✖️ | ✖️ | ✖️ | ✖️ | ✖️ |
-## Event types by segment
-
-The availability of event types and their tracking tools varies by segment. For example, on Self-Managed Users, we only have reporting using Database records via Usage Ping.
-
-| Event Types | SaaS Instance | SaaS Group | SaaS Session | SaaS User | Self-Managed Instance | Self-Managed Group | Self-Managed Session | Self-Managed User |
-|-------------------------------------|---------------|------------|--------------|-----------|-----------------------|--------------------|----------------------|-------------------|
-| Pageview events (Snowplow JS) | ✅ | 📅 | ✅ | 🔄 | 🔄 | 📅 | 🔄 | 🔄 |
-| Pageview events (Snowplow Ruby) | ✅ | 📅 | ✅ | 🔄 | 🔄 | 📅 | 🔄 | 🔄 |
-| UI events (Snowplow JS) | ✅ | 📅 | ✅ | 🔄 | 🔄 | 📅 | 🔄 | 🔄 |
-| CRUD and API events (Snowplow Ruby) | ✅ | 📅 | ✅ | 🔄 | 🔄 | 📅 | 🔄 | 🔄 |
-| Database records (Usage Ping) | ✅ | 📅 | ✖️ | ✅ | ✅ | 📅 | ✖️ | ✅ |
-| Database records (Database import) | ✅ | ✅ | ✖️ | ✅ | ✖️ | ✖️ | ✖️ | ✖️ |
-| Instance logs (Log system) | ✖️ | ✖️ | ✖️ | ✖️ | ✖️ | ✖️ | ✖️ | ✖️ |
-| Instance settings (Usage Ping) | ✅ | 📅 | ✖️ | ✅ | ✅ | 📅 | ✖️ | ✅ |
-| Instance integrations (Usage Ping) | ✅ | 📅 | ✖️ | ✅ | ✅ | 📅 | ✖️ | ✅ |
+| Identifiable Reporting | SaaS Instance | SaaS Plan | SaaS Group | SaaS Session | SaaS User | SM Instance | SM Plan | SM Group | SM Session | SM User |
+|------------------------|---------------|-----------|------------|--------------|-----------|-------------|---------|----------|------------|---------|
+| Snowplow | ✅ | 📅 | 📅 | ✅ | 📅 | ✖️ | ✖️ | ✖️ | ✖️ | ✖️ |
+| Usage Ping | ✅ | 🔄 | 📅 | ✖️ | ✖️ | ✅ | ✅ | ✖️ | ✖️ | ✖️ |
+| Database import | ✅ | ✅ | ✅ | ✖️ | ✅ | ✖️ | ✖️ | ✖️ | ✖️ | ✖️ |
**Legend**
✅ Available, 🔄 In Progress, 📅 Planned, ✖️ Not Possible
-## Reporting time period by segment
+SaaS = GitLab.com. SM = Self-Managed instance
+
+## Reporting time period
Our reporting time periods vary by segment. For example, for Self-Managed Users, we can report all-time counts and 28-day counts in Usage Ping.
-| Reporting time period | SaaS Instance | SaaS Group | SaaS Session | SaaS User | Self-Managed Instance | Self-Managed Group | Self-Managed Session | Self-Managed User |
-|-----------------------|---------------|------------|--------------|-----------|-----------------------|--------------------|----------------------|-------------------|
-| All Time | ✅ | 📅 | ✅ | ✅ | ✅ | 📅 | 🔄 | ✅ |
-| 28 Days | ✅ | 📅 | ✅ | ✅ | ✅ | 📅 | 🔄 | ✅ |
-| Daily | ✅ | 📅 | ✅ | ✅ | ✖️ | ✖️ | ✖️ | ✖️ |
+| Reporting Time Period | All Time | 28 Days | 7 Days | Daily |
+|-----------------------|----------|---------|--------|-------|
+| Snowplow | ✅ | ✅ | ✅ | ✅ |
+| Usage Ping | ✅ | ✅ | 📅 | ✖️ |
+| Database import | ✅ | ✅ | ✅ | ✅ |
**Legend**
✅ Available, 🔄 In Progress, 📅 Planned, ✖️ Not Possible
-## Telemetry systems overview
+## Systems overview
The systems overview is a simplified diagram showing the interactions between GitLab Inc and self-managed instances.
@@ -172,31 +170,8 @@ As shown by the orange lines, on GitLab.com Snowplow JS, Snowplow Ruby, Usage Pi
As shown by the green lines, on GitLab.com system logs flow into GitLab Inc's monitoring infrastructure. On self-managed, there are no logs sent to GitLab Inc's monitoring infrastructure.
-The differences between GitLab.com and self-managed are summarized below:
-
-| Environment | Snowplow JS (Frontend) | Snowplow Ruby (Backend) | Usage Ping | Database import | Logs system |
-|--------------|------------------------|-------------------------|--------------------|---------------------|---------------------|
-| GitLab.com | **{check-circle}** | **{check-circle}** | **{check-circle}** | **{check-circle}** | **{check-circle}** |
-| Self-Managed | **{dotted-circle}**(1) | **{dotted-circle}**(1) | **{check-circle}** | **{dotted-circle}** | **{dotted-circle}** |
-
Note (1): Snowplow JS and Snowplow Ruby are available on self-managed; however, the Snowplow Collector endpoint is set to a self-managed Snowplow Collector, which GitLab Inc does not have access to.
-## Snowflake data warehouse
-
-The Snowflake data warehouse is where we keep all of GitLab Inc's data.
-
-### Data sources
-
-There are several data sources available in Snowflake and Sisense each representing a different view of the data along the transformation pipeline.
-
-| Source | Description | Access |
-| ------ | ------ | ------ |
-| raw | These tables are the raw data source | Access via Snowflake |
-| analytics_staging | These tables have undergone little to no data transformation, meaning they're basically clones of the raw data source | Access via Snowflake or Sisense |
-| analytics | These tables have typically undergone more data transformation. They will typically end in `_xf` to represent the fact that they are transformed | Access via Snowflake or Sisense |
-
-If you are a Product Manager interested in the raw data, you will likely focus on the `analytics` and `analytics_staging` sources. The raw source is limited to the data and infrastructure teams. For more information, please see [Data For Product Managers: What's the difference between analytics_staging and analytics?](https://about.gitlab.com/handbook/business-ops/data-team/programs/data-for-product-managers/#whats-the-difference-between-analytics_staging-and-analytics)
-
## Additional information
More useful links:
diff --git a/doc/development/telemetry/usage_ping.md b/doc/development/telemetry/usage_ping.md
index db470891a19..afb800b85fc 100644
--- a/doc/development/telemetry/usage_ping.md
+++ b/doc/development/telemetry/usage_ping.md
@@ -359,7 +359,7 @@ We also use `#database-lab` and [explain.depesz.com](https://explain.depesz.com/
### 4. Add the metric definition
-When adding, changing, or updating metrics, please update the [Event dictionary Usage Ping table](event_dictionary.md#usage-ping).
+When adding, changing, or updating metrics, please update the [Event Dictionary's **Usage Ping** table](event_dictionary.md).
### 5. Add new metric to Versions Application
diff --git a/spec/controllers/import/bitbucket_server_controller_spec.rb b/spec/controllers/import/bitbucket_server_controller_spec.rb
index bb80de6425f..d5f94be65b6 100644
--- a/spec/controllers/import/bitbucket_server_controller_spec.rb
+++ b/spec/controllers/import/bitbucket_server_controller_spec.rb
@@ -139,8 +139,6 @@ RSpec.describe Import::BitbucketServerController do
describe 'GET status' do
render_views
- let(:repos) { instance_double(BitbucketServer::Collection) }
-
before do
allow(controller).to receive(:client).and_return(client)
@@ -157,14 +155,14 @@ RSpec.describe Import::BitbucketServerController do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['incompatible_repos'].length).to eq(1)
- expect(json_response.dig("incompatible_repos", 0, "id")).to eq(@invalid_repo.full_name)
+ expect(json_response.dig("incompatible_repos", 0, "id")).to eq("#{@invalid_repo.project_key}/#{@invalid_repo.slug}")
expect(json_response['provider_repos'].length).to eq(1)
expect(json_response.dig("provider_repos", 0, "id")).to eq(@repo.full_name)
end
it_behaves_like 'import controller status' do
let(:repo) { @repo }
- let(:repo_id) { @repo.full_name }
+ let(:repo_id) { "#{@repo.project_key}/#{@repo.slug}" }
let(:import_source) { @repo.browse_url }
let(:provider_name) { 'bitbucket_server' }
let(:client_repos_field) { :repos }
diff --git a/spec/serializers/import/bitbucket_server_provider_repo_entity_spec.rb b/spec/serializers/import/bitbucket_server_provider_repo_entity_spec.rb
index 8fe9fa16904..894bf6ac770 100644
--- a/spec/serializers/import/bitbucket_server_provider_repo_entity_spec.rb
+++ b/spec/serializers/import/bitbucket_server_provider_repo_entity_spec.rb
@@ -6,8 +6,10 @@ RSpec.describe Import::BitbucketServerProviderRepoEntity do
let(:repo_data) do
{
'name' => 'test',
+ 'slug' => 'TEST',
'project' => {
- 'name' => 'demo'
+ 'name' => 'demo',
+ 'key' => 'DEM'
},
'links' => {
'self' => [
@@ -27,7 +29,7 @@ RSpec.describe Import::BitbucketServerProviderRepoEntity do
it_behaves_like 'exposes required fields for import entity' do
let(:expected_values) do
{
- id: 'demo/test',
+ id: 'DEM/TEST',
full_name: 'demo/test',
sanitized_name: 'test',
provider_link: 'http://local.bitbucket.server/demo/test.git'
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index 55f92d6bd0a..85bcf4562b1 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -5,9 +5,10 @@ require 'spec_helper'
RSpec.describe MergeRequests::PushOptionsHandlerService do
include ProjectForksHelper
- let(:user) { create(:user) }
- let(:project) { create(:project, :public, :repository) }
- let(:forked_project) { fork_project(project, user, repository: true) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:user) { create(:user, developer_projects: [project]) }
+ let_it_be(:forked_project) { fork_project(project, user, repository: true) }
+
let(:service) { described_class.new(project, user, changes, push_options) }
let(:source_branch) { 'fix' }
let(:target_branch) { 'feature' }
@@ -21,28 +22,14 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::BLANK_SHA} refs/heads/#{source_branch}" }
let(:default_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{project.default_branch}" }
- before do
- project.add_developer(user)
- end
-
shared_examples_for 'a service that can create a merge request' do
subject(:last_mr) { MergeRequest.last }
- it 'creates a merge request' do
- expect { service.execute }.to change { MergeRequest.count }.by(1)
- end
-
- it 'sets the correct target branch' do
+ it 'creates a merge request with the correct target branch and assigned user' do
branch = push_options[:target] || project.default_branch
- service.execute
-
+ expect { service.execute }.to change { MergeRequest.count }.by(1)
expect(last_mr.target_branch).to eq(branch)
- end
-
- it 'assigns the MR to the user' do
- service.execute
-
expect(last_mr.assignees).to contain_exactly(user)
end
@@ -54,15 +41,10 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
allow(forked_project).to receive(:empty_repo?).and_return(false)
end
- it 'sets the correct source project' do
+ it 'sets the correct source and target project' do
service.execute
expect(last_mr.source_project).to eq(forked_project)
- end
-
- it 'sets the correct target project' do
- service.execute
-
expect(last_mr.target_project).to eq(project)
end
end
@@ -746,6 +728,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
end
describe 'when MRs are not enabled' do
+ let(:project) { create(:project, :public, :repository).tap { |pr| pr.add_developer(user) } }
let(:push_options) { { create: true } }
let(:changes) { new_branch_changes }
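The spec refactor above relies on two things worth calling out: `let_it_be` (provided by the `test-prof` gem used in GitLab's suite) creates records once per example group rather than once per example, and the `developer_projects:` factory attribute replaces the removed `project.add_developer(user)` hook. A rough, self-contained sketch of the `let` vs `let_it_be` difference, assuming FactoryBot factories and test-prof are set up in the spec environment:

```ruby
# Sketch only; assumes FactoryBot factories are defined and test-prof's
# let_it_be is loaded (require 'test_prof/recipes/rspec/let_it_be').
require 'spec_helper'

RSpec.describe 'record reuse with let_it_be' do
  # Built fresh for every example: one INSERT per `it` block.
  let(:per_example_user) { create(:user) }

  # Built once for the whole group and reused across examples, which is why
  # the refactor above can drop the per-example `before` hook.
  let_it_be(:shared_project) { create(:project, :public, :repository) }

  it 'reuses the same persisted record across examples' do
    expect(shared_project).to be_persisted
  end
end
```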