Commit 2c3a34cb authored by Phil Hughes's avatar Phil Hughes

Merge branch 'master' into ce-to-ee-2018-03-27

parents 19a007c5 647e3164
VERSION merge=ours
...@@ -377,6 +377,7 @@ ee-files-location-check: ...@@ -377,6 +377,7 @@ ee-files-location-check:
stage: test stage: test
before_script: [] before_script: []
cache: {} cache: {}
retry: 0
script: script:
- scripts/ee-files-location-check - scripts/ee-files-location-check
only: only:
...@@ -639,6 +640,7 @@ db:rollback-mysql: ...@@ -639,6 +640,7 @@ db:rollback-mysql:
db:rollback-pg-geo: &db-rollback db:rollback-pg-geo: &db-rollback
<<: *db-rollback <<: *db-rollback
<<: *use-pg <<: *use-pg
<<: *except-docs
script: script:
- bundle exec rake geo:db:migrate VERSION=20170627195211 - bundle exec rake geo:db:migrate VERSION=20170627195211
- bundle exec rake geo:db:migrate - bundle exec rake geo:db:migrate
......
...@@ -121,7 +121,7 @@ gem 'carrierwave', '~> 1.2' ...@@ -121,7 +121,7 @@ gem 'carrierwave', '~> 1.2'
gem 'dropzonejs-rails', '~> 0.7.1' gem 'dropzonejs-rails', '~> 0.7.1'
# for backups # for backups
gem 'fog-aws', '~> 2.0' gem 'fog-aws', '~> 2.0.1'
gem 'fog-core', '~> 1.44' gem 'fog-core', '~> 1.44'
gem 'fog-google', '~> 1.3.3' gem 'fog-google', '~> 1.3.3'
gem 'fog-local', '~> 0.3' gem 'fog-local', '~> 0.3'
......
...@@ -1082,7 +1082,7 @@ DEPENDENCIES ...@@ -1082,7 +1082,7 @@ DEPENDENCIES
flipper-active_record (~> 0.13.0) flipper-active_record (~> 0.13.0)
flipper-active_support_cache_store (~> 0.13.0) flipper-active_support_cache_store (~> 0.13.0)
fog-aliyun (~> 0.2.0) fog-aliyun (~> 0.2.0)
fog-aws (~> 2.0) fog-aws (~> 2.0.1)
fog-core (~> 1.44) fog-core (~> 1.44)
fog-google (~> 1.3.3) fog-google (~> 1.3.3)
fog-local (~> 0.3) fog-local (~> 0.3)
......
// CI Lint page (instance-level /ci/lint): colors for the validation result.
.ci-body {
// Message shown when the submitted .gitlab-ci.yml fails validation.
.incorrect-syntax {
font-size: 18px;
color: $lint-incorrect-color;
}
// Message shown when the submitted .gitlab-ci.yml is valid.
.correct-syntax {
font-size: 18px;
color: $lint-correct-color;
}
}
// Layout for the CI Lint editor widget.
.ci-linter {
// Fixed-height container for the Ace editor instance.
.ci-editor {
height: 400px;
}
// Wrap long lines in the rendered configuration preview.
.ci-template pre {
white-space: pre-wrap;
}
}
...@@ -1121,3 +1121,25 @@ pre.light-well { ...@@ -1121,3 +1121,25 @@ pre.light-well {
padding-top: $gl-padding; padding-top: $gl-padding;
padding-bottom: 37px; padding-bottom: 37px;
} }
// Project-scoped CI Lint page: colors for the validation result.
.project-ci-body {
// Message shown when the submitted .gitlab-ci.yml fails validation.
.incorrect-syntax {
font-size: 18px;
color: $lint-incorrect-color;
}
// Message shown when the submitted .gitlab-ci.yml is valid.
.correct-syntax {
font-size: 18px;
color: $lint-correct-color;
}
}
// Layout for the project-scoped CI Lint editor widget.
.project-ci-linter {
// Fixed-height container for the Ace editor instance.
.ci-editor {
height: 400px;
}
// Wrap long lines in the rendered configuration preview.
.ci-template pre {
white-space: pre-wrap;
}
}
...@@ -4,20 +4,5 @@ module Ci ...@@ -4,20 +4,5 @@ module Ci
def show def show
end end
def create
@content = params[:content]
@error = Gitlab::Ci::YamlProcessor.validation_message(@content)
@status = @error.blank?
if @error.blank?
@config_processor = Gitlab::Ci::YamlProcessor.new(@content)
@stages = @config_processor.stages
@builds = @config_processor.builds
@jobs = @config_processor.jobs
end
render :show
end
end end
end end
# Project-scoped CI Lint tool.
#
# GET  show   - renders the lint form.
# POST create - validates the submitted `.gitlab-ci.yml` content and, when it
#               is valid, exposes the parsed stages/builds/jobs to the view.
class Projects::Ci::LintsController < Projects::ApplicationController
  before_action :authorize_create_pipeline!

  def show
  end

  def create
    @content = params[:content]
    @error = Gitlab::Ci::YamlProcessor.validation_message(@content, yaml_processor_options)
    # True when the content validated cleanly; reused below instead of
    # re-evaluating `@error.blank?`.
    @status = @error.blank?

    if @status
      @config_processor = Gitlab::Ci::YamlProcessor.new(@content, yaml_processor_options)
      @stages = @config_processor.stages
      @builds = @config_processor.builds
      @jobs = @config_processor.jobs
    end

    render :show
  end

  private

  # Options handed to the YAML processor so project-dependent keywords
  # resolve against this project's repository.
  def yaml_processor_options
    # NOTE(review): `repository.commit` is nil for an empty repository, so use
    # safe navigation to avoid a 500 before the first push. Receiver is the
    # `project` helper throughout (the original mixed `@project` and `project`).
    { project: project, sha: project.repository.commit&.sha }
  end
end
...@@ -29,12 +29,12 @@ module Projects ...@@ -29,12 +29,12 @@ module Projects
@project_runners = @project.runners.ordered @project_runners = @project.runners.ordered
@assignable_runners = current_user.ci_authorized_runners @assignable_runners = current_user.ci_authorized_runners
.assignable_for(project).ordered.page(params[:page]).per(20) .assignable_for(project).ordered.page(params[:page]).per(20)
@shared_runners = Ci::Runner.shared.active @shared_runners = ::Ci::Runner.shared.active
@shared_runners_count = @shared_runners.count(:all) @shared_runners_count = @shared_runners.count(:all)
end end
def define_secret_variables def define_secret_variables
@variable = Ci::Variable.new(project: project) @variable = ::Ci::Variable.new(project: project)
.present(current_user: current_user) .present(current_user: current_user)
@variables = project.variables.order_key_asc @variables = project.variables.order_key_asc
.map { |variable| variable.present(current_user: current_user) } .map { |variable| variable.present(current_user: current_user) }
...@@ -42,7 +42,7 @@ module Projects ...@@ -42,7 +42,7 @@ module Projects
def define_triggers_variables def define_triggers_variables
@triggers = @project.triggers @triggers = @project.triggers
@trigger = Ci::Trigger.new @trigger = ::Ci::Trigger.new
end end
def define_badges_variables def define_badges_variables
......
...@@ -11,6 +11,7 @@ module Ci ...@@ -11,6 +11,7 @@ module Ci
before_save :set_size, if: :file_changed? before_save :set_size, if: :file_changed?
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) } scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) }
mount_uploader :file, JobArtifactUploader mount_uploader :file, JobArtifactUploader
......
...@@ -7,6 +7,7 @@ class LfsObject < ActiveRecord::Base ...@@ -7,6 +7,7 @@ class LfsObject < ActiveRecord::Base
has_many :projects, through: :lfs_objects_projects has_many :projects, through: :lfs_objects_projects
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) } scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(file_store: LfsObjectUploader::Store::REMOTE) }
validates :oid, presence: true, uniqueness: true validates :oid, presence: true, uniqueness: true
......
...@@ -12,6 +12,7 @@ class Upload < ActiveRecord::Base ...@@ -12,6 +12,7 @@ class Upload < ActiveRecord::Base
validates :uploader, presence: true validates :uploader, presence: true
scope :with_files_stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) } scope :with_files_stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) }
before_save :calculate_checksum!, if: :foreground_checksummable? before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable? after_commit :schedule_checksum, if: :checksummable?
......
- page_title "CI Lint" .row.empty-state
- page_description "Validate your GitLab CI configuration file" .col-xs-12
- content_for :library_javascripts do .svg-content
= page_specific_javascript_tag('lib/ace.js') = image_tag 'illustrations/feature_moved.svg'
.col-xs-12
%h2 Check your .gitlab-ci.yml .text-content.text-center
%h4= _("GitLab CI Linter has been moved")
.ci-linter %p
.row = _("To validate your GitLab CI configurations, go to 'CI/CD → Pipelines' inside your project, and click on the 'CI Lint' button.")
= form_tag ci_lint_path, method: :post do
.form-group
.col-sm-12
.file-holder
.js-file-title.file-title.clearfix
Content of .gitlab-ci.yml
#ci-editor.ci-editor= @content
= text_area_tag(:content, @content, class: 'hidden form-control span1', rows: 7, require: true)
.col-sm-12
.pull-left.prepend-top-10
= submit_tag('Validate', class: 'btn btn-success submit-yml')
.pull-right.prepend-top-10
= button_tag('Clear', type: 'button', class: 'btn btn-default clear-yml')
.row.prepend-top-20
.col-sm-12
.results.ci-template
= render partial: 'create' if defined?(@status)
- page_title "CI Lint"
- page_description "Validate your GitLab CI configuration file"
- content_for :library_javascripts do
= page_specific_javascript_tag('lib/ace.js')
%h2 Check your .gitlab-ci.yml
.project-ci-linter
.row
= form_tag project_ci_lint_path(@project), method: :post do
.form-group
.col-sm-12
.file-holder
.js-file-title.file-title.clearfix
Content of .gitlab-ci.yml
#ci-editor.ci-editor= @content
= text_area_tag(:content, @content, class: 'hidden form-control span1', rows: 7, require: true)
.col-sm-12
.pull-left.prepend-top-10
= submit_tag('Validate', class: 'btn btn-success submit-yml')
.pull-right.prepend-top-10
= button_tag('Clear', type: 'button', class: 'btn btn-default clear-yml')
.row.prepend-top-20
.col-sm-12
.results.project-ci-template
= render partial: 'create' if defined?(@status)
...@@ -13,6 +13,6 @@ ...@@ -13,6 +13,6 @@
"no-pipelines-svg-path" => image_path('illustrations/pipelines_pending.svg'), "no-pipelines-svg-path" => image_path('illustrations/pipelines_pending.svg'),
"can-create-pipeline" => can?(current_user, :create_pipeline, @project).to_s, "can-create-pipeline" => can?(current_user, :create_pipeline, @project).to_s,
"new-pipeline-path" => can?(current_user, :create_pipeline, @project) && new_project_pipeline_path(@project), "new-pipeline-path" => can?(current_user, :create_pipeline, @project) && new_project_pipeline_path(@project),
"ci-lint-path" => can?(current_user, :create_pipeline, @project) && ci_lint_path, "ci-lint-path" => can?(current_user, :create_pipeline, @project) && project_ci_lint_path(@project),
"reset-cache-path" => can?(current_user, :admin_pipeline, @project) && reset_cache_project_settings_ci_cd_path(@project) , "reset-cache-path" => can?(current_user, :admin_pipeline, @project) && reset_cache_project_settings_ci_cd_path(@project) ,
"has-gitlab-ci" => (@project.has_ci? && @project.builds_enabled?).to_s } } "has-gitlab-ci" => (@project.has_ci? && @project.builds_enabled?).to_s } }
...@@ -122,6 +122,7 @@ ...@@ -122,6 +122,7 @@
- cronjob:geo_file_download_dispatch - cronjob:geo_file_download_dispatch
- cronjob:geo_metrics_update - cronjob:geo_metrics_update
- cronjob:geo_prune_event_log - cronjob:geo_prune_event_log
- cronjob:geo_migrated_local_files_clean_up
- cronjob:geo_repository_sync - cronjob:geo_repository_sync
- cronjob:geo_repository_verification_primary_batch - cronjob:geo_repository_verification_primary_batch
- cronjob:geo_repository_verification_secondary_scheduler - cronjob:geo_repository_verification_secondary_scheduler
...@@ -140,6 +141,7 @@ ...@@ -140,6 +141,7 @@
- geo:geo_scheduler_secondary_scheduler - geo:geo_scheduler_secondary_scheduler
- geo:geo_file_download - geo:geo_file_download
- geo:geo_file_removal - geo:geo_file_removal
- geo:geo_file_registry_removal
- geo:geo_hashed_storage_attachments_migration - geo:geo_hashed_storage_attachments_migration
- geo:geo_hashed_storage_migration - geo:geo_hashed_storage_migration
- geo:geo_project_sync - geo:geo_project_sync
......
---
title: Move ci/lint under project's namespace
merge_request: 17729
author:
type: added
---
title: Port direct upload of LFS artifacts from EE
merge_request: 17752
author:
type: added
...@@ -288,6 +288,11 @@ production: &base ...@@ -288,6 +288,11 @@ production: &base
geo_file_download_dispatch_worker: geo_file_download_dispatch_worker:
cron: "*/1 * * * *" cron: "*/1 * * * *"
# GitLab Geo migrated local files clean up worker
# NOTE: This will only take effect if Geo is enabled (secondary nodes only)
geo_migrated_local_files_clean_up_worker:
cron: "15 */6 * * *"
registry: registry:
# enabled: true # enabled: true
# host: registry.example.com # host: registry.example.com
......
...@@ -482,6 +482,9 @@ Settings.cron_jobs['geo_repository_verification_primary_batch_worker']['job_clas ...@@ -482,6 +482,9 @@ Settings.cron_jobs['geo_repository_verification_primary_batch_worker']['job_clas
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker'] ||= Settingslogic.new({}) Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker']['cron'] ||= '*/1 * * * *' Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker']['job_class'] ||= 'Geo::RepositoryVerification::Secondary::SchedulerWorker' Settings.cron_jobs['geo_repository_verification_secondary_scheduler_worker']['job_class'] ||= 'Geo::RepositoryVerification::Secondary::SchedulerWorker'
Settings.cron_jobs['geo_migrated_local_files_clean_up_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['geo_migrated_local_files_clean_up_worker']['cron'] ||= '15 */6 * * *'
Settings.cron_jobs['geo_migrated_local_files_clean_up_worker']['job_class'] ||= 'Geo::MigratedLocalFilesCleanUpWorker'
Settings.cron_jobs['import_export_project_cleanup_worker'] ||= Settingslogic.new({}) Settings.cron_jobs['import_export_project_cleanup_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['import_export_project_cleanup_worker']['cron'] ||= '0 * * * *' Settings.cron_jobs['import_export_project_cleanup_worker']['cron'] ||= '0 * * * *'
Settings.cron_jobs['import_export_project_cleanup_worker']['job_class'] = 'ImportExportProjectCleanupWorker' Settings.cron_jobs['import_export_project_cleanup_worker']['job_class'] = 'ImportExportProjectCleanupWorker'
......
namespace :ci do namespace :ci do
resource :lint, only: [:show, :create] resource :lint, only: :show
root to: redirect('') root to: redirect('')
end end
...@@ -318,6 +318,10 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do ...@@ -318,6 +318,10 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
post :keep post :keep
end end
end end
namespace :ci do
resource :lint, only: [:show, :create]
end
end end
draw :legacy_builds draw :legacy_builds
......
...@@ -104,8 +104,8 @@ Jobs are used to create jobs, which are then picked by ...@@ -104,8 +104,8 @@ Jobs are used to create jobs, which are then picked by
What is important is that each job is run independently from each other. What is important is that each job is run independently from each other.
If you want to check whether your `.gitlab-ci.yml` file is valid, there is a If you want to check whether the `.gitlab-ci.yml` of your project is valid, there is a
Lint tool under the page `/ci/lint` of your GitLab instance. You can also find Lint tool under the page `/ci/lint` of your project namespace. You can also find
a "CI Lint" button to go to this page under **CI/CD ➔ Pipelines** and a "CI Lint" button to go to this page under **CI/CD ➔ Pipelines** and
**Pipelines ➔ Jobs** in your project. **Pipelines ➔ Jobs** in your project.
......
...@@ -1290,9 +1290,6 @@ In this case, the variables `POSTGRES_USER`, `POSTGRES_PASSWORD` and ...@@ -1290,9 +1290,6 @@ In this case, the variables `POSTGRES_USER`, `POSTGRES_PASSWORD` and
`autodevops-template.yml` will be overridden by the ones defined in `autodevops-template.yml` will be overridden by the ones defined in
`.gitlab-ci.yml`. `.gitlab-ci.yml`.
NOTE: **Note:**
Momentarily the [CI Lint](https://gitlab.com/ci/lint) does not support the `include` keyword.
## `variables` ## `variables`
> Introduced in GitLab Runner v0.5.0. > Introduced in GitLab Runner v0.5.0.
...@@ -1667,8 +1664,9 @@ capitalization, the commit will be created but the pipeline will be skipped. ...@@ -1667,8 +1664,9 @@ capitalization, the commit will be created but the pipeline will be skipped.
## Validate the .gitlab-ci.yml ## Validate the .gitlab-ci.yml
Each instance of GitLab CI has an embedded debug tool called Lint. Each instance of GitLab CI has an embedded debug tool called Lint, which validates the
You can find the link under `/ci/lint` of your gitlab instance. content of your `.gitlab-ci.yml` files. You can find the Lint under the page `ci/lint` of your
project namespace (e.g, `http://gitlab-example.com/gitlab-org/project-123/ci/lint`)
## Using reserved keywords ## Using reserved keywords
......
...@@ -28,6 +28,10 @@ functionality that render cross-project data. That includes: ...@@ -28,6 +28,10 @@ functionality that render cross-project data. That includes:
This is to prevent performing to many requests at once to the external This is to prevent performing to many requests at once to the external
authorization service. authorization service.
Whenever access is granted or denied this is logged in a logfile called
`external-policy-access-control.log`.
Read more about logs GitLab keeps in the [omnibus documentation][omnibus-log-docs].
## Configuration ## Configuration
The external authorization service can be enabled by an admin on the GitLab's The external authorization service can be enabled by an admin on the GitLab's
...@@ -104,3 +108,4 @@ The label will be shown on all project pages in the upper right corner. ...@@ -104,3 +108,4 @@ The label will be shown on all project pages in the upper right corner.
![classification label on project page](img/classification_label_on_project_page.png) ![classification label on project page](img/classification_label_on_project_page.png)
[omnibus-ssl-docs]: https://docs.gitlab.com/omnibus/settings/ssl.html [omnibus-ssl-docs]: https://docs.gitlab.com/omnibus/settings/ssl.html
[omnibus-log-docs]: https://docs.gitlab.com/omnibus/settings/logs.html
...@@ -11,10 +11,17 @@ integration services must be enabled. ...@@ -11,10 +11,17 @@ integration services must be enabled.
## Metrics supported ## Metrics supported
| Name | Query | - Average Memory Usage (MB):
| ---- | ----- |
| Average Memory Usage (MB) | avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024 | ```
| Average CPU Utilization (%) | avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name)) | avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024
```
- Average CPU Utilization (%):
```
avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-([^c].*|c([^a]|a([^n]|n([^a]|a([^r]|r[^y])))).*|)-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name))
```
## Configuring Prometheus to monitor for Kubernetes metrics ## Configuring Prometheus to monitor for Kubernetes metrics
...@@ -39,7 +46,14 @@ These metrics expect the [Deployment](https://kubernetes.io/docs/concepts/worklo ...@@ -39,7 +46,14 @@ These metrics expect the [Deployment](https://kubernetes.io/docs/concepts/worklo
### Canary metrics supported ### Canary metrics supported
| Name | Query | - Average Memory Usage (MB)
| ---- | ----- |
| Average Memory Usage (MB) | avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024 | ```
| Average CPU Utilization (%) | avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name)) | avg(sum(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}) by (job)) without (job) / count(avg(container_memory_usage_bytes{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}) without (job)) /1024/1024
```
- Average CPU Utilization (%)
```
avg(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}[15m])) by (job)) without (job) / count(sum(rate(container_cpu_usage_seconds_total{container_name!="POD",pod_name=~"^%{ci_environment_slug}-canary-(.*)",namespace="%{kube_namespace}"}[15m])) by (pod_name))
```
<script>
import CiIcon from '~/vue_shared/components/ci_icon.vue';
/**
 * Renders the error row for each security report:
 * a warning CI icon next to a static "error loading results" message.
 */
export default {
name: 'SecurityErrorRow',
components: {
CiIcon,
},
computed: {
// Static warning status object consumed by <ci-icon>.
iconStatus() {
return {
group: 'warning',
icon: 'status_warning',
};
},
},
};
</script>
<template>
<div class="report-block-list-issue prepend-left-default append-right-default">
<div class="report-block-list-icon append-right-10 prepend-left-5">
<ci-icon :status="iconStatus" />
</div>
<div class="report-block-list-issue-description">
{{ __("There was an error loading results") }}
</div>
</div>
</template>
<script>
import Icon from '~/vue_shared/components/icon.vue';
import popover from '~/vue_shared/directives/popover';
/**
 * Question-mark button that opens a Bootstrap-style popover with help text
 * for a security report. Popover content/placement is supplied via `options`.
 */
export default {
name: 'SecurityReportsHelpPopover',
components: {
Icon,
},
directives: {
popover,
},
props: {
// Popover options (e.g. title/content); merged over the defaults below.
options: {
type: Object,
required: true,
},
},
computed: {
// Default popover configuration; any key can be overridden by `options`.
popoverOptions() {
return {
html: true,
trigger: 'focus',
placement: 'top',
template:
'<div class="popover" role="tooltip"><div class="arrow"></div><p class="popover-title"></p><div class="popover-content"></div></div>',
...this.options,
};
},
},
};
</script>
<template>
<button
type="button"
class="btn btn-transparent"
v-popover="popoverOptions"
tabindex="0"
>
<icon name="question" />
</button>
</template>
<script>
import LoadingIcon from '~/vue_shared/components/loading_icon.vue';
/**
 * Renders the loading row for each security report:
 * a spinner next to an "in progress" message.
 */
export default {
name: 'SecurityLoadingRow',
components: {
LoadingIcon,
},
};
</script>
<template>
<div class="report-block-list-issue prepend-left-default append-right-default">
<div class="report-block-list-icon append-right-10 prepend-left-5">
<loading-icon />
</div>
<div class="report-block-list-issue-description">
{{ __("in progress") }}
</div>
</div>
</template>
<script>
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import Popover from './help_popover.vue';
/**
 * Renders the summary row for each security report:
 * a status icon, a summary sentence, and a help popover.
 */
export default {
name: 'SecuritySummaryRow',
components: {
CiIcon,
Popover,
},
props: {
// Human-readable summary sentence for the report.
summary: {
type: String,
required: true,
},
// Status name (e.g. "success", "warning") used to build the icon status.
statusIcon: {
type: String,
required: true,
},
// Options forwarded to the help popover component.
popoverOptions: {
type: Object,
required: true,
},
},
computed: {
// Maps the `statusIcon` prop to the object shape <ci-icon> expects.
iconStatus() {
return {
group: this.statusIcon,
icon: `status_${this.statusIcon}`,
};
},
},
};
</script>
<template>
<div class="report-block-list-issue prepend-left-default append-right-default">
<div class="report-block-list-icon append-right-10 prepend-left-5">
<ci-icon :status="iconStatus" />
</div>
<div class="report-block-list-issue-description prepend-top-5 append-bottom-5">
<div class="report-block-list-issue-description-text append-right-5">
{{ summary }}
</div>
<popover :options="popoverOptions" />
</div>
</div>
</template>
...@@ -32,6 +32,10 @@ module Geo ...@@ -32,6 +32,10 @@ module Geo
end end
end end
def count_registry_attachments
Geo::FileRegistry.attachments.count
end
def find_synced_attachments def find_synced_attachments
if use_legacy_queries? if use_legacy_queries?
legacy_find_synced_attachments legacy_find_synced_attachments
...@@ -69,6 +73,17 @@ module Geo ...@@ -69,6 +73,17 @@ module Geo
relation.limit(batch_size) relation.limit(batch_size)
end end
def find_migrated_local_attachments(batch_size:, except_file_ids: [])
relation =
if use_legacy_queries?
legacy_find_migrated_local_attachments(except_file_ids: except_file_ids)
else
fdw_find_migrated_local_attachments(except_file_ids: except_file_ids)
end
relation.limit(batch_size)
end
private private
def group_uploads def group_uploads
...@@ -143,6 +158,13 @@ module Geo ...@@ -143,6 +158,13 @@ module Geo
Geo::Fdw::Upload.table_name Geo::Fdw::Upload.table_name
end end
def fdw_find_migrated_local_attachments(except_file_ids:)
fdw_attachments.joins("INNER JOIN file_registry ON file_registry.file_id = #{fdw_attachments_table}.id")
.with_files_stored_remotely
.merge(Geo::FileRegistry.attachments)
.where.not(id: except_file_ids)
end
# #
# Legacy accessors (non FDW) # Legacy accessors (non FDW)
# #
...@@ -172,5 +194,15 @@ module Geo ...@@ -172,5 +194,15 @@ module Geo
Upload Upload
) )
end end
def legacy_find_migrated_local_attachments(except_file_ids:)
registry_file_ids = Geo::FileRegistry.attachments.pluck(:file_id) - except_file_ids
legacy_inner_join_registry_ids(
attachments.with_files_stored_remotely,
registry_file_ids,
Upload
)
end
end end
end end
module Geo module Geo
class JobArtifactRegistryFinder < FileRegistryFinder class JobArtifactRegistryFinder < FileRegistryFinder
def count_job_artifacts def count_local_job_artifacts
local_job_artifacts.count local_job_artifacts.count
end end
...@@ -20,6 +20,10 @@ module Geo ...@@ -20,6 +20,10 @@ module Geo
end end
end end
def count_registry_job_artifacts
Geo::FileRegistry.job_artifacts.count
end
# Find limited amount of non replicated lfs objects. # Find limited amount of non replicated lfs objects.
# #
# You can pass a list with `except_file_ids:` so you can exclude items you # You can pass a list with `except_file_ids:` so you can exclude items you
...@@ -41,6 +45,17 @@ module Geo ...@@ -41,6 +45,17 @@ module Geo
relation.limit(batch_size) relation.limit(batch_size)
end end
def find_migrated_local_job_artifacts(batch_size:, except_file_ids: [])
relation =
if use_legacy_queries?
legacy_find_migrated_local_job_artifacts(except_file_ids: except_file_ids)
else
fdw_find_migrated_local_job_artifacts(except_file_ids: except_file_ids)
end
relation.limit(batch_size)
end
def job_artifacts def job_artifacts
if selective_sync? if selective_sync?
Ci::JobArtifact.joins(:project).where(projects: { id: current_node.projects }) Ci::JobArtifact.joins(:project).where(projects: { id: current_node.projects })
...@@ -90,6 +105,13 @@ module Geo ...@@ -90,6 +105,13 @@ module Geo
.where.not(id: except_file_ids) .where.not(id: except_file_ids)
end end
def fdw_find_migrated_local_job_artifacts(except_file_ids:)
fdw_job_artifacts.joins("INNER JOIN file_registry ON file_registry.file_id = #{fdw_job_artifacts_table}.id")
.with_files_stored_remotely
.where.not(id: except_file_ids)
.merge(Geo::FileRegistry.job_artifacts)
end
def fdw_job_artifacts def fdw_job_artifacts
if selective_sync? if selective_sync?
Geo::Fdw::Ci::JobArtifact.joins(:project).where(projects: { id: current_node.projects }) Geo::Fdw::Ci::JobArtifact.joins(:project).where(projects: { id: current_node.projects })
...@@ -131,5 +153,15 @@ module Geo ...@@ -131,5 +153,15 @@ module Geo
Ci::JobArtifact Ci::JobArtifact
) )
end end
def legacy_find_migrated_local_job_artifacts(except_file_ids:)
registry_file_ids = Geo::FileRegistry.job_artifacts.pluck(:file_id) - except_file_ids
legacy_inner_join_registry_ids(
job_artifacts.with_files_stored_remotely,
registry_file_ids,
Ci::JobArtifact
)
end
end end
end end
module Geo module Geo
class LfsObjectRegistryFinder < FileRegistryFinder class LfsObjectRegistryFinder < FileRegistryFinder
def count_lfs_objects def count_local_lfs_objects
local_lfs_objects.count local_lfs_objects.count
end end
...@@ -20,6 +20,10 @@ module Geo ...@@ -20,6 +20,10 @@ module Geo
end end
end end
def count_registry_lfs_objects
Geo::FileRegistry.lfs_objects.count
end
# Find limited amount of non replicated lfs objects. # Find limited amount of non replicated lfs objects.
# #
# You can pass a list with `except_file_ids:` so you can exclude items you # You can pass a list with `except_file_ids:` so you can exclude items you
...@@ -41,6 +45,17 @@ module Geo ...@@ -41,6 +45,17 @@ module Geo
relation.limit(batch_size) relation.limit(batch_size)
end end
def find_migrated_local_lfs_objects(batch_size:, except_file_ids: [])
relation =
if use_legacy_queries?
legacy_find_migrated_local_lfs_objects(except_file_ids: except_file_ids)
else
fdw_find_migrated_local_lfs_objects(except_file_ids: except_file_ids)
end
relation.limit(batch_size)
end
def lfs_objects def lfs_objects
if selective_sync? if selective_sync?
LfsObject.joins(:projects).where(projects: { id: current_node.projects }) LfsObject.joins(:projects).where(projects: { id: current_node.projects })
...@@ -90,6 +105,13 @@ module Geo ...@@ -90,6 +105,13 @@ module Geo
.where.not(id: except_file_ids) .where.not(id: except_file_ids)
end end
def fdw_find_migrated_local_lfs_objects(except_file_ids:)
fdw_lfs_objects.joins("INNER JOIN file_registry ON file_registry.file_id = #{fdw_lfs_objects_table}.id")
.with_files_stored_remotely
.where.not(id: except_file_ids)
.merge(Geo::FileRegistry.lfs_objects)
end
def fdw_lfs_objects def fdw_lfs_objects
if selective_sync? if selective_sync?
Geo::Fdw::LfsObject.joins(:project).where(projects: { id: current_node.projects }) Geo::Fdw::LfsObject.joins(:project).where(projects: { id: current_node.projects })
...@@ -131,5 +153,15 @@ module Geo ...@@ -131,5 +153,15 @@ module Geo
LfsObject LfsObject
) )
end end
def legacy_find_migrated_local_lfs_objects(except_file_ids:)
registry_file_ids = Geo::FileRegistry.lfs_objects.pluck(:file_id) - except_file_ids
legacy_inner_join_registry_ids(
lfs_objects.with_files_stored_remotely,
registry_file_ids,
LfsObject
)
end
end end
end end
...@@ -5,6 +5,7 @@ module Geo ...@@ -5,6 +5,7 @@ module Geo
self.table_name = Gitlab::Geo::Fdw.table('ci_job_artifacts') self.table_name = Gitlab::Geo::Fdw.table('ci_job_artifacts')
scope :with_files_stored_locally, -> { where(file_store: [nil, JobArtifactUploader::Store::LOCAL]) } scope :with_files_stored_locally, -> { where(file_store: [nil, JobArtifactUploader::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(file_store: JobArtifactUploader::Store::REMOTE) }
end end
end end
end end
......
...@@ -5,5 +5,4 @@ class Geo::FileRegistry < Geo::BaseRegistry ...@@ -5,5 +5,4 @@ class Geo::FileRegistry < Geo::BaseRegistry
scope :lfs_objects, -> { where(file_type: :lfs) } scope :lfs_objects, -> { where(file_type: :lfs) }
scope :job_artifacts, -> { where(file_type: :job_artifact) } scope :job_artifacts, -> { where(file_type: :job_artifact) }
scope :attachments, -> { where(file_type: Geo::FileService::DEFAULT_OBJECT_TYPES) } scope :attachments, -> { where(file_type: Geo::FileService::DEFAULT_OBJECT_TYPES) }
scope :stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
end end
...@@ -13,6 +13,7 @@ class GeoNodeStatus < ActiveRecord::Base ...@@ -13,6 +13,7 @@ class GeoNodeStatus < ActiveRecord::Base
:repository_created_max_id, :repository_updated_max_id, :repository_created_max_id, :repository_updated_max_id,
:repository_deleted_max_id, :repository_renamed_max_id, :repositories_changed_max_id, :repository_deleted_max_id, :repository_renamed_max_id, :repositories_changed_max_id,
:lfs_object_deleted_max_id, :job_artifact_deleted_max_id, :lfs_object_deleted_max_id, :job_artifact_deleted_max_id,
:lfs_objects_registry_count, :job_artifacts_registry_count, :attachments_registry_count,
:hashed_storage_migrated_max_id, :hashed_storage_attachments_max_id :hashed_storage_migrated_max_id, :hashed_storage_attachments_max_id
# Be sure to keep this consistent with Prometheus naming conventions # Be sure to keep this consistent with Prometheus naming conventions
...@@ -31,12 +32,15 @@ class GeoNodeStatus < ActiveRecord::Base ...@@ -31,12 +32,15 @@ class GeoNodeStatus < ActiveRecord::Base
lfs_objects_count: 'Total number of local LFS objects available on primary', lfs_objects_count: 'Total number of local LFS objects available on primary',
lfs_objects_synced_count: 'Number of local LFS objects synced on secondary', lfs_objects_synced_count: 'Number of local LFS objects synced on secondary',
lfs_objects_failed_count: 'Number of local LFS objects failed to sync on secondary', lfs_objects_failed_count: 'Number of local LFS objects failed to sync on secondary',
lfs_objects_registry_count: 'Number of LFS objects in the registry',
job_artifacts_count: 'Total number of local job artifacts available on primary', job_artifacts_count: 'Total number of local job artifacts available on primary',
job_artifacts_synced_count: 'Number of local job artifacts synced on secondary', job_artifacts_synced_count: 'Number of local job artifacts synced on secondary',
job_artifacts_failed_count: 'Number of local job artifacts failed to sync on secondary', job_artifacts_failed_count: 'Number of local job artifacts failed to sync on secondary',
job_artifacts_registry_count: 'Number of job artifacts in the registry',
attachments_count: 'Total number of local file attachments available on primary', attachments_count: 'Total number of local file attachments available on primary',
attachments_synced_count: 'Number of local file attachments synced on secondary', attachments_synced_count: 'Number of local file attachments synced on secondary',
attachments_failed_count: 'Number of local file attachments failed to sync on secondary', attachments_failed_count: 'Number of local file attachments failed to sync on secondary',
attachments_registry_count: 'Number of attachments in the registry',
replication_slots_count: 'Total number of replication slots on the primary', replication_slots_count: 'Total number of replication slots on the primary',
replication_slots_used_count: 'Number of replication slots in use on the primary', replication_slots_used_count: 'Number of replication slots in use on the primary',
replication_slots_max_retained_wal_bytes: 'Maximum number of bytes retained in the WAL on the primary', replication_slots_max_retained_wal_bytes: 'Maximum number of bytes retained in the WAL on the primary',
...@@ -107,8 +111,8 @@ class GeoNodeStatus < ActiveRecord::Base ...@@ -107,8 +111,8 @@ class GeoNodeStatus < ActiveRecord::Base
self.last_event_date = latest_event&.created_at self.last_event_date = latest_event&.created_at
self.repositories_count = projects_finder.count_repositories self.repositories_count = projects_finder.count_repositories
self.wikis_count = projects_finder.count_wikis self.wikis_count = projects_finder.count_wikis
self.lfs_objects_count = lfs_objects_finder.count_lfs_objects self.lfs_objects_count = lfs_objects_finder.count_local_lfs_objects
self.job_artifacts_count = job_artifacts_finder.count_job_artifacts self.job_artifacts_count = job_artifacts_finder.count_local_job_artifacts
self.attachments_count = attachments_finder.count_local_attachments self.attachments_count = attachments_finder.count_local_attachments
self.last_successful_status_check_at = Time.now self.last_successful_status_check_at = Time.now
self.storage_shards = StorageShard.all self.storage_shards = StorageShard.all
...@@ -162,10 +166,13 @@ class GeoNodeStatus < ActiveRecord::Base ...@@ -162,10 +166,13 @@ class GeoNodeStatus < ActiveRecord::Base
self.wikis_verification_failed_count = projects_finder.count_verification_failed_wikis self.wikis_verification_failed_count = projects_finder.count_verification_failed_wikis
self.lfs_objects_synced_count = lfs_objects_finder.count_synced_lfs_objects self.lfs_objects_synced_count = lfs_objects_finder.count_synced_lfs_objects
self.lfs_objects_failed_count = lfs_objects_finder.count_failed_lfs_objects self.lfs_objects_failed_count = lfs_objects_finder.count_failed_lfs_objects
self.lfs_objects_registry_count = lfs_objects_finder.count_registry_lfs_objects
self.job_artifacts_synced_count = job_artifacts_finder.count_synced_job_artifacts self.job_artifacts_synced_count = job_artifacts_finder.count_synced_job_artifacts
self.job_artifacts_failed_count = job_artifacts_finder.count_failed_job_artifacts self.job_artifacts_failed_count = job_artifacts_finder.count_failed_job_artifacts
self.job_artifacts_registry_count = job_artifacts_finder.count_registry_job_artifacts
self.attachments_synced_count = attachments_finder.count_synced_attachments self.attachments_synced_count = attachments_finder.count_synced_attachments
self.attachments_failed_count = attachments_finder.count_failed_attachments self.attachments_failed_count = attachments_finder.count_failed_attachments
self.attachments_registry_count = attachments_finder.count_registry_attachments
end end
end end
......
...@@ -19,7 +19,8 @@ module EE ...@@ -19,7 +19,8 @@ module EE
condition(:classification_label_authorized, score: 32) do condition(:classification_label_authorized, score: 32) do
EE::Gitlab::ExternalAuthorization.access_allowed?( EE::Gitlab::ExternalAuthorization.access_allowed?(
@user, @user,
@subject.external_authorization_classification_label @subject.external_authorization_classification_label,
@subject.full_path
) )
end end
......
...@@ -47,6 +47,6 @@ module ExclusiveLeaseGuard ...@@ -47,6 +47,6 @@ module ExclusiveLeaseGuard
end end
def log_error(message, extra_args = {}) def log_error(message, extra_args = {})
logger.error(messages) logger.error(message)
end end
end end
...@@ -3,9 +3,10 @@ module Geo ...@@ -3,9 +3,10 @@ module Geo
LEASE_TIMEOUT = 8.hours.freeze LEASE_TIMEOUT = 8.hours.freeze
include Delay include Delay
include ExclusiveLeaseGuard
def execute def execute
try_obtain_lease do |lease| try_obtain_lease do
start_time = Time.now start_time = Time.now
bytes_downloaded = downloader.execute bytes_downloaded = downloader.execute
success = (bytes_downloaded.present? && bytes_downloaded >= 0) success = (bytes_downloaded.present? && bytes_downloaded >= 0)
...@@ -27,18 +28,6 @@ module Geo ...@@ -27,18 +28,6 @@ module Geo
raise raise
end end
def try_obtain_lease
uuid = Gitlab::ExclusiveLease.new(lease_key, timeout: LEASE_TIMEOUT).try_obtain
return unless uuid.present?
begin
yield
ensure
Gitlab::ExclusiveLease.cancel(lease_key, uuid)
end
end
def update_registry(bytes_downloaded, success:) def update_registry(bytes_downloaded, success:)
transfer = Geo::FileRegistry.find_or_initialize_by( transfer = Geo::FileRegistry.find_or_initialize_by(
file_type: object_type, file_type: object_type,
...@@ -60,5 +49,9 @@ module Geo ...@@ -60,5 +49,9 @@ module Geo
def lease_key def lease_key
"file_download_service:#{object_type}:#{object_db_id}" "file_download_service:#{object_type}:#{object_db_id}"
end end
def lease_timeout
LEASE_TIMEOUT
end
end end
end end
module Geo
class FileRegistryRemovalService < FileService
include ::Gitlab::Utils::StrongMemoize
LEASE_TIMEOUT = 8.hours.freeze
def execute
log_info('Executing')
try_obtain_lease do
log_info('Lease obtained')
unless file_registry
log_error('Could not find file_registry', type: object_type, id: object_db_id)
return
end
if File.exist?(file_path)
log_info('Unlinking file', file_path: file_path)
File.unlink(file_path)
end
log_info('Removing file registry', file_registry_id: file_registry.id)
file_registry.destroy
log_info('Local file & registry removed')
end
rescue SystemCallError
log_error('Could not remove file', e.message)
raise
end
private
def file_registry
strong_memoize(:file_registry) do
::Geo::FileRegistry.find_by(file_type: object_type, file_id: object_db_id)
end
end
def file_path
strong_memoize(:file_path) do
# When local storage is used, just rely on the existing methods
next file_uploader.file.path if file_uploader.object_store == ObjectStorage::Store::LOCAL
# For remote storage more juggling is needed to actually get the full path on disk
if upload?
upload = file_uploader.upload
file_uploader.class.absolute_path(upload)
else
file_uploader.class.absolute_path(file_uploader.file)
end
end
end
def file_uploader
strong_memoize(:file_uploader) do
case object_type.to_s
when 'lfs'
LfsObject.find_by!(id: object_db_id).file
when 'job_artifact'
Ci::JobArtifact.find_by!(id: object_db_id).file
when *Geo::FileService::DEFAULT_OBJECT_TYPES
Upload.find_by!(id: object_db_id).build_uploader
else
raise NameError, "Unrecognized type: #{object_type}"
end
end
rescue NameError, ActiveRecord::RecordNotFound => err
log_error('Could not build uploader', err.message)
raise
end
def upload?
Geo::FileService::DEFAULT_OBJECT_TYPES.include?(object_type)
end
def lease_key
"file_registry_removal_service:#{object_type}:#{object_db_id}"
end
def lease_timeout
LEASE_TIMEOUT
end
end
end
module Geo module Geo
class FileService class FileService
include ExclusiveLeaseGuard
include ::Gitlab::Geo::LogHelpers
attr_reader :object_type, :object_db_id attr_reader :object_type, :object_db_id
DEFAULT_OBJECT_TYPES = %w[attachment avatar file namespace_file personal_file].freeze DEFAULT_OBJECT_TYPES = %w[attachment avatar file namespace_file personal_file].freeze
...@@ -27,19 +30,7 @@ module Geo ...@@ -27,19 +30,7 @@ module Geo
klass_name.camelize klass_name.camelize
end end
def log_info(message, details = {}) def base_log_data(message)
data = log_base_data(message)
data.merge!(details) if details
Gitlab::Geo::Logger.info(data)
end
def log_error(message, error)
data = log_base_data(message)
data[:error] = error
Gitlab::Geo::Logger.error(data)
end
def log_base_data(message)
{ {
class: self.class.name, class: self.class.name,
object_type: object_type, object_type: object_type,
......
...@@ -13,10 +13,10 @@ ...@@ -13,10 +13,10 @@
= link_to icon('question-circle'), help_page_path('user/admin_area/settings/external_authorization') = link_to icon('question-circle'), help_page_path('user/admin_area/settings/external_authorization')
.form-group .form-group
= f.label :external_authorization_service_url, _('Service URL'), class: 'control-label col-sm-2' = f.label :external_authorization_service_url, _('Service URL'), class: 'control-label col-sm-2'
%span.help-block
= external_authorization_url_help_text
.col-sm-10 .col-sm-10
= f.text_field :external_authorization_service_url, class: 'form-control' = f.text_field :external_authorization_service_url, class: 'form-control'
%span.help-block
= external_authorization_url_help_text
.form-group .form-group
= f.label :external_authorization_service_timeout, _('External authorization request timeout'), class: 'control-label col-sm-2' = f.label :external_authorization_service_timeout, _('External authorization request timeout'), class: 'control-label col-sm-2'
.col-sm-10 .col-sm-10
......
...@@ -8,7 +8,7 @@ module Geo ...@@ -8,7 +8,7 @@ module Geo
current_node.files_max_capacity current_node.files_max_capacity
end end
def schedule_job(object_db_id, object_type) def schedule_job(object_type, object_db_id)
job_id = FileDownloadWorker.perform_async(object_type, object_db_id) job_id = FileDownloadWorker.perform_async(object_type, object_db_id)
{ id: object_db_id, type: object_type, job_id: job_id } if job_id { id: object_db_id, type: object_type, job_id: job_id } if job_id
...@@ -55,24 +55,24 @@ module Geo ...@@ -55,24 +55,24 @@ module Geo
def find_unsynced_lfs_objects_ids(batch_size:) def find_unsynced_lfs_objects_ids(batch_size:)
lfs_objects_finder.find_unsynced_lfs_objects(batch_size: batch_size, except_file_ids: scheduled_file_ids(:lfs)) lfs_objects_finder.find_unsynced_lfs_objects(batch_size: batch_size, except_file_ids: scheduled_file_ids(:lfs))
.pluck(:id) .pluck(:id)
.map { |id| [id, :lfs] } .map { |id| [:lfs, id] }
end end
def find_unsynced_attachments_ids(batch_size:) def find_unsynced_attachments_ids(batch_size:)
attachments_finder.find_unsynced_attachments(batch_size: batch_size, except_file_ids: scheduled_file_ids(Geo::FileService::DEFAULT_OBJECT_TYPES)) attachments_finder.find_unsynced_attachments(batch_size: batch_size, except_file_ids: scheduled_file_ids(Geo::FileService::DEFAULT_OBJECT_TYPES))
.pluck(:id, :uploader) .pluck(:uploader, :id)
.map { |id, uploader| [id, uploader.sub(/Uploader\z/, '').underscore] } .map { |uploader, id| [uploader.sub(/Uploader\z/, '').underscore, id] }
end end
def find_unsynced_job_artifacts_ids(batch_size:) def find_unsynced_job_artifacts_ids(batch_size:)
job_artifacts_finder.find_unsynced_job_artifacts(batch_size: batch_size, except_file_ids: scheduled_file_ids(:job_artifact)) job_artifacts_finder.find_unsynced_job_artifacts(batch_size: batch_size, except_file_ids: scheduled_file_ids(:job_artifact))
.pluck(:id) .pluck(:id)
.map { |id| [id, :job_artifact] } .map { |id| [:job_artifact, id] }
end end
def find_failed_upload_object_ids(batch_size:) def find_failed_upload_object_ids(batch_size:)
file_registry_finder.find_failed_file_registries(batch_size: batch_size) file_registry_finder.find_failed_file_registries(batch_size: batch_size)
.pluck(:file_id, :file_type) .pluck(:file_type, :file_id)
end end
def scheduled_file_ids(file_types) def scheduled_file_ids(file_types)
......
module Geo
class FileRegistryRemovalWorker
include ApplicationWorker
include GeoQueue
include ::Gitlab::Geo::LogHelpers
def perform(object_type, object_db_id)
log_info('Executing Geo::FileRegistryRemovalService', id: object_db_id, type: object_type)
::Geo::FileRegistryRemovalService.new(object_type, object_db_id).execute
end
end
end
module Geo
class MigratedLocalFilesCleanUpWorker < ::Geo::Scheduler::Secondary::SchedulerWorker
include ::CronjobQueue
MAX_CAPACITY = 1000
def perform
# No need to run when nothing is configured to be in Object Storage
return unless attachments_object_store_enabled? ||
lfs_objects_object_store_enabled? ||
job_artifacts_object_store_enabled?
super
end
private
def max_capacity
MAX_CAPACITY
end
def schedule_job(object_type, object_db_id)
job_id = ::Geo::FileRegistryRemovalWorker.perform_async(object_type, object_db_id)
if job_id
retval = { id: object_db_id, type: object_type, job_id: job_id }
log_info('Scheduled Geo::FileRegistryRemovalWorker', retval)
retval
end
end
def load_pending_resources
find_migrated_local_objects(batch_size: db_retrieve_batch_size)
end
def find_migrated_local_objects(batch_size:)
lfs_object_ids = find_migrated_local_lfs_objects_ids(batch_size: batch_size)
attachment_ids = find_migrated_local_attachments_ids(batch_size: batch_size)
job_artifact_ids = find_migrated_local_job_artifacts_ids(batch_size: batch_size)
take_batch(lfs_object_ids, attachment_ids, job_artifact_ids)
end
def find_migrated_local_lfs_objects_ids(batch_size:)
return [] unless lfs_objects_object_store_enabled?
lfs_objects_finder.find_migrated_local_lfs_objects(batch_size: batch_size, except_file_ids: scheduled_file_ids(:lfs))
.pluck(:id)
.map { |id| [:lfs, id] }
end
def find_migrated_local_attachments_ids(batch_size:)
return [] unless attachments_object_store_enabled?
attachments_finder.find_migrated_local_attachments(batch_size: batch_size, except_file_ids: scheduled_file_ids(Geo::FileService::DEFAULT_OBJECT_TYPES))
.pluck(:uploader, :id)
.map { |uploader, id| [uploader.sub(/Uploader\z/, '').underscore, id] }
end
def find_migrated_local_job_artifacts_ids(batch_size:)
return [] unless job_artifacts_object_store_enabled?
job_artifacts_finder.find_migrated_local_job_artifacts(batch_size: batch_size, except_file_ids: scheduled_file_ids(:job_artifact))
.pluck(:id)
.map { |id| [:job_artifact, id] }
end
def scheduled_file_ids(file_types)
file_types = Array(file_types)
scheduled_jobs.select { |data| file_types.include?(data[:type]) }.map { |data| data[:id] }
end
def attachments_object_store_enabled?
FileUploader.object_store_enabled?
end
def lfs_objects_object_store_enabled?
LfsObjectUploader.object_store_enabled?
end
def job_artifacts_object_store_enabled?
JobArtifactUploader.object_store_enabled?
end
def attachments_finder
@attachments_finder ||= AttachmentRegistryFinder.new(current_node: current_node)
end
def lfs_objects_finder
@lfs_objects_finder ||= LfsObjectRegistryFinder.new(current_node: current_node)
end
def job_artifacts_finder
@job_artifacts_finder ||= JobArtifactRegistryFinder.new(current_node: current_node)
end
end
end
...@@ -3,8 +3,15 @@ module Geo ...@@ -3,8 +3,15 @@ module Geo
module Secondary module Secondary
class PerShardSchedulerWorker < Geo::Scheduler::PerShardSchedulerWorker class PerShardSchedulerWorker < Geo::Scheduler::PerShardSchedulerWorker
def perform def perform
return unless Gitlab::Geo.geo_database_configured? unless Gitlab::Geo.geo_database_configured?
return unless Gitlab::Geo.secondary? log_info('Geo database not configured')
return
end
unless Gitlab::Geo.secondary?
log_info('Current node not a secondary')
return
end
super super
end end
......
...@@ -3,8 +3,15 @@ module Geo ...@@ -3,8 +3,15 @@ module Geo
module Secondary module Secondary
class SchedulerWorker < Geo::Scheduler::SchedulerWorker class SchedulerWorker < Geo::Scheduler::SchedulerWorker
def perform def perform
return unless Gitlab::Geo.geo_database_configured? unless Gitlab::Geo.geo_database_configured?
return unless Gitlab::Geo.secondary? log_info('Geo database not configured')
return
end
unless Gitlab::Geo.secondary?
log_info('Current node not a secondary')
return
end
super super
end end
......
---
title: Log every access when external authorization is enabled
merge_request: 5117
author:
type: added
---
title: Geo ensure files moved to object storage are cleaned up
merge_request: 4689
author:
type: added
...@@ -5,29 +5,36 @@ module EE ...@@ -5,29 +5,36 @@ module EE
RequestFailed = Class.new(StandardError) RequestFailed = Class.new(StandardError)
def self.access_allowed?(user, label) def self.access_allowed?(user, label, project_path = nil)
return true unless perform_check? return true unless perform_check?
return false unless user return false unless user
access_for_user_to_label(user, label).has_access? access_for_user_to_label(user, label, project_path).has_access?
end end
def self.rejection_reason(user, label) def self.rejection_reason(user, label)
return nil unless enabled? return nil unless enabled?
return nil unless user return nil unless user
access_for_user_to_label(user, label).reason access_for_user_to_label(user, label, nil).reason
end end
def self.access_for_user_to_label(user, label) def self.access_for_user_to_label(user, label, project_path)
if RequestStore.active? if RequestStore.active?
RequestStore.fetch("external_authorisation:user-#{user.id}:label-#{label}") do RequestStore.fetch("external_authorisation:user-#{user.id}:label-#{label}") do
EE::Gitlab::ExternalAuthorization::Access.new(user, label).load! load_access(user, label, project_path)
end end
else else
EE::Gitlab::ExternalAuthorization::Access.new(user, label).load! load_access(user, label, project_path)
end end
end end
def self.load_access(user, label, project_path)
access = EE::Gitlab::ExternalAuthorization::Access.new(user, label).load!
::EE::Gitlab::ExternalAuthorization::Logger.log_access(access, project_path)
access
end
end end
end end
end end
...@@ -2,7 +2,11 @@ module EE ...@@ -2,7 +2,11 @@ module EE
module Gitlab module Gitlab
module ExternalAuthorization module ExternalAuthorization
class Access class Access
attr_reader :access, :reason, :loaded_at attr_reader :user,
:reason,
:loaded_at,
:label,
:load_type
def initialize(user, label) def initialize(user, label)
@user, @label = user, label @user, @label = user, label
...@@ -25,10 +29,12 @@ module EE ...@@ -25,10 +29,12 @@ module EE
private private
def load_from_cache def load_from_cache
@load_type = :cache
@access, @reason, @loaded_at = cache.load @access, @reason, @loaded_at = cache.load
end end
def load_from_service def load_from_service
@load_type = :request
response = Client.new(@user, @label).request_access response = Client.new(@user, @label).request_access
@access = response.successful? @access = response.successful?
@reason = response.reason @reason = response.reason
......
module EE
module Gitlab
module ExternalAuthorization
class Logger < ::Gitlab::Logger
def self.log_access(access, project_path)
status = access.has_access? ? "GRANTED" : "DENIED"
message = "#{status} #{access.user.email} access to '#{access.label}'"
message << " (#{project_path})" if project_path.present?
message << " - #{access.load_type} #{access.loaded_at}" if access.load_type == :cache
info(message)
end
def self.file_name_noext
'external-policy-access-control'
end
end
end
end
end
...@@ -12,6 +12,7 @@ module Gitlab ...@@ -12,6 +12,7 @@ module Gitlab
geo_repository_sync_worker geo_repository_sync_worker
geo_file_download_dispatch_worker geo_file_download_dispatch_worker
geo_repository_verification_secondary_scheduler_worker geo_repository_verification_secondary_scheduler_worker
geo_migrated_local_files_clean_up_worker
].freeze ].freeze
GEO_JOBS = (COMMON_JOBS + PRIMARY_JOBS + SECONDARY_JOBS).freeze GEO_JOBS = (COMMON_JOBS + PRIMARY_JOBS + SECONDARY_JOBS).freeze
......
...@@ -4,17 +4,13 @@ FactoryBot.define do ...@@ -4,17 +4,13 @@ FactoryBot.define do
file_type :file file_type :file
success true success true
trait :avatar do trait(:attachment) { file_type :attachment }
file_type :avatar trait(:avatar) { file_type :avatar }
end trait(:file) { file_type :file }
trait(:job_artifact) { file_type :job_artifact }
trait :lfs do trait(:lfs) { file_type :lfs }
file_type :lfs trait(:namespace_file) { file_type :namespace_file }
end trait(:personal_file) { file_type :personal_file }
trait :job_artifact do
file_type :job_artifact
end
trait :with_file do trait :with_file do
after(:build, :stub) do |registry, _| after(:build, :stub) do |registry, _|
......
require 'spec_helper' require 'spec_helper'
feature 'Edit group settings', :js do feature 'Edit group settings', :js do
include Select2Helper
given(:user) { create(:user) } given(:user) { create(:user) }
given(:group) { create(:group, path: 'foo') } given(:group) { create(:group, path: 'foo') }
...@@ -21,6 +23,18 @@ feature 'Edit group settings', :js do ...@@ -21,6 +23,18 @@ feature 'Edit group settings', :js do
visit group_ldap_group_links_path(group) visit group_ldap_group_links_path(group)
end end
scenario 'adds new LDAP synchronization', :js do
page.within('form#new_ldap_group_link') do
select2 'my-group-cn', from: '#ldap_group_link_cn'
select 'Developer', from: 'ldap_group_link_group_access'
click_button 'Add synchronization'
end
expect(page).not_to have_content('No LDAP synchronizations')
expect(page).to have_content('As Developer on ldap server')
end
scenario 'shows the LDAP filter section' do scenario 'shows the LDAP filter section' do
choose('sync_method_filter') choose('sync_method_filter')
...@@ -37,7 +51,7 @@ feature 'Edit group settings', :js do ...@@ -37,7 +51,7 @@ feature 'Edit group settings', :js do
end end
end end
context 'when the LDAP group sync filter feature is available' do context 'when the LDAP group sync filter feature is not available' do
before do before do
stub_licensed_features(ldap_group_sync_filter: false) stub_licensed_features(ldap_group_sync_filter: false)
......
...@@ -78,7 +78,9 @@ describe 'viewing an issue with cross project references' do ...@@ -78,7 +78,9 @@ describe 'viewing an issue with cross project references' do
it 'only hits the external service for the project the user is viewing' do it 'only hits the external service for the project the user is viewing' do
expect(EE::Gitlab::ExternalAuthorization) expect(EE::Gitlab::ExternalAuthorization)
.to receive(:access_allowed?).with(user, 'default_label').at_least(1).and_return(true) .to receive(:access_allowed?).with(user, 'default_label', any_args).at_least(1).and_return(true)
expect(EE::Gitlab::ExternalAuthorization)
.not_to receive(:access_allowed?).with(user, 'other_label', any_args)
visit project_issue_path(project, issue) visit project_issue_path(project, issue)
end end
......
...@@ -11,10 +11,10 @@ describe Geo::AttachmentRegistryFinder, :geo do ...@@ -11,10 +11,10 @@ describe Geo::AttachmentRegistryFinder, :geo do
let(:synced_project) { create(:project, group: synced_group) } let(:synced_project) { create(:project, group: synced_group) }
let(:unsynced_project) { create(:project, group: unsynced_group, repository_storage: 'broken') } let(:unsynced_project) { create(:project, group: unsynced_group, repository_storage: 'broken') }
let!(:upload_1) { create(:upload, model: synced_group) } let(:upload_1) { create(:upload, model: synced_group) }
let!(:upload_2) { create(:upload, model: unsynced_group) } let(:upload_2) { create(:upload, model: unsynced_group) }
let!(:upload_3) { create(:upload, :issuable_upload, model: synced_project) } let(:upload_3) { create(:upload, :issuable_upload, model: synced_project) }
let!(:upload_4) { create(:upload, model: unsynced_project) } let(:upload_4) { create(:upload, model: unsynced_project) }
let(:upload_5) { create(:upload, model: synced_project) } let(:upload_5) { create(:upload, model: synced_project) }
let(:upload_6) { create(:upload, :personal_snippet_upload) } let(:upload_6) { create(:upload, :personal_snippet_upload) }
let(:upload_7) { create(:upload, model: synced_subgroup) } let(:upload_7) { create(:upload, model: synced_subgroup) }
...@@ -188,6 +188,50 @@ describe Geo::AttachmentRegistryFinder, :geo do ...@@ -188,6 +188,50 @@ describe Geo::AttachmentRegistryFinder, :geo do
expect(uploads).to match_ids(upload_2, upload_3, upload_4) expect(uploads).to match_ids(upload_2, upload_3, upload_4)
end end
end end
describe '#find_migrated_local_attachments' do
it 'delegates to the correct method' do
expect(subject).to receive("#{method_prefix}_find_migrated_local_attachments".to_sym).and_call_original
subject.find_migrated_local_attachments(batch_size: 100)
end
it 'returns uploads stored remotely and successfully synced locally' do
upload = create(:upload, :object_storage, model: synced_group)
create(:geo_file_registry, :avatar, file_id: upload.id)
uploads = subject.find_migrated_local_attachments(batch_size: 100)
expect(uploads).to match_ids(upload)
end
it 'excludes uploads stored remotely, but not synced yet' do
create(:upload, :object_storage, model: synced_group)
uploads = subject.find_migrated_local_attachments(batch_size: 100)
expect(uploads).to be_empty
end
it 'excludes synced uploads that are stored locally' do
create(:geo_file_registry, :avatar, file_id: upload_5.id)
uploads = subject.find_migrated_local_attachments(batch_size: 100)
expect(uploads).to be_empty
end
it 'excludes except_file_ids' do
upload_a = create(:upload, :object_storage, model: synced_group)
upload_b = create(:upload, :object_storage, model: unsynced_group)
create(:geo_file_registry, :avatar, file_id: upload_a.id, success: true)
create(:geo_file_registry, :avatar, file_id: upload_b.id, success: true)
uploads = subject.find_migrated_local_attachments(batch_size: 10, except_file_ids: [upload_a.id])
expect(uploads).to match_ids(upload_b)
end
end
end end
# Disable transactions via :delete method because a foreign table # Disable transactions via :delete method because a foreign table
......
...@@ -8,15 +8,18 @@ describe Geo::JobArtifactRegistryFinder, :geo do ...@@ -8,15 +8,18 @@ describe Geo::JobArtifactRegistryFinder, :geo do
let(:synced_project) { create(:project, group: synced_group) } let(:synced_project) { create(:project, group: synced_group) }
let(:unsynced_project) { create(:project) } let(:unsynced_project) { create(:project) }
let!(:job_artifact_1) { create(:ci_job_artifact, id: 1, project: synced_project) } let(:job_artifact_1) { create(:ci_job_artifact, project: synced_project) }
let!(:job_artifact_2) { create(:ci_job_artifact, id: 2, project: unsynced_project) } let(:job_artifact_2) { create(:ci_job_artifact, project: unsynced_project) }
let!(:job_artifact_3) { create(:ci_job_artifact, id: 3, project: synced_project) } let(:job_artifact_3) { create(:ci_job_artifact, project: synced_project) }
let!(:job_artifact_4) { create(:ci_job_artifact, id: 4, project: unsynced_project) } let(:job_artifact_4) { create(:ci_job_artifact, project: unsynced_project) }
let(:job_artifact_remote_1) { create(:ci_job_artifact, :remote_store, project: synced_project) }
let(:job_artifact_remote_2) { create(:ci_job_artifact, :remote_store, project: unsynced_project) }
subject { described_class.new(current_node: secondary) } subject { described_class.new(current_node: secondary) }
before do before do
stub_current_geo_node(secondary) stub_current_geo_node(secondary)
stub_artifacts_object_storage
end end
describe '#count_synced_job_artifacts' do describe '#count_synced_job_artifacts' do
...@@ -56,15 +59,22 @@ describe Geo::JobArtifactRegistryFinder, :geo do ...@@ -56,15 +59,22 @@ describe Geo::JobArtifactRegistryFinder, :geo do
end end
shared_examples 'counts all the things' do shared_examples 'counts all the things' do
describe '#count_job_artifacts' do describe '#count_local_job_artifacts' do
before do
job_artifact_1
job_artifact_2
job_artifact_3
job_artifact_4
end
it 'counts job artifacts' do it 'counts job artifacts' do
expect(subject.count_job_artifacts).to eq 4 expect(subject.count_local_job_artifacts).to eq 4
end end
it 'ignores remote job artifacts' do it 'ignores remote job artifacts' do
job_artifact_1.update!(file_store: ObjectStorage::Store::REMOTE) job_artifact_1.update!(file_store: ObjectStorage::Store::REMOTE)
expect(subject.count_job_artifacts).to eq 3 expect(subject.count_local_job_artifacts).to eq 3
end end
context 'with selective sync' do context 'with selective sync' do
...@@ -73,13 +83,13 @@ describe Geo::JobArtifactRegistryFinder, :geo do ...@@ -73,13 +83,13 @@ describe Geo::JobArtifactRegistryFinder, :geo do
end end
it 'counts job artifacts' do it 'counts job artifacts' do
expect(subject.count_job_artifacts).to eq 2 expect(subject.count_local_job_artifacts).to eq 2
end end
it 'ignores remote job artifacts' do it 'ignores remote job artifacts' do
job_artifact_1.update!(file_store: ObjectStorage::Store::REMOTE) job_artifact_1.update!(file_store: ObjectStorage::Store::REMOTE)
expect(subject.count_job_artifacts).to eq 1 expect(subject.count_local_job_artifacts).to eq 1
end end
end end
end end
...@@ -94,10 +104,9 @@ describe Geo::JobArtifactRegistryFinder, :geo do ...@@ -94,10 +104,9 @@ describe Geo::JobArtifactRegistryFinder, :geo do
end end
it 'ignores remote job artifacts' do it 'ignores remote job artifacts' do
create(:geo_file_registry, :job_artifact, file_id: job_artifact_1.id) create(:geo_file_registry, :job_artifact, file_id: job_artifact_remote_1.id)
create(:geo_file_registry, :job_artifact, file_id: job_artifact_2.id) create(:geo_file_registry, :job_artifact, file_id: job_artifact_2.id)
create(:geo_file_registry, :job_artifact, file_id: job_artifact_3.id) create(:geo_file_registry, :job_artifact, file_id: job_artifact_3.id)
job_artifact_1.update!(file_store: ObjectStorage::Store::REMOTE)
expect(subject.count_synced_job_artifacts).to eq 2 expect(subject.count_synced_job_artifacts).to eq 2
end end
...@@ -122,10 +131,9 @@ describe Geo::JobArtifactRegistryFinder, :geo do ...@@ -122,10 +131,9 @@ describe Geo::JobArtifactRegistryFinder, :geo do
end end
it 'ignores remote job artifacts' do it 'ignores remote job artifacts' do
create(:geo_file_registry, :job_artifact, file_id: job_artifact_1.id) create(:geo_file_registry, :job_artifact, file_id: job_artifact_remote_1.id)
create(:geo_file_registry, :job_artifact, file_id: job_artifact_2.id) create(:geo_file_registry, :job_artifact, file_id: job_artifact_2.id)
create(:geo_file_registry, :job_artifact, file_id: job_artifact_3.id) create(:geo_file_registry, :job_artifact, file_id: job_artifact_3.id)
job_artifact_1.update!(file_store: ObjectStorage::Store::REMOTE)
expect(subject.count_synced_job_artifacts).to eq 1 expect(subject.count_synced_job_artifacts).to eq 1
end end
...@@ -142,10 +150,9 @@ describe Geo::JobArtifactRegistryFinder, :geo do ...@@ -142,10 +150,9 @@ describe Geo::JobArtifactRegistryFinder, :geo do
end end
it 'ignores remote job artifacts' do it 'ignores remote job artifacts' do
create(:geo_file_registry, :job_artifact, file_id: job_artifact_1.id, success: false) create(:geo_file_registry, :job_artifact, file_id: job_artifact_remote_1.id, success: false)
create(:geo_file_registry, :job_artifact, file_id: job_artifact_2.id, success: false) create(:geo_file_registry, :job_artifact, file_id: job_artifact_2.id, success: false)
create(:geo_file_registry, :job_artifact, file_id: job_artifact_3.id, success: false) create(:geo_file_registry, :job_artifact, file_id: job_artifact_3.id, success: false)
job_artifact_1.update!(file_store: ObjectStorage::Store::REMOTE)
expect(subject.count_failed_job_artifacts).to eq 2 expect(subject.count_failed_job_artifacts).to eq 2
end end
...@@ -212,6 +219,48 @@ describe Geo::JobArtifactRegistryFinder, :geo do ...@@ -212,6 +219,48 @@ describe Geo::JobArtifactRegistryFinder, :geo do
expect(job_artifacts).to match_ids(job_artifact_4) expect(job_artifacts).to match_ids(job_artifact_4)
end end
end end
describe '#find_migrated_local_job_artifacts' do
it 'delegates to the correct method' do
expect(subject).to receive("#{method_prefix}_find_migrated_local_job_artifacts".to_sym).and_call_original
subject.find_migrated_local_job_artifacts(batch_size: 10)
end
it 'returns job artifacts remotely and successfully synced locally' do
job_artifact = create(:ci_job_artifact, :remote_store, project: synced_project)
create(:geo_file_registry, :job_artifact, file_id: job_artifact.id)
job_artifacts = subject.find_migrated_local_job_artifacts(batch_size: 10)
expect(job_artifacts).to match_ids(job_artifact)
end
it 'excludes job artifacts stored remotely, but not synced yet' do
create(:ci_job_artifact, :remote_store, project: synced_project)
job_artifacts = subject.find_migrated_local_job_artifacts(batch_size: 10)
expect(job_artifacts).to be_empty
end
it 'excludes synced job artifacts that are stored locally' do
create(:geo_file_registry, :job_artifact, file_id: job_artifact_1.id)
job_artifacts = subject.find_migrated_local_job_artifacts(batch_size: 10)
expect(job_artifacts).to be_empty
end
it 'excludes except_file_ids' do
create(:geo_file_registry, :job_artifact, file_id: job_artifact_remote_1.id)
create(:geo_file_registry, :job_artifact, file_id: job_artifact_remote_2.id)
job_artifacts = subject.find_migrated_local_job_artifacts(batch_size: 10, except_file_ids: [job_artifact_remote_1.id])
expect(job_artifacts).to match_ids(job_artifact_remote_2)
end
end
end end
# Disable transactions via :delete method because a foreign table # Disable transactions via :delete method because a foreign table
......
...@@ -8,15 +8,18 @@ describe Geo::LfsObjectRegistryFinder, :geo do ...@@ -8,15 +8,18 @@ describe Geo::LfsObjectRegistryFinder, :geo do
let(:synced_project) { create(:project, group: synced_group) } let(:synced_project) { create(:project, group: synced_group) }
let(:unsynced_project) { create(:project) } let(:unsynced_project) { create(:project) }
let!(:lfs_object_1) { create(:lfs_object) } let(:lfs_object_1) { create(:lfs_object) }
let!(:lfs_object_2) { create(:lfs_object) } let(:lfs_object_2) { create(:lfs_object) }
let!(:lfs_object_3) { create(:lfs_object) } let(:lfs_object_3) { create(:lfs_object) }
let!(:lfs_object_4) { create(:lfs_object) } let(:lfs_object_4) { create(:lfs_object) }
let(:lfs_object_remote_1) { create(:lfs_object, :object_storage) }
let(:lfs_object_remote_2) { create(:lfs_object, :object_storage) }
subject { described_class.new(current_node: secondary) } subject { described_class.new(current_node: secondary) }
before do before do
stub_current_geo_node(secondary) stub_current_geo_node(secondary)
stub_lfs_object_storage
end end
describe '#count_synced_lfs_objects' do describe '#count_synced_lfs_objects' do
...@@ -35,10 +38,9 @@ describe Geo::LfsObjectRegistryFinder, :geo do ...@@ -35,10 +38,9 @@ describe Geo::LfsObjectRegistryFinder, :geo do
end end
it 'ignores remote LFS objects' do it 'ignores remote LFS objects' do
create(:geo_file_registry, :lfs, file_id: lfs_object_1.id) create(:geo_file_registry, :lfs, file_id: lfs_object_remote_1.id)
create(:geo_file_registry, :lfs, file_id: lfs_object_2.id) create(:geo_file_registry, :lfs, file_id: lfs_object_2.id)
create(:geo_file_registry, :lfs, file_id: lfs_object_3.id) create(:geo_file_registry, :lfs, file_id: lfs_object_3.id)
lfs_object_1.update_column(:file_store, ObjectStorage::Store::REMOTE)
expect(subject.count_synced_lfs_objects).to eq 2 expect(subject.count_synced_lfs_objects).to eq 2
end end
...@@ -69,10 +71,9 @@ describe Geo::LfsObjectRegistryFinder, :geo do ...@@ -69,10 +71,9 @@ describe Geo::LfsObjectRegistryFinder, :geo do
end end
it 'ignores remote LFS objects' do it 'ignores remote LFS objects' do
create(:geo_file_registry, :lfs, file_id: lfs_object_1.id) create(:geo_file_registry, :lfs, file_id: lfs_object_remote_1.id)
create(:geo_file_registry, :lfs, file_id: lfs_object_2.id) create(:geo_file_registry, :lfs, file_id: lfs_object_2.id)
create(:geo_file_registry, :lfs, file_id: lfs_object_3.id) create(:geo_file_registry, :lfs, file_id: lfs_object_3.id)
lfs_object_1.update_column(:file_store, ObjectStorage::Store::REMOTE)
expect(subject.count_synced_lfs_objects).to eq 1 expect(subject.count_synced_lfs_objects).to eq 1
end end
...@@ -95,10 +96,9 @@ describe Geo::LfsObjectRegistryFinder, :geo do ...@@ -95,10 +96,9 @@ describe Geo::LfsObjectRegistryFinder, :geo do
end end
it 'ignores remote LFS objects' do it 'ignores remote LFS objects' do
create(:geo_file_registry, :lfs, file_id: lfs_object_1.id, success: false) create(:geo_file_registry, :lfs, file_id: lfs_object_remote_1.id, success: false)
create(:geo_file_registry, :lfs, file_id: lfs_object_2.id, success: false) create(:geo_file_registry, :lfs, file_id: lfs_object_2.id, success: false)
create(:geo_file_registry, :lfs, file_id: lfs_object_3.id, success: false) create(:geo_file_registry, :lfs, file_id: lfs_object_3.id, success: false)
lfs_object_1.update_column(:file_store, ObjectStorage::Store::REMOTE)
expect(subject.count_failed_lfs_objects).to eq 2 expect(subject.count_failed_lfs_objects).to eq 2
end end
...@@ -129,10 +129,9 @@ describe Geo::LfsObjectRegistryFinder, :geo do ...@@ -129,10 +129,9 @@ describe Geo::LfsObjectRegistryFinder, :geo do
end end
it 'ignores remote LFS objects' do it 'ignores remote LFS objects' do
create(:geo_file_registry, :lfs, file_id: lfs_object_1.id, success: false) create(:geo_file_registry, :lfs, file_id: lfs_object_remote_1.id, success: false)
create(:geo_file_registry, :lfs, file_id: lfs_object_2.id, success: false) create(:geo_file_registry, :lfs, file_id: lfs_object_2.id, success: false)
create(:geo_file_registry, :lfs, file_id: lfs_object_3.id, success: false) create(:geo_file_registry, :lfs, file_id: lfs_object_3.id, success: false)
lfs_object_1.update_column(:file_store, ObjectStorage::Store::REMOTE)
expect(subject.count_failed_lfs_objects).to eq 1 expect(subject.count_failed_lfs_objects).to eq 1
end end
...@@ -165,6 +164,47 @@ describe Geo::LfsObjectRegistryFinder, :geo do ...@@ -165,6 +164,47 @@ describe Geo::LfsObjectRegistryFinder, :geo do
expect(lfs_objects).to match_ids(lfs_object_4) expect(lfs_objects).to match_ids(lfs_object_4)
end end
end end
describe '#find_migrated_local_lfs_objects' do
it 'delegates to the correct method' do
expect(subject).to receive("#{method_prefix}_find_migrated_local_lfs_objects".to_sym).and_call_original
subject.find_migrated_local_lfs_objects(batch_size: 10)
end
it 'returns LFS objects remotely and successfully synced locally' do
create(:geo_file_registry, :lfs, file_id: lfs_object_remote_1.id)
lfs_objects = subject.find_migrated_local_lfs_objects(batch_size: 10)
expect(lfs_objects).to match_ids(lfs_object_remote_1)
end
it 'excludes LFS objects stored remotely, but not synced yet' do
create(:lfs_object, :object_storage)
lfs_objects = subject.find_migrated_local_lfs_objects(batch_size: 10)
expect(lfs_objects).to be_empty
end
it 'excludes synced LFS objects that are stored locally' do
create(:geo_file_registry, :avatar, file_id: lfs_object_1.id)
lfs_objects = subject.find_migrated_local_lfs_objects(batch_size: 10)
expect(lfs_objects).to be_empty
end
it 'excludes except_file_ids' do
create(:geo_file_registry, :lfs, file_id: lfs_object_remote_1.id)
create(:geo_file_registry, :lfs, file_id: lfs_object_remote_2.id)
lfs_objects = subject.find_migrated_local_lfs_objects(batch_size: 10, except_file_ids: [lfs_object_remote_1.id])
expect(lfs_objects).to match_ids(lfs_object_remote_2)
end
end
end end
# Disable transactions via :delete method because a foreign table # Disable transactions via :delete method because a foreign table
......
require 'spec_helper'

# Specs for the external-authorization access logger: verifies the exact
# message format for granted/denied requests, with and without a project
# path, and the extra suffix added for cache-served decisions.
describe EE::Gitlab::ExternalAuthorization::Logger do
  # Fixed timestamp so the cache-hit message assertion is deterministic.
  let(:request_time) { Time.parse('2018-03-26 20:22:15') }

  # Builds a double standing in for an access-check result.
  # `load_type` is :request for live lookups and :cache for cached ones.
  def fake_access(has_access, user, load_type = :request)
    access = double('access')
    allow(access).to receive_messages(user: user,
                                      has_access?: has_access,
                                      loaded_at: request_time,
                                      label: 'dummy_label',
                                      load_type: load_type)
    access
  end

  describe '.log_access' do
    it 'logs a nice message for an access request' do
      expected_message = "GRANTED admin@example.com access to 'dummy_label' (the/project/path)"
      fake_access = fake_access(true, build(:user, email: 'admin@example.com'))

      expect(described_class).to receive(:info).with(expected_message)

      described_class.log_access(fake_access, 'the/project/path')
    end

    it 'does not trip without a project path' do
      # No "(path)" suffix should be appended when project_path is nil.
      expected_message = "DENIED admin@example.com access to 'dummy_label'"
      fake_access = fake_access(false, build(:user, email: 'admin@example.com'))

      expect(described_class).to receive(:info).with(expected_message)

      described_class.log_access(fake_access, nil)
    end

    it 'adds the load time for cached accesses' do
      # Cache-served decisions carry "- cache <loaded_at>" for auditability.
      expected_message = "DENIED admin@example.com access to 'dummy_label' - cache #{request_time}"
      fake_access = fake_access(false, build(:user, email: 'admin@example.com'), :cache)

      expect(described_class).to receive(:info).with(expected_message)

      described_class.log_access(fake_access, nil)
    end
  end
end
...@@ -21,7 +21,7 @@ describe EE::Gitlab::ExternalAuthorization, :request_store do ...@@ -21,7 +21,7 @@ describe EE::Gitlab::ExternalAuthorization, :request_store do
end end
describe '#rejection_reason' do describe '#rejection_reason' do
it 'is alwaus nil when the feature is disabled' do it 'is always nil when the feature is disabled' do
expect(::Gitlab::CurrentSettings.current_application_settings) expect(::Gitlab::CurrentSettings.current_application_settings)
.to receive(:external_authorization_service_enabled?) { false } .to receive(:external_authorization_service_enabled?) { false }
...@@ -38,7 +38,17 @@ describe EE::Gitlab::ExternalAuthorization, :request_store do ...@@ -38,7 +38,17 @@ describe EE::Gitlab::ExternalAuthorization, :request_store do
expect(EE::Gitlab::ExternalAuthorization::Access) expect(EE::Gitlab::ExternalAuthorization::Access)
.to receive(:new).with(user, label).once.and_call_original .to receive(:new).with(user, label).once.and_call_original
2.times { described_class.access_for_user_to_label(user, label) } 2.times { described_class.access_for_user_to_label(user, label, nil) }
end
it 'logs the access request once per request' do
expect(EE::Gitlab::ExternalAuthorization::Logger)
.to receive(:log_access)
.with(an_instance_of(EE::Gitlab::ExternalAuthorization::Access),
'the/project/path')
.once
2.times { described_class.access_for_user_to_label(user, label, 'the/project/path') }
end end
end end
end end
...@@ -31,6 +31,7 @@ describe Gitlab::Geo::CronManager, :geo do ...@@ -31,6 +31,7 @@ describe Gitlab::Geo::CronManager, :geo do
geo_repository_verification_secondary_scheduler_worker geo_repository_verification_secondary_scheduler_worker
geo_metrics_update_worker geo_metrics_update_worker
geo_prune_event_log_worker geo_prune_event_log_worker
geo_migrated_local_files_clean_up_worker
].freeze ].freeze
before(:all) do before(:all) do
...@@ -49,7 +50,8 @@ describe Gitlab::Geo::CronManager, :geo do ...@@ -49,7 +50,8 @@ describe Gitlab::Geo::CronManager, :geo do
[ [
job('geo_file_download_dispatch_worker'), job('geo_file_download_dispatch_worker'),
job('geo_repository_sync_worker'), job('geo_repository_sync_worker'),
job('geo_repository_verification_secondary_scheduler_worker') job('geo_repository_verification_secondary_scheduler_worker'),
job('geo_migrated_local_files_clean_up_worker')
] ]
end end
......
...@@ -164,16 +164,22 @@ describe ProjectPolicy do ...@@ -164,16 +164,22 @@ describe ProjectPolicy do
end end
it 'prevents all but seeing a public project in a list when access is denied' do it 'prevents all but seeing a public project in a list when access is denied' do
external_service_deny_access(owner, project) [developer, owner, build(:user), nil].each do |user|
external_service_deny_access(developer, project) external_service_deny_access(user, project)
policy = described_class.new(user, project)
[developer, owner, create(:user), nil].each do |user|
policy = described_class.new(owner, project)
expect(policy).not_to be_allowed(:read_project) expect(policy).not_to be_allowed(:read_project)
expect(policy).not_to be_allowed(:owner_access) expect(policy).not_to be_allowed(:owner_access)
expect(policy).not_to be_allowed(:change_namespace) expect(policy).not_to be_allowed(:change_namespace)
end end
end end
it 'passes the full path to external authorization for logging purposes' do
expect(EE::Gitlab::ExternalAuthorization)
.to receive(:access_allowed?).with(owner, 'default_label', project.full_path).and_call_original
described_class.new(owner, project).allowed?(:read_project)
end
end end
end end
end end
require 'spec_helper'

# Specs for Geo::FileRegistryRemovalService: on a Geo secondary, the service
# removes a locally synced file from disk and deletes its registry row, for
# every supported file type, both local-storage and after a migration to
# object storage.
describe Geo::FileRegistryRemovalService do
  include ::EE::GeoHelpers

  set(:secondary) { create(:geo_node) }

  before do
    stub_current_geo_node(secondary)
    # Pretend the exclusive lease is always obtainable so #execute runs.
    allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true)
  end

  describe '#execute' do
    it 'delegates log_error to the Geo logger' do
      # With the lease unavailable the service should report via the Geo
      # logger rather than raising.
      allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(false)

      expect(Gitlab::Geo::Logger).to receive(:error)

      described_class.new(:lfs, 99).execute
    end

    # Shared behavior: callers provide `file_registry` and `file_path`.
    shared_examples 'removes' do
      subject(:service) { described_class.new(file_registry.file_type, file_registry.file_id) }

      it 'file from disk' do
        expect do
          service.execute
        end.to change { File.exist?(file_path) }.from(true).to(false)
      end

      it 'registry when file was deleted successfully' do
        expect do
          service.execute
        end.to change(Geo::FileRegistry, :count).by(-1)
      end
    end

    context 'with LFS object' do
      let!(:lfs_object) { create(:lfs_object, :with_file) }
      let!(:file_registry) { create(:geo_file_registry, :lfs, file_id: lfs_object.id) }
      let!(:file_path) { lfs_object.file.path }

      it_behaves_like 'removes'

      context 'migrated to object storage' do
        before do
          stub_lfs_object_storage
          lfs_object.update_column(:file_store, LfsObjectUploader::Store::REMOTE)
        end

        it_behaves_like 'removes'
      end
    end

    context 'with job artifact' do
      let!(:job_artifact) { create(:ci_job_artifact, :archive) }
      let!(:file_registry) { create(:geo_file_registry, :job_artifact, file_id: job_artifact.id) }
      let!(:file_path) { job_artifact.file.path }

      it_behaves_like 'removes'

      context 'migrated to object storage' do
        before do
          stub_artifacts_object_storage
          job_artifact.update_column(:file_store, JobArtifactUploader::Store::REMOTE)
        end

        it_behaves_like 'removes'
      end
    end

    context 'with avatar' do
      let!(:upload) { create(:user, :with_avatar).avatar.upload }
      let!(:file_registry) { create(:geo_file_registry, :avatar, file_id: upload.id) }
      let!(:file_path) { upload.build_uploader.file.path }

      it_behaves_like 'removes'

      context 'migrated to object storage' do
        before do
          stub_uploads_object_storage(AvatarUploader)
          upload.update_column(:store, AvatarUploader::Store::REMOTE)
        end

        it_behaves_like 'removes'
      end
    end

    context 'with attachment' do
      let!(:upload) { create(:note, :with_attachment).attachment.upload }
      let!(:file_registry) { create(:geo_file_registry, :attachment, file_id: upload.id) }
      let!(:file_path) { upload.build_uploader.file.path }

      it_behaves_like 'removes'

      context 'migrated to object storage' do
        before do
          stub_uploads_object_storage(AttachmentUploader)
          upload.update_column(:store, AttachmentUploader::Store::REMOTE)
        end

        it_behaves_like 'removes'
      end
    end

    # NOTE(review): this context duplicates 'with avatar' — presumably a
    # placeholder until a FileUploader-backed (issuable) upload fixture is
    # used for the :file registry type; confirm and replace.
    context 'with file' do # TODO
      let!(:upload) { create(:user, :with_avatar).avatar.upload }
      let!(:file_registry) { create(:geo_file_registry, :avatar, file_id: upload.id) }
      let!(:file_path) { upload.build_uploader.file.path }

      it_behaves_like 'removes'

      context 'migrated to object storage' do
        before do
          stub_uploads_object_storage(AvatarUploader)
          upload.update_column(:store, AvatarUploader::Store::REMOTE)
        end

        it_behaves_like 'removes'
      end
    end

    context 'with namespace_file' do
      set(:group) { create(:group) }
      let(:file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png') }
      let!(:upload) do
        # Store through the uploader so a real file exists on disk, then
        # fetch the Upload row it created.
        NamespaceFileUploader.new(group).store!(file)
        Upload.find_by(model: group, uploader: NamespaceFileUploader)
      end
      let!(:file_registry) { create(:geo_file_registry, :namespace_file, file_id: upload.id) }
      let!(:file_path) { upload.build_uploader.file.path }

      it_behaves_like 'removes'

      context 'migrated to object storage' do
        before do
          stub_uploads_object_storage(NamespaceFileUploader)
          upload.update_column(:store, NamespaceFileUploader::Store::REMOTE)
        end

        it_behaves_like 'removes'
      end
    end

    context 'with personal_file' do
      let(:snippet) { create(:personal_snippet) }
      let(:file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/png') }
      let!(:upload) do
        PersonalFileUploader.new(snippet).store!(file)
        Upload.find_by(model: snippet, uploader: PersonalFileUploader)
      end
      let!(:file_registry) { create(:geo_file_registry, :personal_file, file_id: upload.id) }
      let!(:file_path) { upload.build_uploader.file.path }

      it_behaves_like 'removes'

      context 'migrated to object storage' do
        before do
          stub_uploads_object_storage(PersonalFileUploader)
          upload.update_column(:store, PersonalFileUploader::Store::REMOTE)
        end

        it_behaves_like 'removes'
      end
    end
  end
end
...@@ -21,7 +21,7 @@ module ExternalAuthorizationServiceHelpers ...@@ -21,7 +21,7 @@ module ExternalAuthorizationServiceHelpers
allow(EE::Gitlab::ExternalAuthorization) allow(EE::Gitlab::ExternalAuthorization)
.to receive(:access_allowed?) .to receive(:access_allowed?)
.with(user, classification_label) .with(user, classification_label, any_args)
.and_return(allowed) .and_return(allowed)
end end
......
require 'spec_helper'

# Specs for Geo::MigratedLocalFilesCleanUpWorker: on a Geo secondary it
# should schedule Geo::FileRegistryRemovalWorker jobs only for files that
# were migrated to object storage AND already synced locally, across LFS
# objects, attachments/uploads, and CI job artifacts. Run twice: with and
# without PostgreSQL FDW.
describe Geo::MigratedLocalFilesCleanUpWorker, :geo do
  include ::EE::GeoHelpers

  let(:primary) { create(:geo_node, :primary, host: 'primary-geo-node') }
  let(:secondary) { create(:geo_node) }

  subject(:worker) { described_class.new }

  before do
    stub_current_geo_node(secondary)
    # Make the exclusive lease obtainable/renewable so the scheduling loop
    # actually executes.
    allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).and_return(true)
    allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:renew).and_return(true)
  end

  # `skip_tests` is true when the FDW-dependent run cannot be exercised on
  # this database (see the describe blocks at the bottom).
  shared_examples '#perform' do |skip_tests|
    before do
      skip('FDW is not configured') if skip_tests
    end

    it 'does not run when node is disabled' do
      secondary.enabled = false
      secondary.save

      expect(worker).not_to receive(:try_obtain_lease)

      worker.perform
    end

    context 'with LFS objects' do
      let(:lfs_object_local) { create(:lfs_object) }
      let(:lfs_object_remote) { create(:lfs_object, :object_storage) }

      before do
        stub_lfs_object_storage

        # Both objects are registered as synced; only the remote one has
        # been migrated off local storage.
        create(:geo_file_registry, :lfs, file_id: lfs_object_local.id)
        create(:geo_file_registry, :lfs, file_id: lfs_object_remote.id)
      end

      it 'schedules job for file stored remotely and synced locally' do
        expect(worker).to receive(:schedule_job).with(:lfs, lfs_object_remote.id)
        expect(worker).not_to receive(:schedule_job).with(anything, lfs_object_local.id)

        worker.perform
      end

      it 'schedules worker for file stored remotely and synced locally' do
        expect(Geo::FileRegistryRemovalWorker).to receive(:perform_async).with(:lfs, lfs_object_remote.id)
        expect(Geo::FileRegistryRemovalWorker).not_to receive(:perform_async).with(anything, lfs_object_local.id)

        worker.perform
      end
    end

    context 'with attachments' do
      # One upload per registry file type the worker has to distinguish.
      let(:avatar_upload) { create(:upload) }
      let(:personal_snippet_upload) { create(:upload, :personal_snippet_upload) }
      let(:issuable_upload) { create(:upload, :issuable_upload) }
      let(:namespace_upload) { create(:upload, :namespace_upload) }
      let(:attachment_upload) { create(:upload, :attachment_upload) }

      before do
        create(:geo_file_registry, :avatar, file_id: avatar_upload.id)
        create(:geo_file_registry, :personal_file, file_id: personal_snippet_upload.id)
        create(:geo_file_registry, :file, file_id: issuable_upload.id)
        create(:geo_file_registry, :namespace_file, file_id: namespace_upload.id)
        create(:geo_file_registry, :attachment, file_id: attachment_upload.id)
      end

      it 'schedules nothing for attachments stored locally' do
        expect(worker).not_to receive(:schedule_job).with(anything, avatar_upload.id)
        expect(worker).not_to receive(:schedule_job).with(anything, personal_snippet_upload.id)
        expect(worker).not_to receive(:schedule_job).with(anything, issuable_upload.id)
        expect(worker).not_to receive(:schedule_job).with(anything, namespace_upload.id)
        expect(worker).not_to receive(:schedule_job).with(anything, attachment_upload.id)

        worker.perform
      end

      context 'attachments stored remotely' do
        before do
          stub_uploads_object_storage(AvatarUploader)
          stub_uploads_object_storage(PersonalFileUploader)
          stub_uploads_object_storage(FileUploader)
          stub_uploads_object_storage(NamespaceFileUploader)
          stub_uploads_object_storage(AttachmentUploader)

          # Flip every upload to remote storage so all of them qualify for
          # local clean-up.
          avatar_upload.update_column(:store, FileUploader::Store::REMOTE)
          personal_snippet_upload.update_column(:store, FileUploader::Store::REMOTE)
          issuable_upload.update_column(:store, FileUploader::Store::REMOTE)
          namespace_upload.update_column(:store, FileUploader::Store::REMOTE)
          attachment_upload.update_column(:store, FileUploader::Store::REMOTE)
        end

        it 'schedules jobs for uploads stored remotely and synced locally' do
          expect(worker).to receive(:schedule_job).with('avatar', avatar_upload.id)
          expect(worker).to receive(:schedule_job).with('personal_file', personal_snippet_upload.id)
          expect(worker).to receive(:schedule_job).with('file', issuable_upload.id)
          expect(worker).to receive(:schedule_job).with('namespace_file', namespace_upload.id)
          expect(worker).to receive(:schedule_job).with('attachment', attachment_upload.id)

          worker.perform
        end

        it 'schedules workers for uploads stored remotely and synced locally' do
          expect(Geo::FileRegistryRemovalWorker).to receive(:perform_async).with('avatar', avatar_upload.id)
          expect(Geo::FileRegistryRemovalWorker).to receive(:perform_async).with('personal_file', personal_snippet_upload.id)
          expect(Geo::FileRegistryRemovalWorker).to receive(:perform_async).with('file', issuable_upload.id)
          expect(Geo::FileRegistryRemovalWorker).to receive(:perform_async).with('namespace_file', namespace_upload.id)
          expect(Geo::FileRegistryRemovalWorker).to receive(:perform_async).with('attachment', attachment_upload.id)

          worker.perform
        end
      end
    end

    context 'with job artifacts' do
      let(:job_artifact_local) { create(:ci_job_artifact) }
      let(:job_artifact_remote) { create(:ci_job_artifact, :remote_store) }

      before do
        stub_artifacts_object_storage

        create(:geo_file_registry, :job_artifact, file_id: job_artifact_local.id)
        create(:geo_file_registry, :job_artifact, file_id: job_artifact_remote.id)
      end

      it 'schedules job for artifact stored remotely and synced locally' do
        expect(worker).to receive(:schedule_job).with(:job_artifact, job_artifact_remote.id)
        expect(worker).not_to receive(:schedule_job).with(anything, job_artifact_local.id)

        worker.perform
      end

      it 'schedules worker for artifact stored remotely and synced locally' do
        expect(Geo::FileRegistryRemovalWorker).to receive(:perform_async).with(:job_artifact, job_artifact_remote.id)
        expect(Geo::FileRegistryRemovalWorker).not_to receive(:perform_async).with(anything, job_artifact_local.id)

        worker.perform
      end
    end
  end

  # Disable transactions via :delete method because a foreign table
  # can't see changes inside a transaction of a different connection.
  describe 'when PostgreSQL FDW is available', :geo, :delete do
    # Skip if FDW isn't activated on this database
    it_behaves_like '#perform', Gitlab::Database.postgresql? && !Gitlab::Geo::Fdw.enabled?
  end

  describe 'when PostgreSQL FDW is not enabled', :geo do
    before do
      allow(Gitlab::Geo::Fdw).to receive(:enabled?).and_return(false)
    end

    it_behaves_like '#perform', false
  end
end
Feature: Groups
Background:
Given I sign in as "John Doe"
And "John Doe" is owner of group "Owned"
Scenario: I should not see a group if it does not exist
When I visit group "NonExistentGroup" page
Then page status code should be 404
@javascript
Scenario: I should see group "Owned" dashboard list
When I visit group "Owned" page
Then I should see group "Owned" projects list
@javascript
Scenario: I should see group "Owned" activity feed
When I visit group "Owned" activity page
And I should see projects activity feed
Scenario: I should see group "Owned" issues list
Given project from group "Owned" has issues assigned to me
When I visit group "Owned" issues page
Then I should see issues from group "Owned" assigned to me
Scenario: I should not see issues from archived project in "Owned" group issues list
Given Group "Owned" has archived project
And the archived project have some issues
When I visit group "Owned" issues page
Then I should not see issues from the archived project
Scenario: I should see group "Owned" merge requests list
Given project from group "Owned" has merge requests assigned to me
When I visit group "Owned" merge requests page
Then I should see merge requests from group "Owned" assigned to me
Scenario: I should not see merge requests from archived project in "Owned" group merge requests list
Given Group "Owned" has archived project
# Fixed: step text previously read "merge_requests", which does not match the
# Spinach step definition 'the archived project have some merge requests'.
And the archived project have some merge requests
When I visit group "Owned" merge requests page
Then I should not see merge requests from the archived project
Scenario: I edit group "Owned" avatar
When I visit group "Owned" settings page
And I change group "Owned" avatar
And I visit group "Owned" settings page
Then I should see new group "Owned" avatar
And I should see the "Remove avatar" button
Scenario: I remove group "Owned" avatar
When I visit group "Owned" settings page
And I have group "Owned" avatar
And I visit group "Owned" settings page
And I remove group "Owned" avatar
Then I should not see group "Owned" avatar
And I should not see the "Remove avatar" button
Scenario: Add new LDAP synchronization
Given LDAP enabled
When I visit Group "Owned" LDAP settings page
And I add a new LDAP synchronization
Then I see a new LDAP synchronization listed
And LDAP disabled
# Group projects in settings
Scenario: I should see all projects in the project list in settings
Given Group "Owned" has archived project
When I visit group "Owned" projects page
Then I should see group "Owned" projects list
And I should see "archived" label
# Public group
@javascript
Scenario: Signed out user should see group
Given "Mary Jane" is owner of group "Owned"
And I am a signed out user
And Group "Owned" has a public project "Public-project"
When I visit group "Owned" page
Then I should see group "Owned"
Then I should see project "Public-project"
# Spinach step definitions backing features/groups.feature. Step strings must
# match the feature file text exactly; do not edit them without updating the
# feature file in lockstep.
class Spinach::Features::Groups < Spinach::FeatureSteps
include SharedAuthentication
include SharedPaths
include SharedGroup
include SharedUser
step 'I should see group "Owned"' do
expect(page).to have_content 'Owned'
end
step 'I am a signed out user' do
logout
end
step 'Group "Owned" has a public project "Public-project"' do
group = owned_group
@project = create :project, :public,
group: group,
name: "Public-project"
end
step 'I should see project "Public-project"' do
expect(page).to have_content 'Public-project'
end
step 'I should see group "Owned" projects list' do
# Every project of the group must appear as a link on the current page.
owned_group.projects.each do |project|
expect(page).to have_link project.name
end
end
step 'I should see projects activity feed' do
expect(page).to have_content 'joined project'
end
step 'I should see issues from group "Owned" assigned to me' do
assigned_to_me(:issues).each do |issue|
expect(page).to have_content issue.title
end
end
step 'I should not see issues from the archived project' do
# @archived_project is set by the 'Group "Owned" has archived project' step.
@archived_project.issues.each do |issue|
expect(page).not_to have_content issue.title
end
end
step 'I should not see merge requests from the archived project' do
@archived_project.merge_requests.each do |mr|
expect(page).not_to have_content mr.title
end
end
step 'I should see merge requests from group "Owned" assigned to me' do
# Only the first 81 characters are checked: MR titles are truncated in the UI.
assigned_to_me(:merge_requests).each do |issue|
expect(page).to have_content issue.title[0..80]
end
end
step 'project from group "Owned" has issues assigned to me' do
create :issue,
project: project,
assignees: [current_user],
author: current_user
end
step 'project from group "Owned" has merge requests assigned to me' do
create :merge_request,
source_project: project,
target_project: project,
assignee: current_user,
author: current_user
end
step 'I should be redirected to group page' do
expect(current_path).to eq group_path(Group.last)
end
step 'I change group name' do
page.within '#tab-edit' do
fill_in 'group_name', with: 'new-name'
click_button "Save group"
end
end
step 'I change group "Owned" name to "new-name"' do
fill_in 'group_name', with: 'new-name'
fill_in 'group_path', with: 'new-name'
click_button "Save group"
end
step 'I should see new group "Owned" name' do
page.within ".navbar-gitlab" do
expect(page).to have_content "new-name"
end
end
# Avatar steps: upload a fixture image, then reload so later assertions see
# the persisted state.
step 'I change group "Owned" avatar' do
attach_file(:group_avatar, File.join(Rails.root, 'spec', 'fixtures', 'banana_sample.gif'))
click_button "Save group"
owned_group.reload
end
step 'I should see new group "Owned" avatar' do
expect(owned_group.avatar).to be_instance_of AvatarUploader
expect(owned_group.avatar.url).to eq "/uploads/-/system/group/avatar/#{Group.find_by(name: "Owned").id}/banana_sample.gif"
end
step 'I should see the "Remove avatar" button' do
expect(page).to have_link("Remove avatar")
end
# NOTE(review): identical body to 'I change group "Owned" avatar'; kept
# separate because step text must match the feature file wording.
step 'I have group "Owned" avatar' do
attach_file(:group_avatar, File.join(Rails.root, 'spec', 'fixtures', 'banana_sample.gif'))
click_button "Save group"
owned_group.reload
end
step 'I remove group "Owned" avatar' do
click_link "Remove avatar"
owned_group.reload
end
step 'I should not see group "Owned" avatar' do
expect(owned_group.avatar?).to eq false
end
step 'I should not see the "Remove avatar" button' do
expect(page).not_to have_link("Remove avatar")
end
step 'Group "Owned" has archived project' do
group = Group.find_by(name: 'Owned')
@archived_project = create(:project, :archived, namespace: group, path: "archived-project")
end
step 'I should see "archived" label' do
expect(page).to have_xpath("//span[@class='label label-warning']", text: 'archived')
end
# LDAP steps toggle the ldap config stub around the synchronization scenario.
step 'LDAP enabled' do
allow(Gitlab.config.ldap).to receive(:enabled).and_return(true)
end
step 'LDAP disabled' do
allow(Gitlab.config.ldap).to receive(:enabled).and_return(false)
end
step 'I add a new LDAP synchronization' do
page.within('form#new_ldap_group_link') do
# The cn field is hidden (select2-backed), so set it directly.
find('#ldap_group_link_cn', visible: false).set('my-group-cn')
# fill_in('LDAP Group cn', with: 'my-group-cn', visible: false)
select 'Developer', from: "ldap_group_link_group_access"
click_button 'Add synchronization'
end
end
step 'I see a new LDAP synchronization listed' do
expect(page).not_to have_content('No LDAP synchronizations')
expect(page).to have_content('As Developer on ldap server')
end
step 'I visit group "NonExistentGroup" page' do
visit group_path("NonExistentGroup")
end
step 'the archived project have some issues' do
create :issue,
project: @archived_project,
assignees: [current_user],
author: current_user
end
step 'the archived project have some merge requests' do
create :merge_request,
source_project: @archived_project,
target_project: @archived_project,
assignee: current_user,
author: current_user
end
private
# Returns the current user's assigned issuables (key is :issues or
# :merge_requests) within the first project of the owned group.
def assigned_to_me(key)
project.send(key).assigned_to(current_user)
end
def project
owned_group.projects.first
end
end
...@@ -52,10 +52,6 @@ module SharedPaths ...@@ -52,10 +52,6 @@ module SharedPaths
visit edit_group_path(Group.find_by(name: "Owned")) visit edit_group_path(Group.find_by(name: "Owned"))
end end
step 'I visit group "Owned" LDAP settings page' do
visit group_ldap_group_links_path(Group.find_by(name: "Owned"))
end
step 'I visit group "Owned" projects page' do step 'I visit group "Owned" projects page' do
visit projects_group_path(Group.find_by(name: "Owned")) visit projects_group_path(Group.find_by(name: "Owned"))
end end
......
require 'spec_helper'
# Controller spec for the project-scoped CI lint tool: developers may view and
# lint config; guests get 404.
describe Projects::Ci::LintsController do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
before do
sign_in(user)
end
describe 'GET #show' do
context 'with enough privileges' do
before do
project.add_developer(user)
get :show, namespace_id: project.namespace, project_id: project
end
it 'should be success' do
expect(response).to be_success
end
it 'should render show page' do
expect(response).to render_template :show
end
it 'should retrieve project' do
expect(assigns(:project)).to eq(project)
end
end
context 'without enough privileges' do
before do
project.add_guest(user)
get :show, namespace_id: project.namespace, project_id: project
end
it 'should respond with 404' do
expect(response).to have_gitlab_http_status(404)
end
end
end
describe 'POST #create' do
# A remote include: the lint must fetch and merge this file, so the HTTP
# request is stubbed with WebMock below.
let(:remote_file_path) { 'https://gitlab.com/gitlab-org/gitlab-ce/blob/1234/.gitlab-ci-1.yml' }
let(:remote_file_content) do
<<~HEREDOC
before_script:
- apt-get update -qq && apt-get install -y -qq sqlite3 libsqlite3-dev nodejs
- ruby -v
- which ruby
- gem install bundler --no-ri --no-rdoc
- bundle install --jobs $(nproc) "${FLAGS[@]}"
HEREDOC
end
let(:content) do
<<~HEREDOC
include:
- #{remote_file_path}
rubocop:
script:
- bundle exec rubocop
HEREDOC
end
context 'with a valid gitlab-ci.yml' do
before do
WebMock.stub_request(:get, remote_file_path).to_return(body: remote_file_content)
project.add_developer(user)
post :create, namespace_id: project.namespace, project_id: project, content: content
end
it 'should be success' do
expect(response).to be_success
end
it 'render show page' do
expect(response).to render_template :show
end
it 'should retrieve project' do
expect(assigns(:project)).to eq(project)
end
end
context 'with an invalid gitlab-ci.yml' do
# 'scriptt' is a deliberate typo to trigger the unknown-key validation.
let(:content) do
<<~HEREDOC
rubocop:
scriptt:
- bundle exec rubocop
HEREDOC
end
before do
project.add_developer(user)
post :create, namespace_id: project.namespace, project_id: project, content: content
end
it 'should assign errors' do
expect(assigns[:error]).to eq('jobs:rubocop config contains unknown keys: scriptt')
end
end
context 'without enough privileges' do
before do
project.add_guest(user)
post :create, namespace_id: project.namespace, project_id: project, content: content
end
it 'should respond with 404' do
expect(response).to have_gitlab_http_status(404)
end
end
end
end
...@@ -8,11 +8,30 @@ feature 'Group activity page' do ...@@ -8,11 +8,30 @@ feature 'Group activity page' do
context 'when signed in' do context 'when signed in' do
before do before do
sign_in(user) sign_in(user)
visit path
end end
it_behaves_like "it has an RSS button with current_user's RSS token" describe 'RSS' do
it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token" before do
visit path
end
it_behaves_like "it has an RSS button with current_user's RSS token"
it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token"
end
context 'when project in the group', :js do
let(:project) { create(:project, :public, namespace: group) }
before do
project.add_master(user)
visit path
end
it 'renders user joined to project event' do
expect(page).to have_content 'joined project'
end
end
end end
context 'when signed out' do context 'when signed out' do
......
...@@ -101,6 +101,27 @@ feature 'Edit group settings' do ...@@ -101,6 +101,27 @@ feature 'Edit group settings' do
end end
end end
end end
describe 'edit group avatar' do
before do
visit edit_group_path(group)
attach_file(:group_avatar, Rails.root.join('spec', 'fixtures', 'banana_sample.gif'))
expect { click_button 'Save group' }.to change { group.reload.avatar? }.to(true)
end
it 'uploads new group avatar' do
expect(group.avatar).to be_instance_of AvatarUploader
expect(group.avatar.url).to eq "/uploads/-/system/group/avatar/#{group.id}/banana_sample.gif"
expect(page).to have_link('Remove avatar')
end
it 'removes group avatar' do
expect { click_link 'Remove avatar' }.to change { group.reload.avatar? }.to(false)
expect(page).not_to have_link('Remove avatar')
end
end
end end
def update_path(new_group_path) def update_path(new_group_path)
......
...@@ -3,8 +3,11 @@ require 'spec_helper' ...@@ -3,8 +3,11 @@ require 'spec_helper'
feature 'Group issues page' do feature 'Group issues page' do
include FilteredSearchHelpers include FilteredSearchHelpers
let(:group) { create(:group) }
let(:project) { create(:project, :public, group: group)}
let(:path) { issues_group_path(group) }
context 'with shared examples' do context 'with shared examples' do
let(:path) { issues_group_path(group) }
let(:issuable) { create(:issue, project: project, title: "this is my created issuable")} let(:issuable) { create(:issue, project: project, title: "this is my created issuable")}
include_examples 'project features apply to issuables', Issue include_examples 'project features apply to issuables', Issue
...@@ -31,7 +34,6 @@ feature 'Group issues page' do ...@@ -31,7 +34,6 @@ feature 'Group issues page' do
let(:access_level) { ProjectFeature::ENABLED } let(:access_level) { ProjectFeature::ENABLED }
let(:user) { user_in_group } let(:user) { user_in_group }
let(:user2) { user_outside_group } let(:user2) { user_outside_group }
let(:path) { issues_group_path(group) }
it 'filters by only group users' do it 'filters by only group users' do
filtered_search.set('assignee:') filtered_search.set('assignee:')
...@@ -43,9 +45,7 @@ feature 'Group issues page' do ...@@ -43,9 +45,7 @@ feature 'Group issues page' do
end end
context 'issues list', :nested_groups do context 'issues list', :nested_groups do
let(:group) { create(:group)}
let(:subgroup) { create(:group, parent: group) } let(:subgroup) { create(:group, parent: group) }
let(:project) { create(:project, :public, group: group)}
let(:subgroup_project) { create(:project, :public, group: subgroup)} let(:subgroup_project) { create(:project, :public, group: subgroup)}
let!(:issue) { create(:issue, project: project, title: 'root group issue') } let!(:issue) { create(:issue, project: project, title: 'root group issue') }
let!(:subgroup_issue) { create(:issue, project: subgroup_project, title: 'subgroup issue') } let!(:subgroup_issue) { create(:issue, project: subgroup_project, title: 'subgroup issue') }
...@@ -59,5 +59,17 @@ feature 'Group issues page' do ...@@ -59,5 +59,17 @@ feature 'Group issues page' do
expect(page).to have_content('subgroup issue') expect(page).to have_content('subgroup issue')
end end
end end
context 'when project is archived' do
before do
project.archive!
end
it 'does not render issue' do
visit path
expect(page).not_to have_content issue.title
end
end
end end
end end
...@@ -5,14 +5,14 @@ feature 'Group merge requests page' do ...@@ -5,14 +5,14 @@ feature 'Group merge requests page' do
let(:path) { merge_requests_group_path(group) } let(:path) { merge_requests_group_path(group) }
let(:issuable) { create(:merge_request, source_project: project, target_project: project, title: 'this is my created issuable') } let(:issuable) { create(:merge_request, source_project: project, target_project: project, title: 'this is my created issuable') }
let(:access_level) { ProjectFeature::ENABLED }
let(:user) { user_in_group }
include_examples 'project features apply to issuables', MergeRequest include_examples 'project features apply to issuables', MergeRequest
context 'archived issuable' do context 'archived issuable' do
let(:project_archived) { create(:project, :archived, :merge_requests_enabled, :repository, group: group) } let(:project_archived) { create(:project, :archived, :merge_requests_enabled, :repository, group: group) }
let(:issuable_archived) { create(:merge_request, source_project: project_archived, target_project: project_archived, title: 'issuable of an archived project') } let(:issuable_archived) { create(:merge_request, source_project: project_archived, target_project: project_archived, title: 'issuable of an archived project') }
let(:access_level) { ProjectFeature::ENABLED }
let(:user) { user_in_group }
before do before do
issuable_archived issuable_archived
...@@ -36,9 +36,17 @@ feature 'Group merge requests page' do ...@@ -36,9 +36,17 @@ feature 'Group merge requests page' do
end end
end end
context 'when merge request assignee to user' do
before do
issuable.update!(assignee: user)
visit path
end
it { expect(page).to have_content issuable.title[0..80] }
end
context 'group filtered search', :js do context 'group filtered search', :js do
let(:access_level) { ProjectFeature::ENABLED }
let(:user) { user_in_group }
let(:user2) { user_outside_group } let(:user2) { user_outside_group }
it 'filters by assignee only group users' do it 'filters by assignee only group users' do
......
...@@ -15,14 +15,33 @@ feature 'Group show page' do ...@@ -15,14 +15,33 @@ feature 'Group show page' do
end end
it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token" it_behaves_like "an autodiscoverable RSS feed with current_user's RSS token"
context 'when group does not exist' do
let(:path) { group_path('not-exist') }
it { expect(status_code).to eq(404) }
end
end end
context 'when signed out' do context 'when signed out' do
before do describe 'RSS' do
visit path before do
visit path
end
it_behaves_like "an autodiscoverable RSS feed without an RSS token"
end end
it_behaves_like "an autodiscoverable RSS feed without an RSS token" context 'when group has a public project', :js do
let!(:project) { create(:project, :public, namespace: group) }
it 'renders public project' do
visit path
expect(page).to have_link group.name
expect(page).to have_link project.name
end
end
end end
context 'subgroup support' do context 'subgroup support' do
......
require 'rails_helper'
# Feature spec: a signed-in group owner sees archived projects in the group
# projects list, marked with the "archived" warning label.
describe 'User browse group projects page' do
let(:user) { create :user }
let(:group) { create :group }
context 'when user is owner' do
before do
group.add_owner(user)
end
context 'when user signed in' do
before do
sign_in(user)
end
context 'when group has archived project', :js do
let!(:project) { create :project, :archived, namespace: group }
it 'renders projects list' do
visit projects_group_path(group)
expect(page).to have_link project.name
expect(page).to have_xpath("//span[@class='label label-warning']", text: 'archived')
end
end
end
end
end
require 'spec_helper' require 'spec_helper'
describe 'CI Lint', :js do describe 'CI Lint', :js do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
before do before do
sign_in(create(:user)) project.add_developer(user)
sign_in(user)
visit ci_lint_path visit project_ci_lint_path(project)
find('#ci-editor') find('#ci-editor')
execute_script("ace.edit('ci-editor').setValue(#{yaml_content.to_json});") execute_script("ace.edit('ci-editor').setValue(#{yaml_content.to_json});")
......
import Vue from 'vue';
import component from 'ee/vue_shared/security_reports/components/error_row.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
// Spec for ee/vue_shared/security_reports/components/error_row.vue.
// Fixed: suite was named 'loading row' (copy-paste from loading_row_spec),
// but it exercises the error row component.
describe('error row', () => {
  const Component = Vue.extend(component);
  let vm;

  beforeEach(() => {
    vm = mountComponent(Component);
  });

  afterEach(() => {
    // Tear down the Vue instance so specs don't leak DOM/state.
    vm.$destroy();
  });

  it('renders warning icon with error message', () => {
    expect(vm.$el.querySelector('.report-block-list-icon span').classList).toContain(
      'js-ci-status-icon-warning',
    );
    expect(vm.$el.querySelector('.report-block-list-issue-description').textContent.trim()).toEqual(
      'There was an error loading results',
    );
  });
});
import Vue from 'vue';
import component from 'ee/vue_shared/security_reports/components/loading_row.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
// Spec for ee/vue_shared/security_reports/components/loading_row.vue.
describe('loading row', () => {
  const LoadingRow = Vue.extend(component);
  let vm;

  beforeEach(() => {
    vm = mountComponent(LoadingRow);
  });

  afterEach(() => {
    vm.$destroy();
  });

  it('renders loading icon with message', () => {
    const root = vm.$el;
    const icon = root.querySelector('.report-block-list-icon i');
    const description = root.querySelector('.report-block-list-issue-description');

    expect(icon.classList).toContain('fa-spin');
    expect(description.textContent.trim()).toEqual('in progress');
  });
});
import Vue from 'vue';
import component from 'ee/vue_shared/security_reports/components/summary_row.vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
// Spec for ee/vue_shared/security_reports/components/summary_row.vue.
describe('Summary row', () => {
  const SummaryRow = Vue.extend(component);
  let vm;

  // Fixture props passed straight to the component under test.
  const props = {
    summary: 'SAST detected 1 new vulnerability and 1 fixed vulnerability',
    popoverOptions: {
      title: 'Static Application Security Testing (SAST)',
      content: '<a>Learn more about SAST</a>',
    },
    statusIcon: 'warning',
  };

  beforeEach(() => {
    vm = mountComponent(SummaryRow, props);
  });

  afterEach(() => {
    vm.$destroy();
  });

  it('renders provided summary', () => {
    const summaryText = vm.$el
      .querySelector('.report-block-list-issue-description-text')
      .textContent.trim();

    expect(summaryText).toEqual(props.summary);
  });

  it('renders provided icon', () => {
    const iconClasses = vm.$el.querySelector('.report-block-list-icon span').classList;

    expect(iconClasses).toContain('js-ci-status-icon-warning');
  });
});
...@@ -111,6 +111,10 @@ RSpec.configure do |config| ...@@ -111,6 +111,10 @@ RSpec.configure do |config|
end end
# EE-specific stop # EE-specific stop
config.after(:all) do
TestEnv.clean_test_path
end
config.before(:example) do config.before(:example) do
# Skip pre-receive hook check so we can use the web editor and merge. # Skip pre-receive hook check so we can use the web editor and merge.
allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil]) allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
......
require 'spec_helper' require 'spec_helper'
describe 'ci/lints/show' do describe 'projects/ci/lints/show' do
include Devise::Test::ControllerHelpers include Devise::Test::ControllerHelpers
let(:project) { create(:project, :repository) }
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) }
describe 'XSS protection' do describe 'XSS protection' do
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) }
before do before do
assign(:project, project)
assign(:status, true) assign(:status, true)
assign(:builds, config_processor.builds) assign(:builds, config_processor.builds)
assign(:stages, config_processor.stages) assign(:stages, config_processor.stages)
...@@ -48,22 +49,21 @@ describe 'ci/lints/show' do ...@@ -48,22 +49,21 @@ describe 'ci/lints/show' do
end end
end end
let(:content) do context 'when the content is valid' do
{ let(:content) do
build_template: { {
script: './build.sh', build_template: {
tags: ['dotnet'], script: './build.sh',
only: ['test@dude/repo'], tags: ['dotnet'],
except: ['deploy'], only: ['test@dude/repo'],
environment: 'testing' except: ['deploy'],
environment: 'testing'
}
} }
} end
end
let(:config_processor) { Gitlab::Ci::YamlProcessor.new(YAML.dump(content)) }
context 'when the content is valid' do
before do before do
assign(:project, project)
assign(:status, true) assign(:status, true)
assign(:builds, config_processor.builds) assign(:builds, config_processor.builds)
assign(:stages, config_processor.stages) assign(:stages, config_processor.stages)
...@@ -83,6 +83,7 @@ describe 'ci/lints/show' do ...@@ -83,6 +83,7 @@ describe 'ci/lints/show' do
context 'when the content is invalid' do context 'when the content is invalid' do
before do before do
assign(:project, project)
assign(:status, false) assign(:status, false)
assign(:error, 'Undefined error') assign(:error, 'Undefined error')
end end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment