Commit 4cefac10 authored by Dmitriy Zaporozhets

Merge branch 'ce-to-ee-2018-07-10' into 'master'

CE upstream - 2018-07-10 15:22 UTC

Closes gitaly#387, gitaly#874, gitaly#750, and gitaly#665

See merge request gitlab-org/gitlab-ee!6453
parents 78db7be9 49158837
@@ -82,14 +82,5 @@ export const expandAllFiles = ({ commit }) => {
   commit(types.EXPAND_ALL_FILES);
 };
 
-export default {
-  setBaseConfig,
-  fetchDiffFiles,
-  setInlineDiffViewType,
-  setParallelDiffViewType,
-  showCommentForm,
-  cancelCommentForm,
-  loadMoreLines,
-  loadCollapsedDiff,
-  expandAllFiles,
-};
+// prevent babel-plugin-rewire from generating an invalid default during karma tests
+export default () => {};
import Vue from 'vue';
import Vuex from 'vuex';
import diffsModule from './modules';
Vue.use(Vuex);
export default new Vuex.Store({
modules: {
diffs: diffsModule,
},
});
-import actions from '../actions';
+import * as actions from '../actions';
 import * as getters from '../getters';
 import mutations from '../mutations';
 import createState from './diff_state';
......
@@ -200,7 +200,7 @@ js-autosize markdown-area js-vue-issue-note-form js-vue-textarea"
         class="btn btn-cancel note-edit-cancel js-close-discussion-note-form"
         type="button"
         @click="cancelHandler()">
-        {{ __('Discard draft') }}
+        Cancel
       </button>
     </div>
   </form>
......
@@ -5,9 +5,13 @@ module TimeHelper
     seconds = interval_in_seconds - minutes * 60

     if minutes >= 1
-      "#{pluralize(minutes, "minute")} #{pluralize(seconds, "second")}"
+      if seconds % 60 == 0
+        pluralize(minutes, "minute")
+      else
+        [pluralize(minutes, "minute"), pluralize(seconds, "second")].to_sentence
+      end
     else
-      "#{pluralize(seconds, "second")}"
+      pluralize(seconds, "second")
     end
   end
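For quick reference, a usage sketch of the reworked helper. The expected strings mirror the updated `time_helper_spec.rb` later in this diff; the snippet assumes a Rails console where `TimeHelper` and ActionView's `pluralize`/`to_sentence` are available.

```ruby
# Illustrative only: exercising the new branching in time_interval_in_words.
include TimeHelper

time_interval_in_words(60)   # => "1 minute"                  (seconds % 60 == 0)
time_interval_in_words(100)  # => "1 minute and 40 seconds"
time_interval_in_words(121)  # => "2 minutes and 1 second"
time_interval_in_words(0)    # => "0 seconds"
```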
......
@@ -441,9 +441,9 @@ module Ci
     end

     def artifacts_metadata_entry(path, **options)
-      artifacts_metadata.use_file do |metadata_path|
+      artifacts_metadata.open do |metadata_stream|
         metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
-          metadata_path,
+          metadata_stream,
           path,
           **options)
......
@@ -71,6 +71,28 @@ class GitlabUploader < CarrierWave::Uploader::Base
     File.join('/', self.class.base_dir, dynamic_segment, filename)
   end

+  def cached_size
+    size
+  end
+
+  def open
+    stream =
+      if file_storage?
+        File.open(path, "rb") if path
+      else
+        ::Gitlab::HttpIO.new(url, cached_size) if url
+      end
+
+    return unless stream
+    return stream unless block_given?
+
+    begin
+      yield(stream)
+    ensure
+      stream.close
+    end
+  end
+
   private

   # Designed to be overridden by child uploaders that have a dynamic path
......
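A hypothetical usage sketch of the `GitlabUploader#open` method added above: it yields a `File` for local storage or a `Gitlab::HttpIO` for remote (object storage) files, and closes the stream after the block. The `artifact.file` receiver is an assumption for illustration; any mount backed by a `GitlabUploader` subclass would behave the same way.

```ruby
# Sketch only: stream an upload line by line regardless of storage backend.
artifact.file.open do |stream|
  stream.each_line do |line|
    puts line
  end
end

# Without a block, the caller owns the stream and must close it.
stream = artifact.file.open
begin
  first_chunk = stream.read(1024)
ensure
  stream&.close
end
```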
@@ -18,14 +18,6 @@ class JobArtifactUploader < GitlabUploader
     dynamic_segment
   end

-  def open
-    if file_storage?
-      File.open(path, "rb") if path
-    else
-      ::Gitlab::Ci::Trace::HttpIO.new(url, cached_size) if url
-    end
-  end
-
   private

   def dynamic_segment
......
@@ -5,7 +5,7 @@
 %ol
   %li
     = _("Install a Runner compatible with GitLab CI")
-    = (_("(checkout the %{link} for information on how to install it).") % { link: link }).html_safe
+    = (_("(check out the %{link} for information on how to install it).") % { link: link }).html_safe
   %li
     = _("Specify the following URL during the Runner setup:")
     %code#coordinator_address= root_url(only_path: false)
......
@@ -40,5 +40,5 @@
     = yield(:note_actions)
-    %a.btn.btn-cancel.js-note-discard{ role: "button", data: {cancel_text: "Discard draft" } }
+    %a.btn.btn-cancel.js-note-discard{ role: "button", data: {cancel_text: "Cancel" } }
       Discard draft
---
title: Adds with_projects optional parameter to GET /groups/:id API endpoint
merge_request: 20494
author:
type: changed
---
title: Allow updating a project's avatar without other params
merge_request:
author: Jamie Schembri
type: fixed
---
title: Removes unused vuex code in mr refactor and removes unneeded dependencies
merge_request: 20499
author:
type: other
---
title: Access metadata directly from Object Storage
merge_request:
author:
type: performance
---
title: Rails5 MySQL fix rename_column as part of cleanup_concurrent_column_type_change
merge_request: 20514
author: Jasper Maes
type: fixed
@@ -29,6 +29,11 @@ module ActiveRecord
       def datetime_with_timezone(column_name, **options)
         column(column_name, :datetime_with_timezone, options)
       end
+
+      # Disable timestamp alias to datetime
+      def aliased_types(name, fallback)
+        fallback
+      end
     end
   end
 end
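A hedged migration sketch showing the effect of the patched `TableDefinition`: `datetime_with_timezone` is available as a column helper, while `timestamp` no longer silently aliases to `datetime`. The migration class, table, column name, and the `[5.0]` version tag are assumptions for illustration only.

```ruby
# Hypothetical migration, for illustration only.
class AddProcessedAtToCiBuilds < ActiveRecord::Migration[5.0]
  def change
    change_table :ci_builds do |t|
      # Uses the patched helper; stores a timezone-aware datetime column.
      t.datetime_with_timezone :processed_at
    end
  end
end
```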
@@ -210,6 +210,7 @@ Parameters:
 | --------- | ---- | -------- | ----------- |
 | `id` | integer/string | yes | The ID or [URL-encoded path of the group](README.md#namespaced-path-encoding) owned by the authenticated user |
 | `with_custom_attributes` | boolean | no | Include [custom attributes](custom_attributes.md) in response (admins only) |
+| `with_projects` | boolean | no | Include details from projects that belong to the specified group (defaults to `true`). |

 ```bash
 curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/groups/4

@@ -362,6 +363,30 @@ Example response:
 }
 ```
When adding the parameter `with_projects=false`, projects will not be returned.
```bash
curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/groups/4?with_projects=false
```
Example response:
```json
{
"id": 4,
"name": "Twitter",
"path": "twitter",
"description": "Aliquid qui quis dignissimos distinctio ut commodi voluptas est.",
"visibility": "public",
"avatar_url": null,
"web_url": "https://gitlab.example.com/groups/twitter",
"request_access_enabled": false,
"full_name": "Twitter",
"full_path": "twitter",
"parent_id": null
}
```
 ## New group

 Creates a new project group. Available only for users who can create groups.
......
@@ -49,7 +49,7 @@ There are currently three options for `merge_method` to choose from:
 ## List all projects

 Get a list of all visible projects across GitLab for the authenticated user.
-When accessed without authentication, only public projects are returned.
+When accessed without authentication, only public projects with "simple" fields are returned.

 ```
 GET /projects

@@ -62,7 +62,7 @@ GET /projects
 | `order_by` | string | no | Return projects ordered by `id`, `name`, `path`, `created_at`, `updated_at`, or `last_activity_at` fields. Default is `created_at` |
 | `sort` | string | no | Return projects sorted in `asc` or `desc` order. Default is `desc` |
 | `search` | string | no | Return list of projects matching the search criteria |
-| `simple` | boolean | no | Return only the ID, URL, name, and path of each project |
+| `simple` | boolean | no | Return only limited fields for each project. This is a no-op without authentication as then _only_ simple fields are returned. |
 | `owned` | boolean | no | Limit by projects owned by the current user |
 | `membership` | boolean | no | Limit by projects that the current user is a member of |
 | `starred` | boolean | no | Limit by projects starred by the current user |

@@ -71,6 +71,41 @@ GET /projects
 | `with_issues_enabled` | boolean | no | Limit by enabled issues feature |
 | `with_merge_requests_enabled` | boolean | no | Limit by enabled merge requests feature |

+When `simple=true` or the user is unauthenticated this returns something like:
```json
[
{
"id": 4,
"description": null,
"default_branch": "master",
"ssh_url_to_repo": "git@example.com:diaspora/diaspora-client.git",
"http_url_to_repo": "http://example.com/diaspora/diaspora-client.git",
"web_url": "http://example.com/diaspora/diaspora-client",
"readme_url": "http://example.com/diaspora/diaspora-client/blob/master/README.md",
"tag_list": [
"example",
"disapora client"
],
"name": "Diaspora Client",
"name_with_namespace": "Diaspora / Diaspora Client",
"path": "diaspora-client",
"path_with_namespace": "diaspora/diaspora-client",
"created_at": "2013-09-30T13:46:02Z",
"last_activity_at": "2013-09-30T13:46:02Z",
"forks_count": 0,
"avatar_url": "http://example.com/uploads/project/avatar/4/uploads/avatar.png",
"star_count": 0,
},
{
"id": 6,
"description": null,
"default_branch": "master",
...
```
When the user is authenticated and `simple` is not set this returns something like:
```json ```json
[ [
{ {
@@ -252,7 +287,7 @@ GET /users/:user_id/projects
 | `order_by` | string | no | Return projects ordered by `id`, `name`, `path`, `created_at`, `updated_at`, or `last_activity_at` fields. Default is `created_at` |
 | `sort` | string | no | Return projects sorted in `asc` or `desc` order. Default is `desc` |
 | `search` | string | no | Return list of projects matching the search criteria |
-| `simple` | boolean | no | Return only the ID, URL, name, and path of each project |
+| `simple` | boolean | no | Return only limited fields for each project. This is a no-op without authentication as then _only_ simple fields are returned. |
 | `owned` | boolean | no | Limit by projects owned by the current user |
 | `membership` | boolean | no | Limit by projects that the current user is a member of |
 | `starred` | boolean | no | Limit by projects starred by the current user |
@@ -750,7 +785,7 @@ GET /projects/:id/forks
 | `order_by` | string | no | Return projects ordered by `id`, `name`, `path`, `created_at`, `updated_at`, or `last_activity_at` fields. Default is `created_at` |
 | `sort` | string | no | Return projects sorted in `asc` or `desc` order. Default is `desc` |
 | `search` | string | no | Return list of projects matching the search criteria |
-| `simple` | boolean | no | Return only the ID, URL, name, and path of each project |
+| `simple` | boolean | no | Return only limited fields for each project. This is a no-op without authentication as then _only_ simple fields are returned. |
 | `owned` | boolean | no | Limit by projects owned by the current user |
 | `membership` | boolean | no | Limit by projects that the current user is a member of |
 | `starred` | boolean | no | Limit by projects starred by the current user |
......
@@ -425,7 +425,7 @@ There is a helper in `spec/javascripts/helpers/vue_mount_component_helper.js` th
 ```javascript
 import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper.js'
+import mountComponent from 'spec/helpers/vue_mount_component_helper'
 import component from 'component.vue'

 const Component = Vue.extend(component);
......
@@ -447,6 +447,15 @@ You can specify a different Git repository by providing it as an extra parameter
     sudo -u git -H bundle exec rake "gitlab:workhorse:install[/home/git/gitlab-workhorse,https://example.com/gitlab-workhorse.git]" RAILS_ENV=production

+### Install gitlab-pages
+
+GitLab-Pages uses [GNU Make](https://www.gnu.org/software/make/). This step is optional and only needed if you wish to host static sites from within GitLab. The following commands will install GitLab-Pages in `/home/git/gitlab-pages`. For additional setup steps, please consult the [administration guide](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/administration/pages/source.md) for your version of GitLab, as the GitLab Pages daemon can be run in several different ways.
+
+    cd /home/git
+    sudo -u git -H git clone https://gitlab.com/gitlab-org/gitlab-pages.git
+    cd gitlab-pages
+    sudo -u git -H git checkout v$(</home/git/gitlab/GITLAB_PAGES_VERSION)
+    sudo -u git -H make
+
 ### Initialize Database and Activate Advanced Features
......
@@ -22,8 +22,8 @@ Bitbucket.org.
 > **Note:**
 GitLab 8.15 significantly simplified the way to integrate Bitbucket.org with
-GitLab. You are encouraged to upgrade your GitLab instance if you haven't done
-already. If you're using GitLab 8.14 and below, [use the previous integration
+GitLab. You are encouraged to upgrade your GitLab instance if you haven't done so
+already. If you're using GitLab 8.14 or below, [use the previous integration
 docs][bb-old].

 To enable the Bitbucket OmniAuth provider you must register your application

@@ -64,7 +64,7 @@ you to use.
 1. Select **Save**.
 1. Select your newly created OAuth consumer and you should now see a Key and
-   Secret in the list of OAuth customers. Keep this page open as you continue
+   Secret in the list of OAuth consumers. Keep this page open as you continue
    the configuration.

    ![Bitbucket OAuth key](img/bitbucket_oauth_keys.png)

@@ -114,8 +114,8 @@ you to use.
    from the Bitbucket application page.
 1. Save the configuration file.
-1. [Reconfigure][] or [restart GitLab][] for the changes to take effect if you
-   installed GitLab via Omnibus or from source respectively.
+1. For the changes to take effect, [reconfigure GitLab][] if you installed via
+   Omnibus, or [restart][] if installed from source.

 On the sign in page there should now be a Bitbucket icon below the regular sign
 in form. Click the icon to begin the authentication process. Bitbucket will ask

@@ -127,12 +127,12 @@ well, the user will be returned to GitLab and will be signed in.
 Once the above configuration is set up, you can use Bitbucket to sign into
 GitLab and [start importing your projects][bb-import].

-If you don't want to enable signing in with Bitbucket but just want to import
-projects from Bitbucket, you could [disable it in the admin panel](omniauth.md#enable-or-disable-sign-in-with-an-omniauth-provider-without-disabling-import-sources).
+If you want to import projects from Bitbucket, but don't want to enable signing in,
+you can [disable Sign-Ins in the admin panel](omniauth.md#enable-or-disable-sign-in-with-an-omniauth-provider-without-disabling-import-sources).

 [init-oauth]: omniauth.md#initial-omniauth-configuration
 [bb-import]: ../workflow/importing/import_projects_from_bitbucket.md
 [bb-old]: https://gitlab.com/gitlab-org/gitlab-ce/blob/8-14-stable/doc/integration/bitbucket.md
 [bitbucket-docs]: https://confluence.atlassian.com/bitbucket/use-the-ssh-protocol-with-bitbucket-cloud-221449711.html#UsetheSSHprotocolwithBitbucketCloud-KnownhostorBitbucket%27spublickeyfingerprints
-[reconfigure]: ../administration/restart_gitlab.md#omnibus-gitlab-reconfigure
-[restart GitLab]: ../administration/restart_gitlab.md#installations-from-source
+[reconfigure GitLab]: ../administration/restart_gitlab.md#omnibus-gitlab-reconfigure
+[restart]: ../administration/restart_gitlab.md#installations-from-source
@@ -187,7 +187,24 @@ sudo -u git -H git checkout v$(</home/git/gitlab/GITALY_SERVER_VERSION)
 sudo -u git -H make
 ```

-### 10. Update MySQL permissions
+### 10. Update gitlab-pages
+
+#### Only needed if you use GitLab Pages.
+
+Install and compile gitlab-pages. GitLab-Pages uses
+[GNU Make](https://www.gnu.org/software/make/).
+If you are not using Linux you may have to run `gmake` instead of
+`make` below.
+
+```bash
+cd /home/git/gitlab-pages
+sudo -u git -H git fetch --all --tags --prune
+sudo -u git -H git checkout v$(</home/git/gitlab/GITLAB_PAGES_VERSION)
+sudo -u git -H make
+```
+
+### 11. Update MySQL permissions

 If you are using MySQL you need to grant the GitLab user the necessary
 permissions on the database:

@@ -209,7 +226,7 @@ You can make this setting permanent by adding it to your `my.cnf`:
 log_bin_trust_function_creators=1
 ```

-### 11. Update configuration files
+### 12. Update configuration files
 #### New configuration options for `gitlab.yml`

@@ -283,7 +300,7 @@ For Ubuntu 16.04.1 LTS:
 sudo systemctl daemon-reload
 ```

-### 12. Install libs, migrations, etc.
+### 13. Install libs, migrations, etc.

 ```bash
 cd /home/git/gitlab

@@ -313,14 +330,14 @@ sudo -u git -H bundle exec rake cache:clear RAILS_ENV=production
 **MySQL installations**: Run through the `MySQL strings limits` and `Tables and data conversion to utf8mb4` [tasks](../install/database_mysql.md).

-### 13. Start application
+### 14. Start application

 ```bash
 sudo service gitlab start
 sudo service nginx restart
 ```

-### 14. Check application status
+### 15. Check application status

 Check if GitLab and its environment are configured correctly:
......
require 'spec_helper'
describe JobArtifactUploader do
let(:store) { ObjectStorage::Store::LOCAL }
let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
let(:uploader) { described_class.new(job_artifact, :file) }
describe '#open' do
subject { uploader.open }
context 'when trace is stored in Object storage' do
before do
allow(uploader).to receive(:file_storage?) { false }
allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
end
it 'returns http io stream' do
is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
end
end
end
end
@@ -184,12 +184,13 @@ module API
       end

       params do
         use :with_custom_attributes
+        optional :with_projects, type: Boolean, default: true, desc: 'Omit project details'
       end

       get ":id" do
         group = find_group!(params[:id])

         options = {
-          with: Entities::GroupDetail,
+          with: params[:with_projects] ? Entities::GroupDetail : Entities::Group,
           current_user: current_user
         }
......
@@ -260,7 +260,8 @@ module API
         :snippets_enabled,
         :tag_list,
         :visibility,
-        :wiki_enabled
+        :wiki_enabled,
+        :avatar
       ]
       optional :name, type: String, desc: 'The name of the project'
       optional :default_branch, type: String, desc: 'The default branch of the project'
......
@@ -7,14 +7,15 @@ module Gitlab
     module Artifacts
       class Metadata
         ParserError = Class.new(StandardError)
+        InvalidStreamError = Class.new(StandardError)

         VERSION_PATTERN = /^[\w\s]+(\d+\.\d+\.\d+)/
         INVALID_PATH_PATTERN = %r{(^\.?\.?/)|(/\.?\.?/)}

-        attr_reader :file, :path, :full_version
+        attr_reader :stream, :path, :full_version

-        def initialize(file, path, **opts)
-          @file, @path, @opts = file, path, opts
+        def initialize(stream, path, **opts)
+          @stream, @path, @opts = stream, path, opts
           @full_version = read_version
         end

@@ -103,7 +104,17 @@ module Gitlab
         end

         def gzip(&block)
-          Zlib::GzipReader.open(@file, &block)
+          raise InvalidStreamError, "Invalid stream" unless @stream
+
+          # restart gzip reading
+          @stream.seek(0)
+
+          gz = Zlib::GzipReader.new(@stream)
+          yield(gz)
+        rescue Zlib::Error => e
+          raise InvalidStreamError, e.message
+        ensure
+          gz&.finish
         end
       end
     end
......
##
# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html)
# source: https://gitlab.com/snippets/1685610
module Gitlab
module Ci
class Trace
class HttpIO
BUFFER_SIZE = 128.kilobytes
InvalidURLError = Class.new(StandardError)
FailedToGetChunkError = Class.new(StandardError)
attr_reader :uri, :size
attr_reader :tell
attr_reader :chunk, :chunk_range
alias_method :pos, :tell
def initialize(url, size)
raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url)
@uri = URI(url)
@size = size
@tell = 0
end
def close
# no-op
end
def binmode
# no-op
end
def binmode?
true
end
def path
nil
end
def url
@uri.to_s
end
def seek(pos, where = IO::SEEK_SET)
new_pos =
case where
when IO::SEEK_END
size + pos
when IO::SEEK_SET
pos
when IO::SEEK_CUR
tell + pos
else
-1
end
raise 'new position is outside of file' if new_pos < 0 || new_pos > size
@tell = new_pos
end
def eof?
tell == size
end
def each_line
until eof?
line = readline
break if line.nil?
yield(line)
end
end
def read(length = nil, outbuf = "")
out = ""
length ||= size - tell
until length <= 0 || eof?
data = get_chunk
break if data.empty?
chunk_bytes = [BUFFER_SIZE - chunk_offset, length].min
chunk_data = data.byteslice(0, chunk_bytes)
out << chunk_data
@tell += chunk_data.bytesize
length -= chunk_data.bytesize
end
# If outbuf is passed, we put the output into the buffer. This supports IO.copy_stream functionality
if outbuf
outbuf.slice!(0, outbuf.bytesize)
outbuf << out
end
out
end
def readline
out = ""
until eof?
data = get_chunk
new_line = data.index("\n")
if !new_line.nil?
out << data[0..new_line]
@tell += new_line + 1
break
else
out << data
@tell += data.bytesize
end
end
out
end
def write(data)
raise NotImplementedError
end
def truncate(offset)
raise NotImplementedError
end
def flush
raise NotImplementedError
end
def present?
true
end
private
##
# The below methods are not implemented in IO class
#
def in_range?
@chunk_range&.include?(tell)
end
def get_chunk
unless in_range?
response = Net::HTTP.start(uri.hostname, uri.port, proxy_from_env: true, use_ssl: uri.scheme == 'https') do |http|
http.request(request)
end
raise FailedToGetChunkError unless response.code == '200' || response.code == '206'
@chunk = response.body.force_encoding(Encoding::BINARY)
@chunk_range = response.content_range
##
# Note: If provider does not return content_range, then we set it as we requested
# Provider: minio
# - When the file size is larger than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206
# - When the file size is smaller than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206
# Provider: AWS
# - When the file size is larger than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206
# - When the file size is smaller than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206
# Provider: GCS
# - When the file size is larger than requested Content-range, the Content-range is included in responces with Net::HTTPPartialContent 206
# - When the file size is smaller than requested Content-range, the Content-range is included in responces with Net::HTTPOK 200
@chunk_range ||= (chunk_start...(chunk_start + @chunk.bytesize))
end
@chunk[chunk_offset..BUFFER_SIZE]
end
def request
Net::HTTP::Get.new(uri).tap do |request|
request.set_range(chunk_start, BUFFER_SIZE)
end
end
def chunk_offset
tell % BUFFER_SIZE
end
def chunk_start
(tell / BUFFER_SIZE) * BUFFER_SIZE
end
def chunk_end
[chunk_start + BUFFER_SIZE, size].min
end
end
end
end
end
@@ -904,12 +904,8 @@ module Gitlab
     end

     def fetch_source_branch!(source_repository, source_branch, local_ref)
-      Gitlab::GitalyClient.migrate(:fetch_source_branch) do |is_enabled|
-        if is_enabled
-          gitaly_repository_client.fetch_source_branch(source_repository, source_branch, local_ref)
-        else
-          rugged_fetch_source_branch(source_repository, source_branch, local_ref)
-        end
+      wrapped_gitaly_errors do
+        gitaly_repository_client.fetch_source_branch(source_repository, source_branch, local_ref)
       end
     end

@@ -1064,12 +1060,8 @@ module Gitlab
     end

     def bundle_to_disk(save_path)
-      gitaly_migrate(:bundle_to_disk) do |is_enabled|
-        if is_enabled
-          gitaly_repository_client.create_bundle(save_path)
-        else
-          run_git!(%W(bundle create #{save_path} --all))
-        end
+      wrapped_gitaly_errors do
+        gitaly_repository_client.create_bundle(save_path)
       end

       true
......
@@ -25,10 +25,12 @@ module Gitlab
       def conflicts?
         list_conflict_files.any?
-      rescue GRPC::FailedPrecondition
-        # The server raises this exception when it encounters ConflictSideMissing, which
-        # means a conflict exists but its `theirs` or `ours` data is nil due to a non-existent
-        # file in one of the trees.
+      rescue GRPC::FailedPrecondition, GRPC::Unknown
+        # The server raises FailedPrecondition when it encounters
+        # ConflictSideMissing, which means a conflict exists but its `theirs` or
+        # `ours` data is nil due to a non-existent file in one of the trees.
+        #
+        # GRPC::Unknown comes from Rugged::ReferenceError and Rugged::OdbError.
         true
       end
......
##
# This class is compatible with IO class (https://ruby-doc.org/core-2.3.1/IO.html)
# source: https://gitlab.com/snippets/1685610
module Gitlab
class HttpIO
BUFFER_SIZE = 128.kilobytes
InvalidURLError = Class.new(StandardError)
FailedToGetChunkError = Class.new(StandardError)
attr_reader :uri, :size
attr_reader :tell
attr_reader :chunk, :chunk_range
alias_method :pos, :tell
def initialize(url, size)
raise InvalidURLError unless ::Gitlab::UrlSanitizer.valid?(url)
@uri = URI(url)
@size = size
@tell = 0
end
def close
# no-op
end
def binmode
# no-op
end
def binmode?
true
end
def path
nil
end
def url
@uri.to_s
end
def seek(pos, where = IO::SEEK_SET)
new_pos =
case where
when IO::SEEK_END
size + pos
when IO::SEEK_SET
pos
when IO::SEEK_CUR
tell + pos
else
-1
end
raise 'new position is outside of file' if new_pos < 0 || new_pos > size
@tell = new_pos
end
def eof?
tell == size
end
def each_line
until eof?
line = readline
break if line.nil?
yield(line)
end
end
def read(length = nil, outbuf = "")
out = ""
length ||= size - tell
until length <= 0 || eof?
data = get_chunk
break if data.empty?
chunk_bytes = [BUFFER_SIZE - chunk_offset, length].min
chunk_data = data.byteslice(0, chunk_bytes)
out << chunk_data
@tell += chunk_data.bytesize
length -= chunk_data.bytesize
end
# If outbuf is passed, we put the output into the buffer. This supports IO.copy_stream functionality
if outbuf
outbuf.slice!(0, outbuf.bytesize)
outbuf << out
end
out
end
def readline
out = ""
until eof?
data = get_chunk
new_line = data.index("\n")
if !new_line.nil?
out << data[0..new_line]
@tell += new_line + 1
break
else
out << data
@tell += data.bytesize
end
end
out
end
def write(data)
raise NotImplementedError
end
def truncate(offset)
raise NotImplementedError
end
def flush
raise NotImplementedError
end
def present?
true
end
private
##
# The below methods are not implemented in IO class
#
def in_range?
@chunk_range&.include?(tell)
end
def get_chunk
unless in_range?
response = Net::HTTP.start(uri.hostname, uri.port, proxy_from_env: true, use_ssl: uri.scheme == 'https') do |http|
http.request(request)
end
raise FailedToGetChunkError unless response.code == '200' || response.code == '206'
@chunk = response.body.force_encoding(Encoding::BINARY)
@chunk_range = response.content_range
##
# Note: If the provider does not return content_range, then we set it to the range we requested
# Provider: minio
# - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# Provider: AWS
# - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# Provider: GCS
# - When the file size is larger than the requested Content-Range, the Content-Range is included in responses with Net::HTTPPartialContent 206
# - When the file size is smaller than the requested Content-Range, the Content-Range is included in responses with Net::HTTPOK 200
@chunk_range ||= (chunk_start...(chunk_start + @chunk.bytesize))
end
@chunk[chunk_offset..BUFFER_SIZE]
end
def request
Net::HTTP::Get.new(uri).tap do |request|
request.set_range(chunk_start, BUFFER_SIZE)
end
end
def chunk_offset
tell % BUFFER_SIZE
end
def chunk_start
(tell / BUFFER_SIZE) * BUFFER_SIZE
end
def chunk_end
[chunk_start + BUFFER_SIZE, size].min
end
end
end
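A minimal usage sketch of the relocated `Gitlab::HttpIO` class, which exposes an IO-like read/seek interface backed by HTTP range requests of `BUFFER_SIZE` bytes. The URL and size below are placeholders; in GitLab they come from an uploader (for example, `GitlabUploader#open` builds `HttpIO.new(url, cached_size)` for remote files).

```ruby
# Sketch only: url and size are assumed values for illustration.
io = Gitlab::HttpIO.new('https://object-storage.example.com/artifacts/job.log', 4096)

io.seek(0, IO::SEEK_SET)     # position within the remote object
puts io.readline             # first line, fetched via a ranged GET
io.seek(-100, IO::SEEK_END)  # jump near the end of the object
tail = io.read               # read from the current position to EOF
io.close                     # no-op, kept for IO compatibility
```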
@@ -92,21 +92,13 @@ module Gitlab
     # Ex.
     #   import_repository("nfs-file06", "gitlab/gitlab-ci", "https://gitlab.com/gitlab-org/gitlab-test.git")
     #
-    # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/874
     def import_repository(storage, name, url)
       if url.start_with?('.', '/')
         raise Error.new("don't use disk paths with import_repository: #{url.inspect}")
       end

       relative_path = "#{name}.git"
-      cmd = gitaly_migrate(:import_repository, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
-        if is_enabled
-          GitalyGitlabProjects.new(storage, relative_path)
-        else
-          # The timeout ensures the subprocess won't hang forever
-          gitlab_projects(storage, relative_path)
-        end
-      end
+      cmd = GitalyGitlabProjects.new(storage, relative_path)

       success = cmd.import_project(url, git_timeout)
       raise Error, cmd.output unless success

@@ -126,12 +118,8 @@ module Gitlab
     #   fetch_remote(my_repo, "upstream")
     #
     def fetch_remote(repository, remote, ssh_auth: nil, forced: false, no_tags: false, prune: true)
-      gitaly_migrate(:fetch_remote) do |is_enabled|
-        if is_enabled
-          repository.gitaly_repository_client.fetch_remote(remote, ssh_auth: ssh_auth, forced: forced, no_tags: no_tags, timeout: git_timeout, prune: prune)
-        else
-          local_fetch_remote(repository.storage, repository.relative_path, remote, ssh_auth: ssh_auth, forced: forced, no_tags: no_tags, prune: prune)
-        end
+      wrapped_gitaly_errors do
+        repository.gitaly_repository_client.fetch_remote(remote, ssh_auth: ssh_auth, forced: forced, no_tags: no_tags, timeout: git_timeout, prune: prune)
       end
     end
...@@ -389,28 +377,6 @@ module Gitlab ...@@ -389,28 +377,6 @@ module Gitlab
) )
end end
def local_fetch_remote(storage_name, repository_relative_path, remote, ssh_auth: nil, forced: false, no_tags: false, prune: true)
vars = { force: forced, tags: !no_tags, prune: prune }
if ssh_auth&.ssh_import?
if ssh_auth.ssh_key_auth? && ssh_auth.ssh_private_key.present?
vars[:ssh_key] = ssh_auth.ssh_private_key
end
if ssh_auth.ssh_known_hosts.present?
vars[:known_hosts] = ssh_auth.ssh_known_hosts
end
end
cmd = gitlab_projects(storage_name, repository_relative_path)
success = cmd.fetch_remote(remote, git_timeout, vars)
raise Error, cmd.output unless success
success
end
def gitlab_shell_fast_execute(cmd) def gitlab_shell_fast_execute(cmd)
output, status = gitlab_shell_fast_execute_helper(cmd) output, status = gitlab_shell_fast_execute_helper(cmd)
...@@ -440,10 +406,6 @@ module Gitlab ...@@ -440,10 +406,6 @@ module Gitlab
Gitlab.config.gitlab_shell.git_timeout Gitlab.config.gitlab_shell.git_timeout
end end
def gitaly_migrate(method, status: Gitlab::GitalyClient::MigrationStatus::OPT_IN, &block)
wrapped_gitaly_errors { Gitlab::GitalyClient.migrate(method, status: status, &block) }
end
def wrapped_gitaly_errors def wrapped_gitaly_errors
yield yield
rescue GRPC::NotFound, GRPC::BadStatus => e rescue GRPC::NotFound, GRPC::BadStatus => e
......
...@@ -8,8 +8,8 @@ msgid "" ...@@ -8,8 +8,8 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: gitlab 1.0.0\n" "Project-Id-Version: gitlab 1.0.0\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-07-10 08:56+0200\n" "POT-Creation-Date: 2018-07-10 13:57-0500\n"
"PO-Revision-Date: 2018-07-10 08:56+0200\n" "PO-Revision-Date: 2018-07-10 13:57-0500\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n" "Language-Team: LANGUAGE <LL@li.org>\n"
"Language: \n" "Language: \n"
...@@ -159,7 +159,7 @@ msgstr[1] "" ...@@ -159,7 +159,7 @@ msgstr[1] ""
msgid "%{unstaged} unstaged and %{staged} staged changes" msgid "%{unstaged} unstaged and %{staged} staged changes"
msgstr "" msgstr ""
msgid "(checkout the %{link} for information on how to install it)." msgid "(check out the %{link} for information on how to install it)."
msgstr "" msgstr ""
msgid "+ %{moreCount} more" msgid "+ %{moreCount} more"
......
...@@ -216,17 +216,19 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do ...@@ -216,17 +216,19 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end end
context 'when trace artifact is in ObjectStorage' do context 'when trace artifact is in ObjectStorage' do
let(:url) { 'http://object-storage/trace' }
let(:file_path) { expand_fixture_path('trace/sample_trace') }
let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) } let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
before do before do
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false } allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url } allow_any_instance_of(JobArtifactUploader).to receive(:url) { url }
allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size } allow_any_instance_of(JobArtifactUploader).to receive(:size) { File.size(file_path) }
end end
context 'when there are no network issues' do context 'when there are no network issues' do
before do before do
stub_remote_trace_206 stub_remote_url_206(url, file_path)
get_trace get_trace
end end
...@@ -241,11 +243,11 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do ...@@ -241,11 +243,11 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
context 'when there is a network issue' do context 'when there is a network issue' do
before do before do
stub_remote_trace_500 stub_remote_url_500(url)
end end
it 'returns a trace' do it 'returns a trace' do
expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError) expect { get_trace }.to raise_error(Gitlab::HttpIO::FailedToGetChunkError)
end end
end end
end end
......
@@ -62,7 +62,7 @@ describe "User adds a comment on a commit", :js do
     click_diff_line(sample_commit.line_code)

     expect(page).to have_css(".js-temp-notes-holder form.new-note")
-      .and have_css(".js-close-discussion-note-form", text: "Discard draft")
+      .and have_css(".js-close-discussion-note-form", text: "Cancel")

     # The `Cancel` button closes the current form. The page should not have any open forms after that.
     find(".js-close-discussion-note-form").click
......
@@ -4,10 +4,12 @@ describe TimeHelper do
   describe "#time_interval_in_words" do
     it "returns minutes and seconds" do
       intervals_in_words = {
-        100 => "1 minute 40 seconds",
-        100.32 => "1 minute 40 seconds",
-        121 => "2 minutes 1 second",
-        3721 => "62 minutes 1 second",
+        60 => "1 minute",
+        100 => "1 minute and 40 seconds",
+        100.32 => "1 minute and 40 seconds",
+        120 => "2 minutes",
+        121 => "2 minutes and 1 second",
+        3721 => "62 minutes and 1 second",
         0 => "0 seconds"
       }
import Vue from 'vue'; import Vue from 'vue';
import $ from 'jquery'; import Vuex from 'vuex';
import { mountComponentWithStore } from 'spec/helpers'; import { mountComponentWithStore } from 'spec/helpers';
import store from '~/diffs/store'; import diffsModule from '~/diffs/store/modules';
import ChangedFiles from '~/diffs/components/changed_files.vue'; import changedFiles from '~/diffs/components/changed_files.vue';
describe('ChangedFiles', () => { describe('ChangedFiles', () => {
const Component = Vue.extend(ChangedFiles); const Component = Vue.extend(changedFiles);
const createComponent = props => mountComponentWithStore(Component, { props, store }); const store = new Vuex.Store({
modules: {
diffs: diffsModule,
},
});
let vm; let vm;
beforeEach(() => { beforeEach(() => {
...@@ -14,6 +19,7 @@ describe('ChangedFiles', () => { ...@@ -14,6 +19,7 @@ describe('ChangedFiles', () => {
<div id="dummy-element"></div> <div id="dummy-element"></div>
<div class="js-tabs-affix"></div> <div class="js-tabs-affix"></div>
`); `);
const props = { const props = {
diffFiles: [ diffFiles: [
{ {
...@@ -26,7 +32,8 @@ describe('ChangedFiles', () => { ...@@ -26,7 +32,8 @@ describe('ChangedFiles', () => {
}, },
], ],
}; };
vm = createComponent(props);
vm = mountComponentWithStore(Component, { props, store });
}); });
describe('with single file added', () => { describe('with single file added', () => {
...@@ -40,7 +47,6 @@ describe('ChangedFiles', () => { ...@@ -40,7 +47,6 @@ describe('ChangedFiles', () => {
}); });
}); });
describe('template', () => {
describe('diff view mode buttons', () => { describe('diff view mode buttons', () => {
let inlineButton; let inlineButton;
let parallelButton; let parallelButton;
...@@ -79,13 +85,13 @@ describe('ChangedFiles', () => { ...@@ -79,13 +85,13 @@ describe('ChangedFiles', () => {
describe('clicking them', () => { describe('clicking them', () => {
it('should toggle the diff view type', done => { it('should toggle the diff view type', done => {
$(parallelButton).click(); parallelButton.click();
vm.$nextTick(() => { vm.$nextTick(() => {
expect(inlineButton.classList.contains('active')).toEqual(false); expect(inlineButton.classList.contains('active')).toEqual(false);
expect(parallelButton.classList.contains('active')).toEqual(true); expect(parallelButton.classList.contains('active')).toEqual(true);
$(inlineButton).click(); inlineButton.click();
vm.$nextTick(() => { vm.$nextTick(() => {
expect(inlineButton.classList.contains('active')).toEqual(true); expect(inlineButton.classList.contains('active')).toEqual(true);
...@@ -96,5 +102,4 @@ describe('ChangedFiles', () => { ...@@ -96,5 +102,4 @@ describe('ChangedFiles', () => {
}); });
}); });
}); });
});
}); });
...@@ -2,13 +2,21 @@ require 'spec_helper' ...@@ -2,13 +2,21 @@ require 'spec_helper'
describe Gitlab::Ci::Build::Artifacts::Metadata do describe Gitlab::Ci::Build::Artifacts::Metadata do
def metadata(path = '', **opts) def metadata(path = '', **opts)
described_class.new(metadata_file_path, path, **opts) described_class.new(metadata_file_stream, path, **opts)
end end
let(:metadata_file_path) do let(:metadata_file_path) do
Rails.root + 'spec/fixtures/ci_build_artifacts_metadata.gz' Rails.root + 'spec/fixtures/ci_build_artifacts_metadata.gz'
end end
let(:metadata_file_stream) do
File.open(metadata_file_path) if metadata_file_path
end
after do
metadata_file_stream&.close
end
context 'metadata file exists' do context 'metadata file exists' do
describe '#find_entries! empty string' do describe '#find_entries! empty string' do
subject { metadata('').find_entries! } subject { metadata('').find_entries! }
...@@ -86,11 +94,21 @@ describe Gitlab::Ci::Build::Artifacts::Metadata do ...@@ -86,11 +94,21 @@ describe Gitlab::Ci::Build::Artifacts::Metadata do
end end
context 'metadata file does not exist' do context 'metadata file does not exist' do
let(:metadata_file_path) { '' } let(:metadata_file_path) { nil }
describe '#find_entries!' do
it 'raises error' do
expect { metadata.find_entries! }.to raise_error(described_class::InvalidStreamError, /Invalid stream/)
end
end
end
context 'metadata file is invalid' do
let(:metadata_file_path) { Rails.root + 'spec/fixtures/ci_build_artifacts.zip' }
describe '#find_entries!' do describe '#find_entries!' do
it 'raises error' do it 'raises error' do
expect { metadata.find_entries! }.to raise_error(Errno::ENOENT) expect { metadata.find_entries! }.to raise_error(described_class::InvalidStreamError, /not in gzip format/)
end end
end end
end end
......
...@@ -1716,7 +1716,6 @@ describe Gitlab::Git::Repository, seed_helper: true do ...@@ -1716,7 +1716,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
end end
describe '#fetch_source_branch!' do describe '#fetch_source_branch!' do
shared_examples '#fetch_source_branch!' do
let(:local_ref) { 'refs/merge-requests/1/head' } let(:local_ref) { 'refs/merge-requests/1/head' }
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') } let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
let(:source_repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') } let(:source_repository) { Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '') }
...@@ -1765,13 +1764,6 @@ describe Gitlab::Git::Repository, seed_helper: true do ...@@ -1765,13 +1764,6 @@ describe Gitlab::Git::Repository, seed_helper: true do
end end
end end
it_behaves_like '#fetch_source_branch!'
context 'without gitaly', :skip_gitaly_mock do
it_behaves_like '#fetch_source_branch!'
end
end
describe '#rm_branch' do describe '#rm_branch' do
shared_examples "user deleting a branch" do shared_examples "user deleting a branch" do
let(:project) { create(:project, :repository) } let(:project) { create(:project, :repository) }
......
require 'spec_helper' require 'spec_helper'
describe Gitlab::Ci::Trace::HttpIO do describe Gitlab::HttpIO do
include HttpIOHelpers include HttpIOHelpers
let(:http_io) { described_class.new(url, size) } let(:http_io) { described_class.new(url, size) }
let(:url) { remote_trace_url }
let(:size) { remote_trace_size } let(:url) { 'http://object-storage/trace' }
let(:file_path) { expand_fixture_path('trace/sample_trace') }
let(:file_body) { File.read(file_path).force_encoding(Encoding::BINARY) }
let(:size) { File.size(file_path) }
describe '#close' do describe '#close' do
subject { http_io.close } subject { http_io.close }
...@@ -86,10 +89,10 @@ describe Gitlab::Ci::Trace::HttpIO do ...@@ -86,10 +89,10 @@ describe Gitlab::Ci::Trace::HttpIO do
describe '#each_line' do describe '#each_line' do
subject { http_io.each_line } subject { http_io.each_line }
let(:string_io) { StringIO.new(remote_trace_body) } let(:string_io) { StringIO.new(file_body) }
before do before do
stub_remote_trace_206 stub_remote_url_206(url, file_path)
end end
it 'yields lines' do it 'yields lines' do
...@@ -99,7 +102,7 @@ describe Gitlab::Ci::Trace::HttpIO do ...@@ -99,7 +102,7 @@ describe Gitlab::Ci::Trace::HttpIO do
context 'when buckets on GCS' do context 'when buckets on GCS' do
context 'when BUFFER_SIZE is larger than file size' do context 'when BUFFER_SIZE is larger than file size' do
before do before do
stub_remote_trace_200 stub_remote_url_200(url, file_path)
set_larger_buffer_size_than(size) set_larger_buffer_size_than(size)
end end
...@@ -117,7 +120,7 @@ describe Gitlab::Ci::Trace::HttpIO do ...@@ -117,7 +120,7 @@ describe Gitlab::Ci::Trace::HttpIO do
context 'when there are no network issue' do context 'when there are no network issue' do
before do before do
stub_remote_trace_206 stub_remote_url_206(url, file_path)
end end
context 'when read whole size' do context 'when read whole size' do
...@@ -129,7 +132,7 @@ describe Gitlab::Ci::Trace::HttpIO do ...@@ -129,7 +132,7 @@ describe Gitlab::Ci::Trace::HttpIO do
end end
it 'reads a trace' do it 'reads a trace' do
is_expected.to eq(remote_trace_body) is_expected.to eq(file_body)
end end
end end
...@@ -139,7 +142,7 @@ describe Gitlab::Ci::Trace::HttpIO do ...@@ -139,7 +142,7 @@ describe Gitlab::Ci::Trace::HttpIO do
end end
it 'reads a trace' do it 'reads a trace' do
is_expected.to eq(remote_trace_body) is_expected.to eq(file_body)
end end
end end
end end
...@@ -153,7 +156,7 @@ describe Gitlab::Ci::Trace::HttpIO do ...@@ -153,7 +156,7 @@ describe Gitlab::Ci::Trace::HttpIO do
end end
it 'reads a trace' do it 'reads a trace' do
is_expected.to eq(remote_trace_body[0, length]) is_expected.to eq(file_body[0, length])
end end
end end
...@@ -163,7 +166,7 @@ describe Gitlab::Ci::Trace::HttpIO do ...@@ -163,7 +166,7 @@ describe Gitlab::Ci::Trace::HttpIO do
end end
it 'reads a trace' do it 'reads a trace' do
is_expected.to eq(remote_trace_body[0, length]) is_expected.to eq(file_body[0, length])
end end
end end
end end
...@@ -177,7 +180,7 @@ describe Gitlab::Ci::Trace::HttpIO do ...@@ -177,7 +180,7 @@ describe Gitlab::Ci::Trace::HttpIO do
end end
it 'reads a trace' do it 'reads a trace' do
is_expected.to eq(remote_trace_body) is_expected.to eq(file_body)
end end
end end
...@@ -187,7 +190,7 @@ describe Gitlab::Ci::Trace::HttpIO do ...@@ -187,7 +190,7 @@ describe Gitlab::Ci::Trace::HttpIO do
end end
it 'reads a trace' do it 'reads a trace' do
is_expected.to eq(remote_trace_body) is_expected.to eq(file_body)
end end
end end
end end
...@@ -221,11 +224,11 @@ describe Gitlab::Ci::Trace::HttpIO do ...@@ -221,11 +224,11 @@ describe Gitlab::Ci::Trace::HttpIO do
let(:length) { nil } let(:length) { nil }
before do before do
stub_remote_trace_500 stub_remote_url_500(url)
end end
it 'reads a trace' do it 'reads a trace' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError) expect { subject }.to raise_error(Gitlab::HttpIO::FailedToGetChunkError)
end end
end end
end end
...@@ -233,15 +236,15 @@ describe Gitlab::Ci::Trace::HttpIO do ...@@ -233,15 +236,15 @@ describe Gitlab::Ci::Trace::HttpIO do
describe '#readline' do describe '#readline' do
subject { http_io.readline } subject { http_io.readline }
let(:string_io) { StringIO.new(remote_trace_body) } let(:string_io) { StringIO.new(file_body) }
before do before do
stub_remote_trace_206 stub_remote_url_206(url, file_path)
end end
shared_examples 'all line matching' do shared_examples 'all line matching' do
it 'reads a line' do it 'reads a line' do
(0...remote_trace_body.lines.count).each do (0...file_body.lines.count).each do
expect(http_io.readline).to eq(string_io.readline) expect(http_io.readline).to eq(string_io.readline)
end end
end end
...@@ -251,11 +254,11 @@ describe Gitlab::Ci::Trace::HttpIO do ...@@ -251,11 +254,11 @@ describe Gitlab::Ci::Trace::HttpIO do
let(:length) { nil } let(:length) { nil }
before do before do
stub_remote_trace_500 stub_remote_url_500(url)
end end
it 'reads a trace' do it 'reads a trace' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError) expect { subject }.to raise_error(Gitlab::HttpIO::FailedToGetChunkError)
end end
end end
......
...@@ -403,7 +403,6 @@ describe Gitlab::Shell do ...@@ -403,7 +403,6 @@ describe Gitlab::Shell do
end end
describe '#create_repository' do describe '#create_repository' do
shared_examples '#create_repository' do
let(:repository_storage) { 'default' } let(:repository_storage) { 'default' }
let(:repository_storage_path) do let(:repository_storage_path) do
Gitlab::GitalyClient::StorageSettings.allow_disk_access do Gitlab::GitalyClient::StorageSettings.allow_disk_access do
...@@ -437,15 +436,6 @@ describe Gitlab::Shell do ...@@ -437,15 +436,6 @@ describe Gitlab::Shell do
end end
end end
context 'with gitaly' do
it_behaves_like '#create_repository'
end
context 'without gitaly', :skip_gitaly_mock do
it_behaves_like '#create_repository'
end
end
describe '#remove_repository' do describe '#remove_repository' do
let!(:project) { create(:project, :repository, :legacy_storage) } let!(:project) { create(:project, :repository, :legacy_storage) }
let(:disk_path) { "#{project.disk_path}.git" } let(:disk_path) { "#{project.disk_path}.git" }
...@@ -513,22 +503,12 @@ describe Gitlab::Shell do ...@@ -513,22 +503,12 @@ describe Gitlab::Shell do
end end
end end
shared_examples 'fetch_remote' do |gitaly_on| describe '#fetch_remote' do
def fetch_remote(ssh_auth = nil, prune = true) def fetch_remote(ssh_auth = nil, prune = true)
gitlab_shell.fetch_remote(repository.raw_repository, 'remote-name', ssh_auth: ssh_auth, prune: prune) gitlab_shell.fetch_remote(repository.raw_repository, 'remote-name', ssh_auth: ssh_auth, prune: prune)
end end
def expect_gitlab_projects(fail = false, options = {}) def expect_call(fail, options = {})
expect(gitlab_projects).to receive(:fetch_remote).with(
'remote-name',
timeout,
options
).and_return(!fail)
allow(gitlab_projects).to receive(:output).and_return('error') if fail
end
def expect_gitaly_call(fail, options = {})
receive_fetch_remote = receive_fetch_remote =
if fail if fail
receive(:fetch_remote).and_raise(GRPC::NotFound) receive(:fetch_remote).and_raise(GRPC::NotFound)
...@@ -539,16 +519,6 @@ describe Gitlab::Shell do ...@@ -539,16 +519,6 @@ describe Gitlab::Shell do
expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive_fetch_remote expect_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive_fetch_remote
end end
if gitaly_on
def expect_call(fail, options = {})
expect_gitaly_call(fail, options)
end
else
def expect_call(fail, options = {})
expect_gitlab_projects(fail, options)
end
end
def build_ssh_auth(opts = {}) def build_ssh_auth(opts = {})
defaults = { defaults = {
ssh_import?: true, ssh_import?: true,
...@@ -634,14 +604,6 @@ describe Gitlab::Shell do ...@@ -634,14 +604,6 @@ describe Gitlab::Shell do
expect(fetch_remote(ssh_auth)).to be_truthy expect(fetch_remote(ssh_auth)).to be_truthy
end end
end end
end
describe '#fetch_remote local', :skip_gitaly_mock do
it_should_behave_like 'fetch_remote', false
end
describe '#fetch_remote gitaly' do
it_should_behave_like 'fetch_remote', true
context 'gitaly call' do context 'gitaly call' do
let(:remote_name) { 'remote-name' } let(:remote_name) { 'remote-name' }
...@@ -683,25 +645,6 @@ describe Gitlab::Shell do ...@@ -683,25 +645,6 @@ describe Gitlab::Shell do
end.to raise_error(Gitlab::Shell::Error, "error") end.to raise_error(Gitlab::Shell::Error, "error")
end end
end end
context 'without gitaly', :disable_gitaly do
it 'returns true when the command succeeds' do
expect(gitlab_projects).to receive(:import_project).with(import_url, timeout) { true }
result = gitlab_shell.import_repository(project.repository_storage, project.disk_path, import_url)
expect(result).to be_truthy
end
it 'raises an exception when the command fails' do
allow(gitlab_projects).to receive(:output) { 'error' }
expect(gitlab_projects).to receive(:import_project) { false }
expect do
gitlab_shell.import_repository(project.repository_storage, project.disk_path, import_url)
end.to raise_error(Gitlab::Shell::Error, "error")
end
end
end end
end end
......
...@@ -36,7 +36,6 @@ describe Gitlab::Workhorse do ...@@ -36,7 +36,6 @@ describe Gitlab::Workhorse do
allow(described_class).to receive(:git_archive_cache_disabled?).and_return(cache_disabled) allow(described_class).to receive(:git_archive_cache_disabled?).and_return(cache_disabled)
end end
context 'when Gitaly workhorse_archive feature is enabled' do
it 'sets the header correctly' do it 'sets the header correctly' do
key, command, params = decode_workhorse_header(subject) key, command, params = decode_workhorse_header(subject)
...@@ -53,7 +52,6 @@ describe Gitlab::Workhorse do ...@@ -53,7 +52,6 @@ describe Gitlab::Workhorse do
expect(params).to include({ 'DisableCache' => true }) expect(params).to include({ 'DisableCache' => true })
end end
end end
end
context "when the repository doesn't have an archive file path" do context "when the repository doesn't have an archive file path" do
before do before do
......
...@@ -2689,4 +2689,58 @@ describe Ci::Build do ...@@ -2689,4 +2689,58 @@ describe Ci::Build do
end end
end end
end end
describe '#artifacts_metadata_entry' do
set(:build) { create(:ci_build, project: project) }
let(:path) { 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' }
before do
stub_artifacts_object_storage
end
subject { build.artifacts_metadata_entry(path) }
context 'when using local storage' do
let!(:metadata) { create(:ci_job_artifact, :metadata, job: build) }
context 'for existing file' do
it 'does exist' do
is_expected.to be_exists
end
end
context 'for non-existing file' do
let(:path) { 'invalid-file' }
it 'does not exist' do
is_expected.not_to be_exists
end
end
end
context 'when using remote storage' do
include HttpIOHelpers
let!(:metadata) { create(:ci_job_artifact, :remote_store, :metadata, job: build) }
let(:file_path) { expand_fixture_path('ci_build_artifacts_metadata.gz') }
before do
stub_remote_url_206(metadata.file.url, file_path)
end
context 'for existing file' do
it 'does exist' do
is_expected.to be_exists
end
end
context 'for non-existing file' do
let(:path) { 'invalid-file' }
it 'does not exist' do
is_expected.not_to be_exists
end
end
end
end
end end
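As a rough illustration of what the new #artifacts_metadata_entry examples above exercise, a hedged Ruby sketch; the build variable and path are placeholders, not values from this change:

# Hypothetical usage sketch (build and path are placeholders).
entry = build.artifacts_metadata_entry('some_dir/banana_sample.gif')
entry.exists?  # => true when the path is listed in the metadata archive,
               #    whether the metadata lives on disk or in object storage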
...@@ -431,6 +431,18 @@ describe Repository do ...@@ -431,6 +431,18 @@ describe Repository do
it { is_expected.to be_falsey } it { is_expected.to be_falsey }
end end
context 'non merged branch' do
subject { repository.merged_to_root_ref?('fix') }
it { is_expected.to be_falsey }
end
context 'non existent branch' do
subject { repository.merged_to_root_ref?('non_existent_branch') }
it { is_expected.to be_nil }
end
end end
describe '#can_be_merged?' do describe '#can_be_merged?' do
...@@ -452,17 +464,11 @@ describe Repository do ...@@ -452,17 +464,11 @@ describe Repository do
it { is_expected.to be_falsey } it { is_expected.to be_falsey }
end end
context 'non merged branch' do context 'submodule changes that confuse rugged' do
subject { repository.merged_to_root_ref?('fix') } subject { repository.can_be_merged?('update-gitlab-shell-v-6-0-1', 'update-gitlab-shell-v-6-0-3') }
it { is_expected.to be_falsey } it { is_expected.to be_falsey }
end end
context 'non existent branch' do
subject { repository.merged_to_root_ref?('non_existent_branch') }
it { is_expected.to be_nil }
end
end end
describe '#commit' do describe '#commit' do
......
...@@ -265,14 +265,22 @@ describe API::Groups do ...@@ -265,14 +265,22 @@ describe API::Groups do
projects projects
end end
def response_project_ids(json_response, key)
json_response[key].map do |project|
project['id'].to_i
end
end
context 'when unauthenticated' do context 'when unauthenticated' do
it 'returns 404 for a private group' do it 'returns 404 for a private group' do
get api("/groups/#{group2.id}") get api("/groups/#{group2.id}")
expect(response).to have_gitlab_http_status(404) expect(response).to have_gitlab_http_status(404)
end end
it 'returns 200 for a public group' do it 'returns 200 for a public group' do
get api("/groups/#{group1.id}") get api("/groups/#{group1.id}")
expect(response).to have_gitlab_http_status(200) expect(response).to have_gitlab_http_status(200)
end end
...@@ -282,7 +290,7 @@ describe API::Groups do ...@@ -282,7 +290,7 @@ describe API::Groups do
get api("/groups/#{public_group.id}") get api("/groups/#{public_group.id}")
expect(json_response['projects'].map { |p| p['id'].to_i }) expect(response_project_ids(json_response, 'projects'))
.to contain_exactly(projects[:public].id) .to contain_exactly(projects[:public].id)
end end
...@@ -292,7 +300,7 @@ describe API::Groups do ...@@ -292,7 +300,7 @@ describe API::Groups do
get api("/groups/#{group1.id}") get api("/groups/#{group1.id}")
expect(json_response['shared_projects'].map { |p| p['id'].to_i }) expect(response_project_ids(json_response, 'shared_projects'))
.to contain_exactly(projects[:public].id) .to contain_exactly(projects[:public].id)
end end
end end
...@@ -323,6 +331,17 @@ describe API::Groups do ...@@ -323,6 +331,17 @@ describe API::Groups do
expect(json_response['shared_projects'][0]['id']).to eq(project.id) expect(json_response['shared_projects'][0]['id']).to eq(project.id)
end end
it "returns one of user1's groups without projects when with_projects option is set to false" do
project = create(:project, namespace: group2, path: 'Foo')
create(:project_group_link, project: project, group: group1)
get api("/groups/#{group1.id}", user1), with_projects: false
expect(response).to have_gitlab_http_status(200)
expect(json_response['projects']).to be_nil
expect(json_response['shared_projects']).to be_nil
end
it "does not return a non existing group" do it "does not return a non existing group" do
get api("/groups/1328", user1) get api("/groups/1328", user1)
...@@ -341,7 +360,7 @@ describe API::Groups do ...@@ -341,7 +360,7 @@ describe API::Groups do
get api("/groups/#{public_group.id}", user2) get api("/groups/#{public_group.id}", user2)
expect(json_response['projects'].map { |p| p['id'].to_i }) expect(response_project_ids(json_response, 'projects'))
.to contain_exactly(projects[:public].id, projects[:internal].id) .to contain_exactly(projects[:public].id, projects[:internal].id)
end end
...@@ -351,7 +370,7 @@ describe API::Groups do ...@@ -351,7 +370,7 @@ describe API::Groups do
get api("/groups/#{group1.id}", user2) get api("/groups/#{group1.id}", user2)
expect(json_response['shared_projects'].map { |p| p['id'].to_i }) expect(response_project_ids(json_response, 'shared_projects'))
.to contain_exactly(projects[:public].id, projects[:internal].id) .to contain_exactly(projects[:public].id, projects[:internal].id)
end end
end end
......
...@@ -559,12 +559,14 @@ describe API::Jobs do ...@@ -559,12 +559,14 @@ describe API::Jobs do
context 'authorized user' do context 'authorized user' do
context 'when trace is in ObjectStorage' do context 'when trace is in ObjectStorage' do
let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) } let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
let(:url) { 'http://object-storage/trace' }
let(:file_path) { expand_fixture_path('trace/sample_trace') }
before do before do
stub_remote_trace_206 stub_remote_url_206(url, file_path)
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false } allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url } allow_any_instance_of(JobArtifactUploader).to receive(:url) { url }
allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size } allow_any_instance_of(JobArtifactUploader).to receive(:size) { File.size(file_path) }
end end
it 'returns specific job trace' do it 'returns specific job trace' do
......
...@@ -1592,6 +1592,20 @@ describe API::Projects do ...@@ -1592,6 +1592,20 @@ describe API::Projects do
expect(response).to have_gitlab_http_status(400) expect(response).to have_gitlab_http_status(400)
end end
it 'updates avatar' do
project_param = {
avatar: fixture_file_upload('spec/fixtures/banana_sample.gif',
'image/gif')
}
put api("/projects/#{project3.id}", user), project_param
expect(response).to have_gitlab_http_status(200)
expect(json_response['avatar_url']).to eq('http://localhost/uploads/'\
'-/system/project/avatar/'\
"#{project3.id}/banana_sample.gif")
end
end end
context 'when authenticated as project master' do context 'when authenticated as project master' do
......
...@@ -2,8 +2,8 @@ require 'spec_helper' ...@@ -2,8 +2,8 @@ require 'spec_helper'
describe MergeRequests::Conflicts::ListService do describe MergeRequests::Conflicts::ListService do
describe '#can_be_resolved_in_ui?' do describe '#can_be_resolved_in_ui?' do
def create_merge_request(source_branch) def create_merge_request(source_branch, target_branch = 'conflict-start')
create(:merge_request, source_branch: source_branch, target_branch: 'conflict-start', merge_status: :unchecked) do |mr| create(:merge_request, source_branch: source_branch, target_branch: target_branch, merge_status: :unchecked) do |mr|
mr.mark_as_unmergeable mr.mark_as_unmergeable
end end
end end
...@@ -84,5 +84,11 @@ describe MergeRequests::Conflicts::ListService do ...@@ -84,5 +84,11 @@ describe MergeRequests::Conflicts::ListService do
expect(service.can_be_resolved_in_ui?).to be_falsey expect(service.can_be_resolved_in_ui?).to be_falsey
end end
it 'returns a falsey value when the conflict is in a submodule revision' do
merge_request = create_merge_request('update-gitlab-shell-v-6-0-3', 'update-gitlab-shell-v-6-0-1')
expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey
end
end end
end end
...@@ -49,7 +49,9 @@ module TestEnv ...@@ -49,7 +49,9 @@ module TestEnv
'add-pdf-file' => 'e774ebd', 'add-pdf-file' => 'e774ebd',
'squash-large-files' => '54cec52', 'squash-large-files' => '54cec52',
'add-pdf-text-binary' => '79faa7b', 'add-pdf-text-binary' => '79faa7b',
'add_images_and_changes' => '010d106' 'add_images_and_changes' => '010d106',
'update-gitlab-shell-v-6-0-1' => '2f61d70',
'update-gitlab-shell-v-6-0-3' => 'de78448'
}.freeze }.freeze
# gitlab-test-fork is a fork of gitlab-fork, but we don't necessarily # gitlab-test-fork is a fork of gitlab-fork, but we don't necessarily
......
module HttpIOHelpers module HttpIOHelpers
def stub_remote_trace_206 def stub_remote_url_206(url, file_path)
WebMock.stub_request(:get, remote_trace_url) WebMock.stub_request(:get, url)
.to_return { |request| remote_trace_response(request, 206) } .to_return { |request| remote_url_response(file_path, request, 206) }
end end
def stub_remote_trace_200 def stub_remote_url_200(url, file_path)
WebMock.stub_request(:get, remote_trace_url) WebMock.stub_request(:get, url)
.to_return { |request| remote_trace_response(request, 200) } .to_return { |request| remote_url_response(file_path, request, 200) }
end end
def stub_remote_trace_500 def stub_remote_url_500(url)
WebMock.stub_request(:get, remote_trace_url) WebMock.stub_request(:get, url)
.to_return(status: [500, "Internal Server Error"]) .to_return(status: [500, "Internal Server Error"])
end end
def remote_trace_url def remote_url_response(file_path, request, response_status)
"http://trace.com/trace"
end
def remote_trace_response(request, responce_status)
range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/) range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/)
body = File.read(file_path).force_encoding(Encoding::BINARY)
size = body.bytesize
{ {
status: responce_status, status: response_status,
headers: remote_trace_response_headers(responce_status, range[1].to_i, range[2].to_i), headers: remote_url_response_headers(response_status, range[1].to_i, range[2].to_i, size),
body: range_trace_body(range[1].to_i, range[2].to_i) body: body[range[1].to_i..range[2].to_i]
} }
end end
def remote_trace_response_headers(responce_status, from, to) def remote_url_response_headers(response_status, from, to, size)
headers = { 'Content-Type' => 'text/plain' } { 'Content-Type' => 'text/plain' }.tap do |headers|
if response_status == 206
if responce_status == 206 headers.merge!('Content-Range' => "bytes #{from}-#{to}/#{size}")
headers.merge('Content-Range' => "bytes #{from}-#{to}/#{remote_trace_size}")
end
headers
end end
def range_trace_body(from, to)
remote_trace_body[from..to]
end end
def remote_trace_body
@remote_trace_body ||= File.read(expand_fixture_path('trace/sample_trace'))
.force_encoding(Encoding::BINARY)
end
def remote_trace_size
remote_trace_body.bytesize
end end
def set_smaller_buffer_size_than(file_size) def set_smaller_buffer_size_than(file_size)
blocks = (file_size / 128) blocks = (file_size / 128)
new_size = (blocks / 2) * 128 new_size = (blocks / 2) * 128
stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size) stub_const("Gitlab::HttpIO::BUFFER_SIZE", new_size)
end end
def set_larger_buffer_size_than(file_size) def set_larger_buffer_size_than(file_size)
blocks = (file_size / 128) blocks = (file_size / 128)
new_size = (blocks * 2) * 128 new_size = (blocks * 2) * 128
stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size) stub_const("Gitlab::HttpIO::BUFFER_SIZE", new_size)
end end
end end
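A minimal sketch of how a spec might drive the renamed helpers; the URL and fixture path below are placeholders, and Gitlab::HttpIO is only assumed here to take a URL and a size:

# Sketch only; URL and fixture path are placeholders, not part of this change.
describe 'reading a remote file through Gitlab::HttpIO' do
  include HttpIOHelpers

  let(:url)       { 'http://object-storage/some-file' }
  let(:file_path) { expand_fixture_path('trace/sample_trace') }

  before do
    stub_remote_url_206(url, file_path)                 # serve the fixture in Range chunks
    set_smaller_buffer_size_than(File.size(file_path))  # force more than one chunked read
  end

  it 'streams the whole fixture' do
    io = Gitlab::HttpIO.new(url, File.size(file_path))

    expect(io.read).to eq(File.binread(file_path))
  end
end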
...@@ -68,4 +68,66 @@ describe GitlabUploader do ...@@ -68,4 +68,66 @@ describe GitlabUploader do
expect(subject.file.path).to match(/#{subject.cache_dir}/) expect(subject.file.path).to match(/#{subject.cache_dir}/)
end end
end end
describe '#open' do
context 'when trace is stored in File storage' do
context 'when file exists' do
let(:file) do
fixture_file_upload('spec/fixtures/trace/sample_trace', 'text/plain')
end
before do
subject.store!(file)
end
it 'returns io stream' do
expect(subject.open).to be_a(IO)
end
it 'when passing block it yields' do
expect { |b| subject.open(&b) }.to yield_control
end
end
context 'when file does not exist' do
it 'returns nil' do
expect(subject.open).to be_nil
end
it 'when passing block it does not yield' do
expect { |b| subject.open(&b) }.not_to yield_control
end
end
end
context 'when trace is stored in Object storage' do
before do
allow(subject).to receive(:file_storage?) { false }
end
context 'when file exists' do
before do
allow(subject).to receive(:url) { 'http://object_storage.com/trace' }
end
it 'returns http io stream' do
expect(subject.open).to be_a(Gitlab::HttpIO)
end
it 'when passing block it yields' do
expect { |b| subject.open(&b) }.to yield_control.once
end
end
context 'when file does not exist' do
it 'returns nil' do
expect(subject.open).to be_nil
end
it 'when passing block it does not yield' do
expect { |b| subject.open(&b) }.not_to yield_control
end
end
end
end
end end
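For completeness, a hedged sketch of the block form of #open that the examples above cover; some_uploader is a placeholder for any GitlabUploader instance with a stored file:

# Sketch only; some_uploader stands for any GitlabUploader subclass instance.
# With a block, #open is expected to close the stream afterwards and to return
# nil (without yielding) when no file is attached.
first_kb = some_uploader.open { |stream| stream.read(1024) }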
...@@ -23,43 +23,6 @@ describe JobArtifactUploader do ...@@ -23,43 +23,6 @@ describe JobArtifactUploader do
store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z] store_dir: %r[\h{2}/\h{2}/\h{64}/\d{4}_\d{1,2}_\d{1,2}/\d+/\d+\z]
end end
describe '#open' do
subject { uploader.open }
context 'when trace is stored in File storage' do
context 'when file exists' do
let(:file) do
fixture_file_upload('spec/fixtures/trace/sample_trace', 'text/plain')
end
before do
uploader.store!(file)
end
it 'returns io stream' do
is_expected.to be_a(IO)
end
end
context 'when file does not exist' do
it 'returns nil' do
is_expected.to be_nil
end
end
end
context 'when trace is stored in Object storage' do
before do
allow(uploader).to receive(:file_storage?) { false }
allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
end
it 'returns http io stream' do
is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
end
end
end
context 'file is stored in valid local_path' do context 'file is stored in valid local_path' do
let(:file) do let(:file) do
fixture_file_upload('spec/fixtures/ci_build_artifacts.zip', 'application/zip') fixture_file_upload('spec/fixtures/ci_build_artifacts.zip', 'application/zip')
......