Verified commit 3b5368c0 authored by Tomasz Maczukin
Introduce modular structure for shells/cache

parent edd7ee23
This commit is part of merge request !968.
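For orientation before the diff: after this commit the shells package no longer talks to Minio/S3 directly; it asks the new cache.Factory for an Adapter and only consumes the pre-signed URLs that adapter returns. A minimal sketch of that call path (cacheURLs is a hypothetical helper for illustration, not code from the commit):

package shells

import (
    "net/url"

    "gitlab.com/gitlab-org/gitlab-runner/common"
    "gitlab.com/gitlab-org/gitlab-runner/shells/cache"
)

// cacheURLs sketches the new call path: cache.Factory picks an adapter based
// on the runner's cache configuration (returning nil when no cache is
// configured, the type is unknown, or the object name is empty), and the
// adapter produces the pre-signed URLs.
func cacheURLs(build *common.Build, key string) (download, upload *url.URL) {
    adapter := cache.Factory(build, key)
    if adapter == nil {
        return nil, nil
    }
    return adapter.GetDownloadURL(), adapter.GetUploadURL()
}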
@@ -154,7 +154,8 @@ mocks: $(MOCKERY)
GOPATH=$(ORIGINAL_GOPATH) mockery $(MOCKERY_FLAGS) -dir=./vendor/github.com/ayufan/golang-kardianos-service -output=./helpers/service/mocks -name='(Interface|Logger)'
GOPATH=$(ORIGINAL_GOPATH) mockery $(MOCKERY_FLAGS) -dir=./helpers/docker -all -inpkg
GOPATH=$(ORIGINAL_GOPATH) mockery $(MOCKERY_FLAGS) -dir=./common -all -inpkg
GOPATH=$(ORIGINAL_GOPATH) mockery $(MOCKERY_FLAGS) -dir=./shells -name fakeIAMCredentialsProvider -inpkg
GOPATH=$(ORIGINAL_GOPATH) mockery $(MOCKERY_FLAGS) -dir=./shells/cache -all -inpkg
GOPATH=$(ORIGINAL_GOPATH) mockery $(MOCKERY_FLAGS) -dir=./shells/cache/s3 -all -inpkg
test-docker:
make test-docker-image IMAGE=centos:6 TYPE=rpm
......
package shells
import (
"bytes"
"encoding/xml"
"io/ioutil"
"net/http"
"net/url"
"path"
"strconv"
"time"
"github.com/minio/minio-go"
"github.com/minio/minio-go/pkg/credentials"
"github.com/sirupsen/logrus"
"gitlab.com/gitlab-org/gitlab-runner/common"
"gitlab.com/gitlab-org/gitlab-runner/shells/cache"
)
type bucketLocationTripper struct {
bucketLocation string
}
// The Minio Golang library always attempts to query the bucket location and
// currently has no way of statically setting that value. To avoid that
// lookup, the Runner cache uses the library only to generate the URLs,
// forgoing the library's API for uploading and downloading files. The custom
// RoundTripper stubs out any network requests that would normally be made via
// the library.
func (b *bucketLocationTripper) RoundTrip(req *http.Request) (res *http.Response, err error) {
var buffer bytes.Buffer
xml.NewEncoder(&buffer).Encode(b.bucketLocation)
res = &http.Response{
StatusCode: http.StatusOK,
Body: ioutil.NopCloser(&buffer),
}
return
}
func (b *bucketLocationTripper) CancelRequest(req *http.Request) {
// Do nothing
}
func getCacheObjectName(build *common.Build, cache *common.CacheConfig, key string) string {
if key == "" {
return ""
}
runnerSegment := ""
if !cache.Shared {
runnerSegment = path.Join("runner", build.Runner.ShortDescription())
}
return path.Join(cache.Path, runnerSegment, "project", strconv.Itoa(build.JobInfo.ProjectID), key)
}
type fakeIAMCredentialsProvider interface {
credentials.Provider
}
var iamFactory = func() *credentials.Credentials {
return credentials.NewIAM("")
}
func getCacheStorageClient(cache *common.CacheConfig) (scl *minio.Client, err error) {
// If the server address or credentials aren't specified then use IAM
// instance profile credentials and talk to "real" S3.
if cache.ServerAddress == "" || cache.AccessKey == "" || cache.SecretKey == "" {
iam := iamFactory()
scl, err = minio.NewWithCredentials("s3.amazonaws.com", iam, true, "")
} else {
scl, err = minio.New(cache.ServerAddress, cache.AccessKey, cache.SecretKey, !cache.Insecure)
}
if err != nil {
logrus.Warningln(err)
return
}
scl.SetCustomTransport(&bucketLocationTripper{cache.BucketLocation})
return
}
func getS3DownloadURL(build *common.Build, key string) (url *url.URL) {
cache := build.Runner.Cache
objectName := getCacheObjectName(build, cache, key)
if objectName == "" {
return
}
scl, err := getCacheStorageClient(cache)
if err != nil {
logrus.Warningln(err)
return
}
url, err = scl.PresignedGetObject(cache.BucketName, objectName, time.Second*time.Duration(build.RunnerInfo.Timeout), nil)
if err != nil {
logrus.Warningln(err)
return
}
return
}
func getCacheDownloadURL(build *common.Build, key string) (url *url.URL) {
cache := build.Runner.Cache
if cache == nil {
return
}
switch cache.Type {
case "s3":
return getS3DownloadURL(build, key)
}
return
}
func getS3UploadURL(build *common.Build, key string) (url *url.URL) {
    cache := build.Runner.Cache
    objectName := getCacheObjectName(build, cache, key)
    if objectName == "" {
        return
    }
    scl, err := getCacheStorageClient(cache)
    if err != nil {
        logrus.Warningln(err)
        return
    }
    url, err = scl.PresignedPutObject(cache.BucketName, objectName, time.Second*time.Duration(build.RunnerInfo.Timeout))
    if err != nil {
        logrus.Warningln(err)
        return
    }
    return
}
func getCacheUploadURL(build *common.Build, key string) (url *url.URL) {
    cache := build.Runner.Cache
    if cache == nil {
        return
    }
    switch cache.Type {
    case "s3":
        return getS3UploadURL(build, key)
    }
    return
}
// New implementations added by this commit (the S3-specific code above moves
// to shells/cache and shells/cache/s3):
func getCacheDownloadURL(build *common.Build, key string) (url *url.URL) {
    adapter := cache.Factory(build, key)
    if adapter == nil {
        return
    }
    return adapter.GetDownloadURL()
}
func getCacheUploadURL(build *common.Build, key string) (url *url.URL) {
    adapter := cache.Factory(build, key)
    if adapter == nil {
        return
    }
    return adapter.GetUploadURL()
}
package cache
import (
"net/url"
"path"
"strconv"
"github.com/sirupsen/logrus"
"gitlab.com/gitlab-org/gitlab-runner/common"
"gitlab.com/gitlab-org/gitlab-runner/shells/cache/s3"
)
type Adapter interface {
SetBuild(build *common.Build)
SetConfig(config *common.CacheConfig)
SetObjectName(objectName string)
GetDownloadURL() *url.URL
GetUploadURL() *url.URL
}
type initializer func() Adapter
var adapters = map[string]initializer{
"s3": func() Adapter { return new(s3.Adapter) },
}
var Factory = func(build *common.Build, key string) Adapter {
defined, config := cacheConfig(build)
if !defined {
logrus.Debugln("Cache config not defined. Skipping adapter selection.")
return nil
}
init, ok := adapters[config.Type]
if !ok {
logrus.Errorf("Cache adapter of type '%s' is unknown", config.Type)
return nil
}
objectName := generateObjectName(build, config, key)
if objectName == "" {
logrus.Debugln("ObjectName is empty. Skipping adapter selection.")
return nil
}
adapter := init()
adapter.SetBuild(build)
adapter.SetConfig(config)
adapter.SetObjectName(objectName)
return adapter
}
func cacheConfig(build *common.Build) (defined bool, config *common.CacheConfig) {
if build == nil || build.Runner == nil || build.Runner.Cache == nil {
defined = false
return
}
defined = true
config = build.Runner.Cache
return
}
func generateObjectName(build *common.Build, config *common.CacheConfig, key string) string {
if key == "" {
return ""
}
runnerSegment := ""
if !config.Shared {
runnerSegment = path.Join("runner", build.Runner.ShortDescription())
}
return path.Join(config.Path, runnerSegment, "project", strconv.Itoa(build.JobInfo.ProjectID), key)
}
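The adapters map above is the extension point of the new modular structure: each cache type maps to an initializer that returns a fresh Adapter, and Factory wires the build, config, and object name into whichever adapter matches config.Type. As a hedged illustration only (not part of this commit), a hypothetical in-package no-op adapter could be registered like this:

package cache

import (
    "net/url"

    "gitlab.com/gitlab-org/gitlab-runner/common"
)

// noopAdapter is a hypothetical adapter used purely to illustrate the
// extension point; it is not part of this merge request.
type noopAdapter struct {
    objectName string
}

func (a *noopAdapter) SetBuild(build *common.Build)         {}
func (a *noopAdapter) SetConfig(config *common.CacheConfig) {}
func (a *noopAdapter) SetObjectName(objectName string)      { a.objectName = objectName }
func (a *noopAdapter) GetDownloadURL() *url.URL             { return nil }
func (a *noopAdapter) GetUploadURL() *url.URL               { return nil }

func init() {
    // After this registration, Factory selects noopAdapter whenever the
    // runner's cache configuration has Type set to "noop".
    adapters["noop"] = func() Adapter { return new(noopAdapter) }
}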
package cache
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"gitlab.com/gitlab-org/gitlab-runner/common"
)
func TestFactoryCreateExisting(t *testing.T) {
tests := map[string]struct {
key string
expectedNil bool
}{
"key-is-not-empty": {key: "key", expectedNil: false},
"key-is-empty": {key: "", expectedNil: true},
}
for name, test := range tests {
t.Run(name, func(t *testing.T) {
adapterMock := new(MockAdapter)
adapters = map[string]initializer{
"test": func() Adapter { return adapterMock },
}
config := &common.CacheConfig{
Type: "test",
}
build := &common.Build{
Runner: &common.RunnerConfig{},
}
build.Runner.Cache = config
if test.key != "" {
adapterMock.On("SetBuild", build).Once()
adapterMock.On("SetConfig", config).Once()
adapterMock.On("SetObjectName", mock.Anything).Once()
}
defer adapterMock.AssertExpectations(t)
adapter := Factory(build, test.key)
if test.expectedNil {
assert.Nil(t, adapter)
} else {
assert.Equal(t, adapterMock, adapter)
}
})
}
}
func TestFactoryCreateUnexisting(t *testing.T) {
adapters = map[string]initializer{}
config := &common.CacheConfig{
Type: "test",
}
build := &common.Build{
Runner: &common.RunnerConfig{},
}
build.Runner.Cache = config
adapter := Factory(build, "key")
assert.Nil(t, adapter)
}
func defaultCacheConfig() *common.CacheConfig {
return &common.CacheConfig{
Type: "test",
BucketName: "test-bucket",
BucketLocation: "test-location",
}
}
func defaultBuild(cacheConfig *common.CacheConfig) *common.Build {
return &common.Build{
JobResponse: common.JobResponse{
JobInfo: common.JobInfo{
ProjectID: 10,
},
RunnerInfo: common.RunnerInfo{
Timeout: 3600,
},
},
Runner: &common.RunnerConfig{
RunnerCredentials: common.RunnerCredentials{
Token: "longtoken",
},
RunnerSettings: common.RunnerSettings{
Cache: cacheConfig,
},
},
}
}
func TestGenerateObjectNameWhenKeyIsEmptyResultIsAlsoEmpty(t *testing.T) {
s3Cache := defaultCacheConfig()
s3CacheBuild := defaultBuild(s3Cache)
url := generateObjectName(s3CacheBuild, s3Cache, "")
assert.Empty(t, url)
}
func TestGetCacheObjectName(t *testing.T) {
s3Cache := defaultCacheConfig()
s3CacheBuild := defaultBuild(s3Cache)
url := generateObjectName(s3CacheBuild, s3Cache, "key")
assert.Equal(t, "runner/longtoke/project/10/key", url)
}
func TestGetCacheObjectNameWhenPathIsSetThenUrlContainsIt(t *testing.T) {
s3Cache := defaultCacheConfig()
s3Cache.Path = "whatever"
s3CacheBuild := defaultBuild(s3Cache)
url := generateObjectName(s3CacheBuild, s3Cache, "key")
assert.Equal(t, "whatever/runner/longtoke/project/10/key", url)
}
func TestGetCacheObjectNameWhenPathHasMultipleSegmentIsSetThenUrlContainsIt(t *testing.T) {
s3Cache := defaultCacheConfig()
s3Cache.Path = "some/other/path/goes/here"
s3CacheBuild := defaultBuild(s3Cache)
url := generateObjectName(s3CacheBuild, s3Cache, "key")
assert.Equal(t, "some/other/path/goes/here/runner/longtoke/project/10/key", url)
}
func TestGetCacheObjectNameWhenPathIsNotSetThenUrlDoesNotContainIt(t *testing.T) {
s3Cache := defaultCacheConfig()
s3Cache.Path = ""
s3CacheBuild := defaultBuild(s3Cache)
url := generateObjectName(s3CacheBuild, s3Cache, "key")
assert.Equal(t, "runner/longtoke/project/10/key", url)
}
func TestGetCacheObjectNameWhenSharedFlagIsFalseThenRunnerSegmentExistsInTheUrl(t *testing.T) {
s3Cache := defaultCacheConfig()
s3Cache.Shared = false
s3CacheBuild := defaultBuild(s3Cache)
url := generateObjectName(s3CacheBuild, s3Cache, "key")
assert.Equal(t, "runner/longtoke/project/10/key", url)
}
func TestGetCacheObjectNameWhenSharedFlagIsTrueThenRunnerSegmentShouldNotBePresent(t *testing.T) {
s3Cache := defaultCacheConfig()
s3Cache.Shared = true
s3CacheBuild := defaultBuild(s3Cache)
url := generateObjectName(s3CacheBuild, s3Cache, "key")
assert.Equal(t, "project/10/key", url)
}
// Code generated by mockery v1.0.0. DO NOT EDIT.
// This comment works around https://github.com/vektra/mockery/issues/155
package cache
import common "gitlab.com/gitlab-org/gitlab-runner/common"
import mock "github.com/stretchr/testify/mock"
import url "net/url"
// MockAdapter is an autogenerated mock type for the Adapter type
type MockAdapter struct {
mock.Mock
}
// GetDownloadURL provides a mock function with given fields:
func (_m *MockAdapter) GetDownloadURL() *url.URL {
ret := _m.Called()
var r0 *url.URL
if rf, ok := ret.Get(0).(func() *url.URL); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*url.URL)
}
}
return r0
}
// GetUploadURL provides a mock function with given fields:
func (_m *MockAdapter) GetUploadURL() *url.URL {
ret := _m.Called()
var r0 *url.URL
if rf, ok := ret.Get(0).(func() *url.URL); ok {
r0 = rf()
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*url.URL)
}
}
return r0
}
// SetBuild provides a mock function with given fields: build
func (_m *MockAdapter) SetBuild(build *common.Build) {
_m.Called(build)
}
// SetConfig provides a mock function with given fields: config
func (_m *MockAdapter) SetConfig(config *common.CacheConfig) {
_m.Called(config)
}
// SetObjectName provides a mock function with given fields: objectName
func (_m *MockAdapter) SetObjectName(objectName string) {
_m.Called(objectName)
}
package s3
import (
"net/url"
"time"
"github.com/minio/minio-go"
"github.com/minio/minio-go/pkg/credentials"
"github.com/sirupsen/logrus"
"gitlab.com/gitlab-org/gitlab-runner/common"
)
type fakeIAMCredentialsProvider interface {
credentials.Provider
}
var iamFactory = func() *credentials.Credentials {
return credentials.NewIAM("")
}
type s3URLGenerator func(scl *minio.Client, bucketName string, objectName string, expires time.Duration) (*url.URL, error)
type Adapter struct {
build *common.Build
config *common.CacheConfig
objectName string
}
func (a *Adapter) SetBuild(build *common.Build) {
a.build = build
}
func (a *Adapter) SetConfig(config *common.CacheConfig) {
a.config = config
}
func (a *Adapter) SetObjectName(objectName string) {
a.objectName = objectName
}
func (a *Adapter) GetDownloadURL() *url.URL {
return a.getS3URL(getS3DownloadURL)
}
func getS3DownloadURL(scl *minio.Client, bucketName string, objectName string, expires time.Duration) (*url.URL, error) {
return scl.PresignedGetObject(bucketName, objectName, expires, nil)
}
func (a *Adapter) GetUploadURL() *url.URL {
return a.getS3URL(getS3UploadURL)
}
func getS3UploadURL(scl *minio.Client, bucketName string, objectName string, expires time.Duration) (*url.URL, error) {
return scl.PresignedPutObject(bucketName, objectName, expires)
}
func (a *Adapter) getS3URL(generator s3URLGenerator) (url *url.URL) {
scl, err := a.getCacheStorageClient()
if err != nil {
logrus.Errorf("error while creating S3 cache storage client: %v", err)
return
}
url, err = generator(scl, a.config.BucketName, a.objectName, time.Second*time.Duration(a.build.RunnerInfo.Timeout))
if err != nil {
logrus.Errorf("error while generating S3 pre-signed URL: %v", err)
return
}
return
}
func (a *Adapter) getCacheStorageClient() (scl *minio.Client, err error) {
// If the server address or credentials aren't specified then use IAM
// instance profile credentials and talk to "real" S3.
if a.config.ServerAddress == "" || a.config.AccessKey == "" || a.config.SecretKey == "" {
iam := iamFactory()
scl, err = minio.NewWithCredentials("s3.amazonaws.com", iam, true, "")
} else {
scl, err = minio.New(a.config.ServerAddress, a.config.AccessKey, a.config.SecretKey, !a.config.Insecure)
}
if err != nil {
return
}
scl.SetCustomTransport(&bucketLocationTripper{a.config.BucketLocation})
return
}
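For completeness, basic usage of the s3 adapter is just the three setters followed by a URL getter; when ServerAddress, AccessKey, or SecretKey is empty, getCacheStorageClient falls back to IAM instance-profile credentials against s3.amazonaws.com, as shown above. A minimal hedged sketch follows (exampleUsage, the endpoint, and the object name are made up for illustration; this is not code from the commit):

package s3

import (
    "fmt"

    "gitlab.com/gitlab-org/gitlab-runner/common"
)

// exampleUsage is a hypothetical helper showing how the adapter is driven;
// both URL calls stay offline thanks to bucketLocationTripper.
func exampleUsage() {
    config := &common.CacheConfig{
        Type:           "s3",
        ServerAddress:  "minio.example.com:9000", // hypothetical endpoint
        AccessKey:      "access",
        SecretKey:      "secret",
        BucketName:     "runner-cache",
        BucketLocation: "us-east-1",
    }
    build := &common.Build{
        JobResponse: common.JobResponse{
            // Pre-signed URLs expire with the job timeout.
            RunnerInfo: common.RunnerInfo{Timeout: 3600},
        },
    }

    adapter := new(Adapter)
    adapter.SetBuild(build)
    adapter.SetConfig(config)
    adapter.SetObjectName("project/10/key")

    fmt.Println(adapter.GetDownloadURL()) // pre-signed GET URL
    fmt.Println(adapter.GetUploadURL())   // pre-signed PUT URL
}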
package s3
import (
"testing"
"github.com/minio/minio-go/pkg/credentials"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab-runner/common"
)
func defaultS3CacheFactory() *common.CacheConfig {
return &common.CacheConfig{
Type: "s3",
ServerAddress: "server.com",
AccessKey: "access",
SecretKey: "key",
BucketName: "test",
BucketLocation: "location",
}
}
func iamS3CacheFactory(t *testing.T) (*common.CacheConfig, *mockFakeIAMCredentialsProvider) {
cacheConfig := defaultS3CacheFactory()
cacheConfig.ServerAddress = ""
cacheConfig.AccessKey = ""
cacheConfig.SecretKey = ""
iamProvider := &mockFakeIAMCredentialsProvider{}
iamFactory = func() *credentials.Credentials {
return credentials.New(iamProvider)
}
return cacheConfig, iamProvider
}
func defaults3CacheBuild(cacheConfig *common.CacheConfig) *common.Build {
return &common.Build{
JobResponse: common.JobResponse{
RunnerInfo: common.RunnerInfo{
Timeout: 3600,
},
},
Runner: &common.RunnerConfig{
RunnerSettings: common.RunnerSettings{
Cache: cacheConfig,
},
},
}
}
func TestS3CacheUploadURL(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3Cache.Insecure = false
s3CacheBuild := defaults3CacheBuild(s3Cache)
adapter := new(Adapter)
adapter.SetBuild(s3CacheBuild)
adapter.SetConfig(s3Cache)
adapter.SetObjectName("key")
url := adapter.GetUploadURL()
require.NotNil(t, url)
assert.Equal(t, s3Cache.ServerAddress, url.Host)
assert.Regexp(t, "^https://", url)
assert.Contains(t, url.String(), "X-Amz-Credential=access%2F")
}
func TestS3CacheUploadURLForIamCredentials(t *testing.T) {
s3Cache, iamProvider := iamS3CacheFactory(t)
fakeValue := credentials.Value{
AccessKeyID: "access-from-iam",
SecretAccessKey: "secret-from-iam",
}
iamProvider.On("Retrieve").Return(fakeValue, nil).Once()
iamProvider.On("IsExpired").Return(false)
defer iamProvider.AssertExpectations(t)
s3Cache.Insecure = false
s3CacheBuild := defaults3CacheBuild(s3Cache)
adapter := new(Adapter)
adapter.SetBuild(s3CacheBuild)
adapter.SetConfig(s3Cache)
adapter.SetObjectName("key")
url := adapter.GetUploadURL()
require.NotNil(t, url)
assert.Equal(t, "test.s3.amazonaws.com", url.Host)
assert.Regexp(t, "^https://", url)
assert.Contains(t, url.String(), "X-Amz-Credential=access-from-iam%2F")
}
func TestS3CacheUploadInsecureURL(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3Cache.Insecure = true
s3CacheBuild := defaults3CacheBuild(s3Cache)
adapter := new(Adapter)
adapter.SetBuild(s3CacheBuild)
adapter.SetConfig(s3Cache)
adapter.SetObjectName("key")
url := adapter.GetUploadURL()
require.NotNil(t, url)
assert.Equal(t, s3Cache.ServerAddress, url.Host)
assert.Regexp(t, "^http://", url)
}
func TestS3CacheDownloadURL(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3Cache.Insecure = false
s3CacheBuild := defaults3CacheBuild(s3Cache)
adapter := new(Adapter)
adapter.SetBuild(s3CacheBuild)
adapter.SetConfig(s3Cache)
adapter.SetObjectName("key")
url := adapter.GetDownloadURL()
require.NotNil(t, url)
assert.Equal(t, s3Cache.ServerAddress, url.Host)
assert.Regexp(t, "^https://", url)
}
func TestS3CacheDownloadInsecureURL(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3Cache.Insecure = true
s3CacheBuild := defaults3CacheBuild(s3Cache)
adapter := new(Adapter)
adapter.SetBuild(s3CacheBuild)
adapter.SetConfig(s3Cache)
adapter.SetObjectName("key")
url := adapter.GetDownloadURL()
require.NotNil(t, url)
assert.Equal(t, s3Cache.ServerAddress, url.Host)
assert.Regexp(t, "^http://", url)
}
package s3
import (
"bytes"
"encoding/xml"
"io/ioutil"
"net/http"
)
type bucketLocationTripper struct {
bucketLocation string
}
// The Minio Golang library always attempts to query the bucket location and
// currently has no way of statically setting that value. To avoid that
// lookup, the Runner cache uses the library only to generate the URLs,
// forgoing the library's API for uploading and downloading files. The custom
// RoundTripper stubs out any network requests that would normally be made via
// the library.
func (b *bucketLocationTripper) RoundTrip(req *http.Request) (res *http.Response, err error) {
var buffer bytes.Buffer
xml.NewEncoder(&buffer).Encode(b.bucketLocation)
res = &http.Response{
StatusCode: http.StatusOK,
Body: ioutil.NopCloser(&buffer),
}
return
}
func (b *bucketLocationTripper) CancelRequest(req *http.Request) {
// Do nothing
}
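To make the stubbed behaviour concrete, here is a small hedged sketch (demoTripper is a hypothetical helper, not part of the commit): every request handed to the tripper gets a 200 response whose body is just the configured bucket location, XML-encoded, with no network I/O involved.

package s3

import (
    "fmt"
    "io/ioutil"
    "net/http"
)

// demoTripper shows the stubbed response: regardless of the request, the
// caller receives the XML-encoded bucket location with status 200.
func demoTripper() {
    tripper := &bucketLocationTripper{bucketLocation: "us-east-1"}

    req, _ := http.NewRequest(http.MethodGet, "https://s3.amazonaws.com/some-bucket/?location", nil)
    res, _ := tripper.RoundTrip(req)

    body, _ := ioutil.ReadAll(res.Body)
    fmt.Println(res.StatusCode, string(body)) // 200 <string>us-east-1</string>
}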
@@ -2,7 +2,7 @@
// This comment works around https://github.com/vektra/mockery/issues/155
package shells // before this commit
package s3 // after this commit
import credentials "github.com/minio/minio-go/pkg/credentials"
import mock "github.com/stretchr/testify/mock"
......
// Pre-existing S3-specific cache tests in package shells, replaced by this
// commit (equivalent coverage now lives in shells/cache and shells/cache/s3):
package shells
import (
    "testing"
    "github.com/minio/minio-go/pkg/credentials"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "gitlab.com/gitlab-org/gitlab-runner/common"
)
func defaultS3CacheFactory() *common.CacheConfig {
    return &common.CacheConfig{
        Type:           "s3",
        ServerAddress:  "server.com",
        AccessKey:      "access",
        SecretKey:      "key",
        BucketName:     "test",
        BucketLocation: "location",
    }
}
func iamS3CacheFactory(t *testing.T) (*common.CacheConfig, *mockFakeIAMCredentialsProvider) {
    cacheConfig := defaultS3CacheFactory()
    cacheConfig.ServerAddress = ""
    cacheConfig.AccessKey = ""
    cacheConfig.SecretKey = ""
    iamProvider := &mockFakeIAMCredentialsProvider{}
    iamFactory = func() *credentials.Credentials {
        return credentials.New(iamProvider)
    }
    return cacheConfig, iamProvider
}
func defaults3CacheBuild(cacheConfig *common.CacheConfig) *common.Build {
    return &common.Build{
        JobResponse: common.JobResponse{
            JobInfo: common.JobInfo{
                ProjectID: 10,
            },
            RunnerInfo: common.RunnerInfo{
                Timeout: 3600,
            },
        },
        Runner: &common.RunnerConfig{
            RunnerCredentials: common.RunnerCredentials{
                Token: "longtoken",
            },
            RunnerSettings: common.RunnerSettings{
                Cache: cacheConfig,
            },
        },
    }
}
func TestS3CacheUploadURL(t *testing.T) {
    s3Cache := defaultS3CacheFactory()
    s3Cache.Insecure = false
    s3CacheBuild := defaults3CacheBuild(s3Cache)
    url := getCacheUploadURL(s3CacheBuild, "key")
    require.NotNil(t, url)
    assert.Equal(t, s3Cache.ServerAddress, url.Host)
    assert.Regexp(t, "^https://", url)
    assert.Contains(t, url.String(), "X-Amz-Credential=access%2F")
}
func TestS3CacheUploadURLForIamCredentials(t *testing.T) {
    s3Cache, iamProvider := iamS3CacheFactory(t)
    fakeValue := credentials.Value{
        AccessKeyID:     "access-from-iam",
        SecretAccessKey: "secret-from-iam",
    }
    iamProvider.On("Retrieve").Return(fakeValue, nil).Once()
    iamProvider.On("IsExpired").Return(false)
    defer iamProvider.AssertExpectations(t)
    s3Cache.Insecure = false
    s3CacheBuild := defaults3CacheBuild(s3Cache)
    url := getCacheUploadURL(s3CacheBuild, "key")
    require.NotNil(t, url)
    assert.Equal(t, "test.s3.amazonaws.com", url.Host)
    assert.Regexp(t, "^https://", url)
    assert.Contains(t, url.String(), "X-Amz-Credential=access-from-iam%2F")
}
func TestS3CacheUploadInsecureURL(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3Cache.Insecure = true
s3CacheBuild := defaults3CacheBuild(s3Cache)
url := getCacheUploadURL(s3CacheBuild, "key")
require.NotNil(t, url)
assert.Equal(t, s3Cache.ServerAddress, url.Host)
assert.Regexp(t, "^http://", url)
}
func TestS3CacheDownloadURL(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3Cache.Insecure = false
s3CacheBuild := defaults3CacheBuild(s3Cache)
url := getCacheDownloadURL(s3CacheBuild, "key")
require.NotNil(t, url)
assert.Equal(t, s3Cache.ServerAddress, url.Host)
assert.Regexp(t, "^https://", url)
}
func TestS3CacheDownloadInsecureURL(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3Cache.Insecure = true
s3CacheBuild := defaults3CacheBuild(s3Cache)
url := getCacheDownloadURL(s3CacheBuild, "key")
require.NotNil(t, url)
assert.Equal(t, s3Cache.ServerAddress, url.Host)
assert.Regexp(t, "^http://", url)
}
func TestGetCacheObjectNameWhenKeyIsEmptyResultIsAlsoEmpty(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3CacheBuild := defaults3CacheBuild(s3Cache)
url := getCacheObjectName(s3CacheBuild, s3Cache, "")
require.Empty(t, url)
}
func TestGetCacheObjectName(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3CacheBuild := defaults3CacheBuild(s3Cache)
url := getCacheObjectName(s3CacheBuild, s3Cache, "key")
require.Equal(t, "runner/longtoke/project/10/key", url)
}
func TestGetCacheObjectNameWhenPathIsSetThenUrlContainsIt(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3Cache.Path = "whatever"
s3CacheBuild := defaults3CacheBuild(s3Cache)
url := getCacheObjectName(s3CacheBuild, s3Cache, "key")
require.Equal(t, "whatever/runner/longtoke/project/10/key", url)
}
func TestGetCacheObjectNameWhenPathHasMultipleSegmentIsSetThenUrlContainsIt(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3Cache.Path = "some/other/path/goes/here"
s3CacheBuild := defaults3CacheBuild(s3Cache)
url := getCacheObjectName(s3CacheBuild, s3Cache, "key")
require.Equal(t, "some/other/path/goes/here/runner/longtoke/project/10/key", url)
}
func TestGetCacheObjectNameWhenPathIsNotSetThenUrlDoesNotContainIt(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3Cache.Path = ""
s3CacheBuild := defaults3CacheBuild(s3Cache)
url := getCacheObjectName(s3CacheBuild, s3Cache, "key")
require.Equal(t, "runner/longtoke/project/10/key", url)
}
func TestGetCacheObjectNameWhenSharedFlagIsFalseThenRunnerSegmentExistsInTheUrl(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3Cache.Shared = false
s3CacheBuild := defaults3CacheBuild(s3Cache)
url := getCacheObjectName(s3CacheBuild, s3Cache, "key")
require.Equal(t, "runner/longtoke/project/10/key", url)
}
func TestGetCacheObjectNameWhenSharedFlagIsTrueThenRunnerSegmentShouldNotBePresent(t *testing.T) {
s3Cache := defaultS3CacheFactory()
s3Cache.Shared = true
s3CacheBuild := defaults3CacheBuild(s3Cache)
url := getCacheObjectName(s3CacheBuild, s3Cache, "key")
require.Equal(t, "project/10/key", url)
}
// New test added by this commit: it exercises getCacheDownloadURL and
// getCacheUploadURL through the cache.Factory/cache.Adapter abstraction (the
// test file now imports "net/url" and
// "gitlab.com/gitlab-org/gitlab-runner/shells/cache" instead of the minio
// credentials package):
func TestCacheOperations(t *testing.T) {
    type test struct {
        testedOperation func(build *common.Build, key string) (url *url.URL)
        mockedOperation string
        adapterExists   bool
        adapterURL      *url.URL
        expectedURL     *url.URL
    }
    exampleURL, err := url.Parse("example.com")
    require.NoError(t, err)
    tests := map[string]test{
        "download-url-adapter-exists": {
            testedOperation: getCacheDownloadURL,
            mockedOperation: "GetDownloadURL",
            adapterExists:   true,
            adapterURL:      exampleURL,
            expectedURL:     exampleURL,
        },
        "upload-url-adapter-exists": {
            testedOperation: getCacheUploadURL,
            mockedOperation: "GetUploadURL",
            adapterExists:   true,
            adapterURL:      exampleURL,
            expectedURL:     exampleURL,
        },
        "download-url-adapter-doesnt-exists": {
            testedOperation: getCacheDownloadURL,
            mockedOperation: "GetDownloadURL",
            adapterExists:   false,
            adapterURL:      exampleURL,
            expectedURL:     nil,
        },
        "upload-url-adapter-doesnt-exists": {
            testedOperation: getCacheUploadURL,
            mockedOperation: "GetUploadURL",
            adapterExists:   false,
            adapterURL:      exampleURL,
            expectedURL:     nil,
        },
    }
    for name, tc := range tests {
        t.Run(name, func(t *testing.T) {
            build := &common.Build{
                Runner: &common.RunnerConfig{},
            }
            key := "key"
            var adapter cache.Adapter
            if tc.adapterExists {
                a := new(cache.MockAdapter)
                a.On(tc.mockedOperation).Return(tc.adapterURL)
                defer a.AssertExpectations(t)
                adapter = a
            }
            cache.Factory = func(build *common.Build, key string) cache.Adapter {
                return adapter
            }
            generatedURL := tc.testedOperation(build, key)
            assert.Equal(t, tc.expectedURL, generatedURL)
        })
    }
}