Merge pull request #335 from EWhite613/minio

Officially support minio as a provider

fox1t committed Mar 29, 2024
2 parents d0eabed + 5069d74 commit fd92582
Showing 6 changed files with 220 additions and 2 deletions.
8 changes: 8 additions & 0 deletions .env.example
@@ -16,6 +16,14 @@ S3_ACCESS_KEY=
S3_SECRET_KEY=
S3_REGION=
S3_ENDPOINT=
# MINIO (Use along with AWS S3 Storage Provider)
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
AWS_REGION=
S3_ACCESS_KEY=
S3_SECRET_KEY=
S3_REGION=
S3_ENDPOINT=
# Google Cloud Storage Provider
GCS_PROJECT_ID=
GCS_CLIENT_EMAIL=
13 changes: 11 additions & 2 deletions docs/supported-storage-providers.md
@@ -73,9 +73,18 @@ DigitalOcean Spaces is an S3-compatible object storage that this project also supports.
GCS_CLIENT_EMAIL=
GCS_PRIVATE_KEY=
```

## Azure Blob Storage

1. Create a new Blob Storage.
2. On "Security + networking" tab, copy one of `Connection string` on "Access keys" blade.
3. Set `ABS_CONNECTION_STRING` to the connection string.
3. Set `ABS_CONNECTION_STRING` to the connection string.

## Minio

1. Create an access key.
2. Fill in the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables with the access key you created.
3. Create a bucket.
4. Set `STORAGE_PATH` to the name of the bucket you created.
5. Set `AWS_REGION` (or `S3_REGION`); leave it blank if your deployment has no region.
6. Set `STORAGE_PROVIDER` to `minio`.
7. Set `S3_ENDPOINT` to the MinIO URL (e.g. `http://127.0.0.1:9000`). A complete example follows this list.
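
Putting the steps together, a minimal `.env` sketch for a local MinIO instance could look like this (the bucket name and the `minioadmin` credentials are placeholders; substitute the values from your own deployment):

```
STORAGE_PROVIDER=minio
STORAGE_PATH=turborepo-remote-cache
AWS_ACCESS_KEY_ID=minioadmin
AWS_SECRET_ACCESS_KEY=minioadmin
AWS_REGION=
S3_ENDPOINT=http://127.0.0.1:9000
```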
1 change: 1 addition & 0 deletions src/env.ts
@@ -15,6 +15,7 @@ export const STORAGE_PROVIDERS = {
s3: 's3',
GOOGLE_CLOUD_STORAGE: 'google-cloud-storage',
AZURE_BLOB_STORAGE: 'azure-blob-storage',
MINIO: 'minio',
} as const
export type STORAGE_PROVIDERS =
typeof STORAGE_PROVIDERS[keyof typeof STORAGE_PROVIDERS]
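A side note on the pattern above: the `as const` assertion keeps each value as a string literal type, so indexing the object type by all of its keys yields the union of provider names. A minimal standalone sketch of the pattern (illustrative only, not the project's actual file):

```ts
// `as const` preserves the literal types instead of widening them to `string`.
const PROVIDERS = {
  S3: 's3',
  MINIO: 'minio',
} as const

// Indexed access over every key produces the union of the values:
// type Provider = 's3' | 'minio'
type Provider = typeof PROVIDERS[keyof typeof PROVIDERS]

// Only known provider names compile:
function label(p: Provider): string {
  return `storage provider: ${p}`
}

label('minio') // OK
// label('ftp') // type error: not assignable to parameter of type 'Provider'
```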
14 changes: 14 additions & 0 deletions src/plugins/remote-cache/storage/index.ts
@@ -63,6 +63,20 @@ function createStorageLocation<Provider extends STORAGE_PROVIDERS>(
providerOptions as S3Options
return createS3({ accessKey, secretKey, bucket: path, region, endpoint })
}
case STORAGE_PROVIDERS.MINIO: {
const { accessKey, secretKey, region, endpoint } =
providerOptions as S3Options
return createS3({
accessKey,
secretKey,
bucket: path,
region,
endpoint,
s3OptionsPassthrough: {
s3ForcePathStyle: true,
},
})
}
case STORAGE_PROVIDERS.GOOGLE_CLOUD_STORAGE: {
const { clientEmail, privateKey, projectId } =
providerOptions as GoogleCloudStorageOptions
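The only difference from the plain S3 branch above is `s3ForcePathStyle`. MinIO serves buckets under the endpoint's path (`http://host:9000/<bucket>/<key>`) rather than as virtual-hosted subdomains (`http://<bucket>.host/<key>`), so path-style addressing must be forced. A sketch of roughly what this branch ends up constructing, with placeholder endpoint and credentials:

```ts
import aws from 'aws-sdk'

// Sketch only: the client the MINIO case configures via createS3.
const client = new aws.S3({
  accessKeyId: 'minioadmin', // placeholder credentials
  secretAccessKey: 'minioadmin',
  endpoint: 'http://127.0.0.1:9000', // assumed local MinIO endpoint
  s3ForcePathStyle: true, // keep the bucket name in the URL path
})

// Requests now target http://127.0.0.1:9000/<bucket>/<key>,
// which MinIO can route without wildcard DNS for bucket subdomains.
```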
4 changes: 4 additions & 0 deletions src/plugins/remote-cache/storage/s3.ts
@@ -1,4 +1,5 @@
import aws from 'aws-sdk'
import type { S3 } from 'aws-sdk'
import s3 from 's3-blob-store'

export interface S3Options {
@@ -7,6 +8,7 @@ export interface S3Options {
region?: string
endpoint?: string
bucket: string
s3OptionsPassthrough?: S3.ClientConfiguration
}

// AWS_ envs are default for aws-sdk
@@ -16,6 +18,7 @@ export function createS3({
bucket,
region = process.env.AWS_REGION || process.env.S3_REGION,
endpoint,
s3OptionsPassthrough = {},
}: S3Options) {
const client = new aws.S3({
...(accessKey && secretKey
@@ -32,6 +35,7 @@
...(process.env.NODE_ENV === 'test'
? { sslEnabled: false, s3ForcePathStyle: true }
: {}),
...s3OptionsPassthrough,
})

const location = s3({
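Because `...s3OptionsPassthrough` is spread last in the `aws.S3` constructor call, caller-supplied options override the defaults above, including the test-only ones. A hypothetical call site (bucket name and endpoint are placeholders) might look like this:

```ts
// Sketch: the MinIO branch reuses createS3 unchanged and injects the
// path-style flag through the passthrough rather than forking the setup.
const location = createS3({
  accessKey: process.env.AWS_ACCESS_KEY_ID,
  secretKey: process.env.AWS_SECRET_ACCESS_KEY,
  bucket: 'turborepo-remote-cache', // placeholder bucket name
  endpoint: 'http://127.0.0.1:9000', // placeholder MinIO endpoint
  s3OptionsPassthrough: { s3ForcePathStyle: true },
})
```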
182 changes: 182 additions & 0 deletions test/minio.ts
@@ -0,0 +1,182 @@
import assert from 'node:assert/strict'
import crypto from 'node:crypto'
import { tmpdir } from 'node:os'
import { test } from 'node:test'
import S3erver from 's3rver'

const testEnv = {
NODE_ENV: 'test',
PORT: 3000,
LOG_LEVEL: 'info',
LOG_MODE: 'stdout',
LOG_FILE: 'server.log',
TURBO_TOKEN: ['changeme'],
STORAGE_PROVIDER: 'minio',
STORAGE_PATH: 'turborepo-remote-cache-test',
AWS_ACCESS_KEY_ID: 'S3RVER',
AWS_SECRET_ACCESS_KEY: 'S3RVER',
AWS_REGION: '',
S3_ENDPOINT: 'http://localhost:4568',
}
Object.assign(process.env, testEnv)
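// s3rver listens on http://localhost:4568 by default and accepts the
// fixed credentials S3RVER/S3RVER, which is why testEnv points
// S3_ENDPOINT at port 4568.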

const server = new S3erver({
directory: tmpdir(),
silent: true,
configureBuckets: [
{
name: process.env.STORAGE_PATH,
},
],
})

server.run((err) => {
assert.equal(err, null)
test('Minio', async (t) => {
const artifactId = crypto.randomBytes(20).toString('hex')
const teamId = 'superteam'

const { createApp } = await import('../src/app.js')
const app = createApp({ logger: false })
await app.ready()

await t.test('loads correct env vars', async () => {
assert.equal(app.config.STORAGE_PROVIDER, testEnv.STORAGE_PROVIDER)
assert.equal(app.config.STORAGE_PATH, testEnv.STORAGE_PATH)
assert.equal(app.config.AWS_ACCESS_KEY_ID, testEnv.AWS_ACCESS_KEY_ID)
assert.equal(
app.config.AWS_SECRET_ACCESS_KEY,
testEnv.AWS_SECRET_ACCESS_KEY,
)
assert.equal(app.config.AWS_REGION, testEnv.AWS_REGION)
assert.equal(app.config.S3_ENDPOINT, testEnv.S3_ENDPOINT)
})

await t.test(
'should return 400 when missing authorization header',
async () => {
const response = await app.inject({
method: 'GET',
url: '/v8/artifacts/not-found',
headers: {},
})
assert.equal(response.statusCode, 400)
assert.equal(response.json().message, 'Missing Authorization header')
},
)

await t.test(
'should return 401 when wrong authorization token is provided',
async () => {
const response = await app.inject({
method: 'GET',
url: '/v8/artifacts/not-found',
headers: {
authorization: 'wrong token',
},
})
assert.equal(response.statusCode, 401)
assert.equal(response.json().message, 'Invalid authorization token')
},
)

await t.test(
'should return 400 when missing teamId query parameter',
async () => {
const response = await app.inject({
method: 'GET',
url: '/v8/artifacts/not-found',
headers: {
authorization: 'Bearer changeme',
},
})
assert.equal(response.statusCode, 400)
assert.equal(
response.json().message,
"querystring should have required property 'teamId'",
)
},
)

await t.test('should return 404 on cache miss', async () => {
const response = await app.inject({
method: 'GET',
url: '/v8/artifacts/not-found',
headers: {
authorization: 'Bearer changeme',
},
query: {
teamId: 'superteam',
},
})
assert.equal(response.statusCode, 404)
assert.equal(response.json().message, 'Artifact not found')
})

await t.test('should upload an artifact', async () => {
const response = await app.inject({
method: 'PUT',
url: `/v8/artifacts/${artifactId}`,
headers: {
authorization: 'Bearer changeme',
'content-type': 'application/octet-stream',
},
query: {
teamId,
},
payload: Buffer.from('test cache data'),
})
assert.equal(response.statusCode, 200)
assert.deepEqual(response.json(), { urls: [`${teamId}/${artifactId}`] })
})

await t.test('should download an artifact', async () => {
const response = await app.inject({
method: 'GET',
url: `/v8/artifacts/${artifactId}`,
headers: {
authorization: 'Bearer changeme',
},
query: {
teamId,
},
})
assert.equal(response.statusCode, 200)
assert.deepEqual(response.body, 'test cache data')
})

await t.test('should verify artifact exists', async () => {
const response = await app.inject({
method: 'HEAD',
url: `/v8/artifacts/${artifactId}`,
headers: {
authorization: 'Bearer changeme',
},
query: {
teamId,
},
})
assert.equal(response.statusCode, 200)
assert.deepEqual(response.body, '')
})

await t.test('should verify artifact does not exist', async () => {
const response = await app.inject({
method: 'HEAD',
url: '/v8/artifacts/not-found',
headers: {
authorization: 'Bearer changeme',
},
query: {
teamId,
},
})
assert.equal(response.statusCode, 404)
assert.equal(response.json().message, 'Artifact not found')
})

t.after(() => {
server.close()
})
})
})
