Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/workflows/e2e.config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,8 @@ export default createE2EConfig([
{ file: 'locked-documents', shards: 1 },
{ file: 'i18n', shards: 1 },
{ file: 'plugin-cloud-storage', shards: 1 },
{ file: 'storage-s3__client-uploads#client-uploads/config.ts', shards: 1 },
{ file: 'storage-vercel-blob__client-uploads#client-uploads/config.ts', shards: 1 },
{ file: 'plugin-form-builder', shards: 1 },
{ file: 'plugin-import-export', shards: 1 },
{ file: 'plugin-multi-tenant', shards: 2 },
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -331,7 +331,7 @@ jobs:

- name: Start LocalStack
run: pnpm docker:start
if: contains(fromJson('["plugin-cloud-storage", "plugin-import-export"]'), matrix.suite)
if: contains(fromJson('["plugin-cloud-storage", "plugin-import-export"]'), matrix.suite) || contains(matrix.suite, 'storage-s3__client-uploads') || contains(matrix.suite, 'storage-vercel-blob__client-uploads')

- name: Start database
id: db
Expand Down Expand Up @@ -512,7 +512,7 @@ jobs:

- name: Start LocalStack
run: pnpm docker:start
if: contains(fromJson('["plugin-cloud-storage", "plugin-import-export"]'), matrix.suite)
if: contains(fromJson('["plugin-cloud-storage", "plugin-import-export"]'), matrix.suite) || contains(matrix.suite, 'storage-s3__client-uploads') || contains(matrix.suite, 'storage-vercel-blob__client-uploads')

- name: Store Playwright's Version
run: |
Expand Down
20 changes: 11 additions & 9 deletions packages/plugin-cloud-storage/src/hooks/afterChange.ts
Original file line number Diff line number Diff line change
Expand Up @@ -48,15 +48,17 @@ export const getAfterChangeHook =
}

const uploadResults = await Promise.all(
files.map((file) =>
adapter.handleUpload({
clientUploadContext: file.clientUploadContext,
collection,
data: doc,
file,
req,
}),
),
files
.filter((file) => !file.clientUploadContext)
.map((file) =>
adapter.handleUpload({
clientUploadContext: file.clientUploadContext,
collection,
data: doc,
file,
req,
}),
),
)

const uploadMetadata = uploadResults
Expand Down
1 change: 1 addition & 0 deletions test/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,7 @@ services:
volumes:
- localstack_data:/var/lib/localstack
- '/var/run/docker.sock:/var/run/docker.sock'
- './localstack-init/ready.d:/etc/localstack/init/ready.d'

# ── Azure Storage (Azurite emulator) ────────────────────────
azure-storage:
Expand Down
19 changes: 19 additions & 0 deletions test/localstack-init/ready.d/01-s3-cors.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
#!/bin/bash
# Configure CORS on the S3 bucket so browsers can PUT files directly to localstack.
# Runs automatically when LocalStack is ready (mounted at /etc/localstack/init/ready.d/).

BUCKET="payload-bucket"

# Allow cross-origin browser uploads: any origin may GET/PUT/HEAD/DELETE with any
# headers; expose ETag so multipart/client upload code can read it.
CORS_CONFIG='{
  "CORSRules": [
    {
      "AllowedHeaders": ["*"],
      "AllowedMethods": ["GET", "PUT", "HEAD", "DELETE"],
      "AllowedOrigins": ["*"],
      "ExposeHeaders": ["ETag"],
      "MaxAgeSeconds": 3600
    }
  ]
}'

# Bucket may already exist (e.g. created by the test suite) — ignore that failure.
awslocal s3api create-bucket --bucket "$BUCKET" --region us-east-1 2>/dev/null || true

awslocal s3api put-bucket-cors --bucket "$BUCKET" --cors-configuration "$CORS_CONFIG"

echo "S3 CORS configured on $BUCKET"
13 changes: 13 additions & 0 deletions test/storage-s3/client-uploads/collections/MediaContainer.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import type { CollectionConfig } from 'payload'

/**
 * Container collection used by the client-uploads e2e suite to exercise
 * bulk uploads: a single `hasMany` upload field relating to the `media`
 * collection, which surfaces the bulk-upload drawer in the admin UI
 * (driven by the `#field-files` locators in e2e.spec.ts).
 */
export const MediaContainer: CollectionConfig = {
  slug: 'media-container',
  fields: [
    {
      name: 'files',
      // upload field with hasMany — each entry references a doc in 'media'
      type: 'upload',
      relationTo: 'media',
      hasMany: true,
    },
  ],
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,28 +3,28 @@ import dotenv from 'dotenv'
import { fileURLToPath } from 'node:url'
import path from 'path'

import { buildConfigWithDefaults } from '../buildConfigWithDefaults.js'
import { devUser } from '../credentials.js'
import { Media } from './collections/Media.js'
import { MediaWithPrefix } from './collections/MediaWithPrefix.js'
import { Users } from './collections/Users.js'
import { mediaSlug, mediaWithPrefixSlug } from './shared.js'
import { MB } from './test-utils.js'
import { buildConfigWithDefaults } from '../../buildConfigWithDefaults.js'
import { devUser } from '../../credentials.js'
import { Media } from '../collections/Media.js'
import { MediaWithPrefix } from '../collections/MediaWithPrefix.js'
import { Users } from '../collections/Users.js'
import { mediaSlug, mediaWithPrefixSlug } from '../shared.js'
import { MediaContainer } from './collections/MediaContainer.js'

const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)

// Load config to work with emulated services
dotenv.config({
path: path.resolve(dirname, '../plugin-cloud-storage/.env.emulated'),
path: path.resolve(dirname, '../../plugin-cloud-storage/.env.emulated'),
})

export default buildConfigWithDefaults({
admin: {
importMap: {
baseDir: path.resolve(dirname),
baseDir: path.resolve(dirname, '..'),
},
},
collections: [Media, MediaWithPrefix, Users],
collections: [Media, MediaWithPrefix, MediaContainer, Users],
onInit: async (payload) => {
await payload.create({
collection: 'users',
Expand Down Expand Up @@ -59,8 +59,8 @@ export default buildConfigWithDefaults({
],
upload: {
limits: {
fileSize: MB(10),
}, // 10 mb
fileSize: 10 * 1024 * 1024, // 10 MB
},
},
typescript: {
outputFile: path.resolve(dirname, 'payload-types.ts'),
Expand Down
217 changes: 217 additions & 0 deletions test/storage-s3/client-uploads/e2e.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,217 @@
import type { Page } from '@playwright/test'

import { expect, test } from '@playwright/test'
import dotenv from 'dotenv'
import * as path from 'path'
import { fileURLToPath } from 'url'

import {
ensureCompilationIsDone,
exactText,
saveDocAndAssert,
} from '../../__helpers/e2e/helpers.js'
import { AdminUrlUtil } from '../../__helpers/shared/adminUrlUtil.js'
import { initPayloadE2ENoConfig } from '../../__helpers/shared/initPayloadE2ENoConfig.js'
import { TEST_TIMEOUT_LONG } from '../../playwright.config.js'
import { mediaSlug } from '../shared.js'

const filename = fileURLToPath(import.meta.url)
const dirname = path.dirname(filename)

dotenv.config({ path: path.resolve(dirname, '../../plugin-cloud-storage/.env.emulated') })

// e.g. "http://localhost:4566" or "localhost:4566" — used to detect file upload
// requests going directly to S3
const s3Endpoint =
  process.env.S3_ENDPOINT ?? process.env.AWS_ENDPOINT_URL ?? process.env.AWS_ENDPOINT_URL_S3

// Fail fast: every test below depends on knowing the S3 host.
if (!s3Endpoint) {
  throw new Error('Missing S3 endpoint env (S3_ENDPOINT, AWS_ENDPOINT_URL, or AWS_ENDPOINT_URL_S3)')
}

// A scheme-less value such as "localhost:4566" is parsed by `new URL()` as
// protocol "localhost:" with an EMPTY host, which would silently break the
// direct-to-S3 request matching below — normalize to an http URL first.
const s3Host = new URL(s3Endpoint.includes('://') ? s3Endpoint : `http://${s3Endpoint}`).host

// image.png is 89 KB — any request with content-length above this threshold is a file upload
const FILE_SIZE_THRESHOLD = 1_000

const mediaContainerSlug = 'media-container'

test.describe('storage-s3 client uploads E2E', () => {
  let page: Page
  let mediaURL: AdminUrlUtil
  let mediaContainerURL: AdminUrlUtil
  let payloadHost: string

  /**
   * Record network traffic on `target` so tests can assert that file bytes
   * bypass the Payload server. Any request to the Payload host whose
   * content-length exceeds FILE_SIZE_THRESHOLD is treated as a file upload;
   * every PUT to the S3 host counts as a direct-to-S3 upload. Returns live
   * arrays that fill in as requests are observed.
   *
   * (Extracted from three verbatim copies of this listener — keep behavior
   * in sync with FILE_SIZE_THRESHOLD / s3Host above.)
   */
  const trackUploadTraffic = (target: Page) => {
    const largeRequestsToPayload: string[] = []
    const putsToS3: string[] = []

    target.on('request', (request) => {
      const url = request.url()
      const contentLength = parseInt(request.headers()['content-length'] ?? '0', 10)

      if (new URL(url).host === payloadHost && contentLength > FILE_SIZE_THRESHOLD) {
        largeRequestsToPayload.push(`${request.method()} ${url} (${contentLength} bytes)`)
      }
      if (new URL(url).host === s3Host && request.method() === 'PUT') {
        putsToS3.push(url)
      }
    })

    return { largeRequestsToPayload, putsToS3 }
  }

  test.beforeAll(async ({ browser }, testInfo) => {
    testInfo.setTimeout(TEST_TIMEOUT_LONG)
    const { serverURL } = await initPayloadE2ENoConfig({ dirname })

    payloadHost = new URL(serverURL).host
    mediaURL = new AdminUrlUtil(serverURL, mediaSlug)
    mediaContainerURL = new AdminUrlUtil(serverURL, mediaContainerSlug)

    const context = await browser.newContext()
    page = await context.newPage()
    await ensureCompilationIsDone({ page, serverURL })
  })

  test('should complete a single client upload via the admin UI', async () => {
    await page.goto(mediaURL.create)
    await page.setInputFiles('input[type="file"]', path.resolve(dirname, '../../uploads/image.png'))
    await expect(page.locator('.file-field__filename')).toHaveValue('image.png')
    await saveDocAndAssert(page)
  })

  test('should upload file directly to S3, not through the Payload server', async ({ browser }) => {
    const context = await browser.newContext()
    const testPage = await context.newPage()
    const { largeRequestsToPayload, putsToS3 } = trackUploadTraffic(testPage)

    await testPage.goto(mediaURL.create)
    await testPage.setInputFiles(
      'input[type="file"]',
      path.resolve(dirname, '../../uploads/image.png'),
    )
    await saveDocAndAssert(testPage)

    expect(
      largeRequestsToPayload,
      `File bytes were sent to Payload: ${largeRequestsToPayload.join(', ')}`,
    ).toHaveLength(0)

    expect(
      putsToS3.length,
      'Expected at least one PUT request to the S3 endpoint',
    ).toBeGreaterThanOrEqual(1)

    await context.close()
  })

  test('should bulk upload multiple files directly to S3, not through Payload', async ({
    browser,
  }) => {
    const context = await browser.newContext()
    const testPage = await context.newPage()
    const { largeRequestsToPayload, putsToS3 } = trackUploadTraffic(testPage)

    await testPage.goto(mediaContainerURL.create)

    const createNewButton = testPage.locator('#field-files button', {
      hasText: exactText('Create New'),
    })
    await expect(createNewButton).toBeVisible()
    await expect(createNewButton).toBeEnabled()
    await createNewButton.click()

    const bulkUploadModal = testPage.locator('#files-bulk-upload-drawer-slug-1')
    await expect(bulkUploadModal).toBeVisible()

    await bulkUploadModal
      .locator('.dropzone input[type="file"]')
      .setInputFiles([
        path.resolve(dirname, '../../uploads/image.png'),
        path.resolve(dirname, '../../uploads/test-image.png'),
      ])

    const saveButton = bulkUploadModal.locator('.bulk-upload--actions-bar__saveButtons button')
    await saveButton.click()

    await expect(bulkUploadModal).toBeHidden({ timeout: 30_000 })

    expect(putsToS3.length, 'Expected one PUT to S3 per uploaded file').toBeGreaterThanOrEqual(2)

    expect(
      largeRequestsToPayload,
      `File bytes were sent to Payload: ${largeRequestsToPayload.join(', ')}`,
    ).toHaveLength(0)

    // Both uploaded docs should now be attached to the hasMany field.
    const items = testPage.locator('#field-files .upload--has-many__dragItem')
    await expect(items).toHaveCount(2)

    await context.close()
  })

  test('should bulk upload files from the list view directly to S3, not through Payload', async ({
    browser,
  }) => {
    const context = await browser.newContext()
    const testPage = await context.newPage()
    const { largeRequestsToPayload, putsToS3 } = trackUploadTraffic(testPage)

    await testPage.goto(mediaURL.list)
    await expect(testPage.locator('.list-header__title')).toBeVisible()

    const bulkUploadButton = testPage.locator('.list-header__title-actions button', {
      hasText: 'Bulk Upload',
    })
    await expect(bulkUploadButton).toBeEnabled()

    // Click and retry until dropzone appears (handles hydration timing)
    const dropzoneInput = testPage.locator('.dropzone input[type="file"]')
    await expect(async () => {
      await bulkUploadButton.click()
      await expect(dropzoneInput).toBeAttached({ timeout: 1500 })
    }).toPass({ timeout: 5000, intervals: [500] })

    await testPage.setInputFiles('.dropzone input[type="file"]', [
      path.resolve(dirname, '../../uploads/image.png'),
      path.resolve(dirname, '../../uploads/test-image.png'),
    ])

    const bulkUploadModal = testPage.locator('#media-bulk-upload-drawer-slug-1')
    const saveButton = bulkUploadModal.locator('.bulk-upload--actions-bar__saveButtons button')
    await expect(saveButton).toBeVisible()
    await saveButton.click()

    await expect(bulkUploadModal).toBeHidden({ timeout: 30_000 })

    expect(putsToS3.length, 'Expected one PUT to S3 per uploaded file').toBeGreaterThanOrEqual(2)

    expect(
      largeRequestsToPayload,
      `File bytes were sent to Payload: ${largeRequestsToPayload.join(', ')}`,
    ).toHaveLength(0)

    await context.close()
  })
})
Loading
Loading