AWS SDK S3 - File uploads, downloads, and presigned URLs with S3 in Next.js
Recipe
npm install @aws-sdk/client-s3 @aws-sdk/s3-request-presigner

// lib/s3.ts
import { S3Client } from "@aws-sdk/client-s3";
// Singleton S3 client shared by all server-side code (API routes, Server Actions).
// Region and credentials come from environment variables — never expose these to the browser.
export const s3Client = new S3Client({
region: process.env.AWS_REGION!,
credentials: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
},
});// app/api/upload/route.ts
import { PutObjectCommand } from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { s3Client } from "@/lib/s3";
// Issues a presigned PUT URL so the browser can upload directly to S3
// without ever seeing AWS credentials.
export async function POST(req: Request) {
  const { filename, contentType } = await req.json();
  // Reject requests missing the fields needed to build the object key.
  if (!filename || !contentType) {
    return Response.json(
      { error: "filename and contentType are required" },
      { status: 400 }
    );
  }
  const command = new PutObjectCommand({
    Bucket: process.env.S3_BUCKET_NAME!,
    // Timestamp prefix avoids collisions between uploads sharing a filename.
    Key: `uploads/${Date.now()}-${filename}`,
    ContentType: contentType,
  });
  // URL is valid for 10 minutes — long enough to start the upload, short enough to limit abuse.
  const url = await getSignedUrl(s3Client, command, { expiresIn: 600 });
  return Response.json({ url });
}
When to reach for this: You need to upload, download, or manage files in Amazon S3 from a Next.js application, using presigned URLs for secure client-side uploads.
Working Example
// app/components/FileUploader.tsx
"use client";
import { useState, useRef } from "react";
// Client-side uploader: asks our API for a presigned URL, then PUTs the file
// straight to S3 (the file never passes through the Next.js server).
export default function FileUploader() {
  const [uploading, setUploading] = useState(false);
  const [uploadedUrl, setUploadedUrl] = useState<string | null>(null);
  const fileInputRef = useRef<HTMLInputElement>(null);

  async function handleUpload() {
    const file = fileInputRef.current?.files?.[0];
    if (!file) return;
    setUploading(true);
    try {
      // Step 1: Get presigned URL from our API
      const res = await fetch("/api/upload", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          filename: file.name,
          contentType: file.type,
        }),
      });
      // fetch() does NOT throw on HTTP errors — check status explicitly,
      // otherwise a failed presign would be reported as a successful upload.
      if (!res.ok) throw new Error(`Presign request failed: ${res.status}`);
      const { url } = await res.json();
      // Step 2: Upload directly to S3 using the presigned URL
      const putRes = await fetch(url, {
        method: "PUT",
        headers: { "Content-Type": file.type },
        body: file,
      });
      if (!putRes.ok) throw new Error(`S3 upload failed: ${putRes.status}`);
      // Extract the permanent URL (without query params)
      const permanentUrl = url.split("?")[0];
      setUploadedUrl(permanentUrl);
    } catch (error) {
      console.error("Upload failed:", error);
    } finally {
      setUploading(false);
    }
  }

  return (
    <div className="p-6 max-w-md mx-auto space-y-4">
      <div>
        <input
          ref={fileInputRef}
          type="file"
          accept="image/*,.pdf,.doc,.docx"
          className="block w-full text-sm"
        />
      </div>
      <button
        onClick={handleUpload}
        disabled={uploading}
        className="bg-blue-600 text-white px-4 py-2 rounded disabled:opacity-50"
      >
        {uploading ? "Uploading..." : "Upload"}
      </button>
      {uploadedUrl && (
        <p className="text-sm text-green-600">
          Uploaded: <a href={uploadedUrl} className="underline">View file</a>
        </p>
      )}
    </div>
  );
}// app/actions/s3-actions.ts
"use server";
import {
ListObjectsV2Command,
GetObjectCommand,
DeleteObjectCommand,
} from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { s3Client } from "@/lib/s3";
const BUCKET = process.env.S3_BUCKET_NAME!;
// Lists up to 50 objects under `prefix` and maps them to a plain, serializable shape.
export async function listFiles(prefix: string = "uploads/") {
  const command = new ListObjectsV2Command({
    Bucket: BUCKET,
    Prefix: prefix,
    MaxKeys: 50,
  });
  const response = await s3Client.send(command);
  // Contents is undefined for an empty result, and the SDK types mark each
  // field optional — default them instead of non-null assertions that can
  // throw at runtime.
  return (response.Contents ?? []).flatMap((item) =>
    item.Key === undefined
      ? []
      : [
          {
            key: item.Key,
            size: item.Size ?? 0,
            lastModified: item.LastModified?.toISOString() ?? "",
          },
        ]
  );
}
// Returns a time-limited (1 hour) presigned GET URL for the object at `key`.
export async function getDownloadUrl(key: string) {
  return getSignedUrl(
    s3Client,
    new GetObjectCommand({ Bucket: BUCKET, Key: key }),
    { expiresIn: 3600 }
  );
}
// Permanently removes the object at `key` from the bucket.
export async function deleteFile(key: string) {
  const deleteCommand = new DeleteObjectCommand({ Bucket: BUCKET, Key: key });
  await s3Client.send(deleteCommand);
}
What this demonstrates:
- Presigned URL generation for secure client-side uploads
- Direct-to-S3 upload bypassing your server (no file size limits from Next.js)
- Server Actions for listing, downloading, and deleting objects
- Proper separation of S3 client configuration
Deep Dive
How It Works
- The AWS SDK v3 uses a modular architecture: install only the clients you need (`@aws-sdk/client-s3`)
- Commands follow the Command pattern: create a command object, then `send()` it via the client
- Presigned URLs allow clients to upload or download directly to/from S3 without exposing AWS credentials
- The `@aws-sdk/s3-request-presigner` package generates time-limited signed URLs for any S3 command
- Credentials are resolved from environment variables, IAM roles, or the credentials chain automatically
- S3 keys (paths) use `/` as a delimiter, but S3 is flat storage — folders are just key prefixes
Variations
Server Action file upload (for smaller files):
"use server";
import { PutObjectCommand } from "@aws-sdk/client-s3";
import { s3Client } from "@/lib/s3";
// Server Action upload: the file passes through the Next.js server, so this is
// only suitable for small files (subject to the Server Action body size limit).
export async function uploadFile(formData: FormData) {
  const file = formData.get("file");
  // formData.get returns string | File | null — validate before calling File APIs
  // instead of a blind `as File` cast that crashes cryptically on a missing field.
  if (!(file instanceof File)) {
    throw new Error("Expected a 'file' form field containing a File");
  }
  const buffer = Buffer.from(await file.arrayBuffer());
  const command = new PutObjectCommand({
    Bucket: process.env.S3_BUCKET_NAME!,
    // Timestamp prefix keeps repeated uploads of the same filename distinct.
    Key: `uploads/${Date.now()}-${file.name}`,
    Body: buffer,
    ContentType: file.type,
  });
  await s3Client.send(command);
}
Download and stream a file:
// app/api/download/[key]/route.ts
import { GetObjectCommand } from "@aws-sdk/client-s3";
import { s3Client } from "@/lib/s3";
// Streams an S3 object through an API route (useful for auth-gated downloads).
export async function GET(
  req: Request,
  { params }: { params: { key: string } }
) {
  const command = new GetObjectCommand({
    Bucket: process.env.S3_BUCKET_NAME!,
    // Route params arrive URL-encoded; decode to recover keys containing "/" etc.
    Key: decodeURIComponent(params.key),
  });
  const response = await s3Client.send(command);
  // Body is optional in the SDK types — guard before streaming.
  if (!response.Body) {
    return new Response("Not found", { status: 404 });
  }
  const headers: Record<string, string> = {
    "Content-Type": response.ContentType ?? "application/octet-stream",
  };
  // Only set Content-Length when S3 reports it; String(undefined) would send "undefined".
  if (response.ContentLength !== undefined) {
    headers["Content-Length"] = String(response.ContentLength);
  }
  // In Node the Body is a Readable, not a web ReadableStream — use the SDK's
  // transformToWebStream() instead of an unsound cast.
  return new Response(response.Body.transformToWebStream(), { headers });
}
Copy objects between buckets:
import { CopyObjectCommand } from "@aws-sdk/client-s3";
// Copies an existing object server-side (no download/re-upload round trip).
const command = new CopyObjectCommand({
Bucket: "destination-bucket",
// CopySource is "<source-bucket>/<source-key>"; URL-encode keys with special characters.
CopySource: "source-bucket/path/to/file.pdf",
Key: "new-path/file.pdf",
});
await s3Client.send(command);TypeScript Notes
- All commands accept typed input interfaces: `PutObjectCommandInput`, `GetObjectCommandInput`, etc.
- `GetObjectCommandOutput.Body` is typed as `StreamingBlobPayloadOutputTypes` — cast to `ReadableStream` in serverless environments
- Use the `S3ClientConfig` type for custom client configuration
import type {
PutObjectCommandInput,
ListObjectsV2CommandOutput,
} from "@aws-sdk/client-s3";
// Annotating with PutObjectCommandInput catches typos and missing required fields at compile time.
const params: PutObjectCommandInput = {
Bucket: "my-bucket",
Key: "file.txt",
Body: "Hello, World!",
};Gotchas
- **CORS errors on client-side upload** — The browser blocks PUT requests to S3. Fix: Configure CORS on the S3 bucket to allow PUT from your domain. Add `AllowedOrigins`, `AllowedMethods: ["PUT"]`, and `AllowedHeaders: ["Content-Type"]`.
- **Presigned URL expired** — URLs expire after the configured `expiresIn` seconds. Fix: Generate URLs just before use. Default to 600 seconds (10 minutes) for uploads; do not generate URLs far in advance.
- **Large file uploads failing** — Files over 5GB cannot use a single PUT. Fix: Use multipart upload with the `@aws-sdk/lib-storage` `Upload` class for files over 100MB.
- **Missing Content-Type** — Files uploaded without a Content-Type get `application/octet-stream`. Fix: Always pass `ContentType` in the `PutObjectCommand` and in the client-side `fetch` headers.
- **Next.js body size limit** — Server Action uploads are limited to the Next.js body size limit (default 1MB). Fix: Use presigned URL uploads for larger files, or increase `experimental.serverActions.bodySizeLimit` in `next.config.js`.
- **Credential exposure** — Never import `@aws-sdk/client-s3` in client components. Fix: All AWS SDK usage must be in API routes, Server Components, or Server Actions.
Alternatives
| Library | Best For | Trade-off |
|---|---|---|
| @aws-sdk/client-s3 | Full S3 API access | Requires AWS account and CORS setup |
| Vercel Blob | Simple file storage on Vercel | Vercel-only, less control |
| Uploadthing | File uploads with React hooks | Abstraction over S3, less flexibility |
| Cloudflare R2 | S3-compatible, no egress fees | Separate SDK or S3 compatibility mode |
| Supabase Storage | Integrated with Supabase | Tied to Supabase ecosystem |
FAQs
What is a presigned URL and why should I use one for uploads?
- A presigned URL is a time-limited, signed URL that grants temporary access to a specific S3 operation
- The client uploads directly to S3, bypassing your server and avoiding body size limits
- AWS credentials are never exposed to the browser
- You control expiration time (e.g., 600 seconds) and the allowed operation (PUT, GET)
How does the two-step presigned URL upload flow work?
- Client sends filename and content type to your API route
- Server generates a presigned PUT URL via `getSignedUrl` and returns it
- Client uses `fetch(url, { method: "PUT", body: file })` to upload directly to S3
- The file lands in S3 without ever passing through your Next.js server
Gotcha: Why do I get CORS errors when uploading from the browser to S3?
- S3 buckets block cross-origin requests by default
- You must configure a CORS policy on the bucket allowing `PUT` from your domain
- Include `AllowedHeaders: ["Content-Type"]` and your domain in `AllowedOrigins`
- CORS configuration is set in the S3 bucket settings, not in your application code
How do I upload files larger than 5GB to S3?
- A single `PutObjectCommand` is limited to 5GB
- Use `@aws-sdk/lib-storage` and its `Upload` class for multipart uploads
- Multipart upload splits the file into parts, uploads them in parallel, and assembles on S3
- Recommended for any file over 100MB
What is the Next.js body size limit and how does it affect Server Action uploads?
- Server Actions default to a 1MB request body limit
- Uploading files through a Server Action sends the file through your server
- For larger files, use presigned URL uploads instead
- Alternatively, increase `experimental.serverActions.bodySizeLimit` in `next.config.js`
How do I stream a file download from S3 through an API route?
// Fetch the object and stream its body straight through as the HTTP response.
const command = new GetObjectCommand({ Bucket: BUCKET, Key: key });
const response = await s3Client.send(command);
return new Response(response.Body as ReadableStream, {
headers: {
"Content-Type": response.ContentType ?? "application/octet-stream",
"Content-Length": String(response.ContentLength),
},
});Gotcha: Why does my uploaded file have application/octet-stream as its Content-Type?
- If you omit `ContentType` in `PutObjectCommand`, S3 defaults to `application/octet-stream`
- Always pass `ContentType: file.type` in both the command and the client-side `fetch` headers
- This ensures browsers can display or handle the file correctly on download
How do I type the S3 command inputs in TypeScript?
import type { PutObjectCommandInput } from "@aws-sdk/client-s3";
// The *CommandInput type validates the parameter object at compile time.
const params: PutObjectCommandInput = {
Bucket: "my-bucket",
Key: "file.txt",
Body: "Hello, World!",
ContentType: "text/plain",
};Each command has a corresponding *CommandInput and *CommandOutput type.
How do I handle the GetObjectCommandOutput.Body type in TypeScript?
- `Body` is typed as `StreamingBlobPayloadOutputTypes`, not `ReadableStream`
- In serverless / API route environments, cast it: `response.Body as ReadableStream`
- Always check for `undefined` before using the body
Why should I never import @aws-sdk/client-s3 in a client component?
- Client components ship JavaScript to the browser
- Importing the AWS SDK in a client component would bundle AWS credentials into the client
- All AWS SDK usage must stay in API routes, Server Components, or Server Actions
How do I list objects in a specific S3 "folder"?
// Prefix acts as a "folder" filter; MaxKeys caps the page size.
const command = new ListObjectsV2Command({
Bucket: BUCKET,
Prefix: "uploads/images/",
MaxKeys: 50,
});
const response = await s3Client.send(command);
// Contents is undefined when no keys match the prefix.
const files = response.Contents ?? [];S3 is flat storage -- "folders" are just key prefixes separated by /.
How do I delete a file from S3 using a Server Action?
"use server";
import { DeleteObjectCommand } from "@aws-sdk/client-s3";
import { s3Client } from "@/lib/s3";
// Deletes the object at `key`. Deleting a nonexistent key is not an error in S3.
export async function deleteFile(key: string) {
  // This snippet only imports s3Client, so read the bucket name from the
  // environment rather than referencing an undefined BUCKET constant.
  const bucket = process.env.S3_BUCKET_NAME!;
  await s3Client.send(
    new DeleteObjectCommand({ Bucket: bucket, Key: key })
  );
}
Related
- Next.js Server Actions — Server-side S3 operations
- AWS SDK Lambda — Trigger Lambda for file processing
- Prisma — Store file metadata in database