File Storage

NextShip includes S3-compatible file storage backed by Cloudflare R2 or AWS S3. Clients upload directly to the bucket via presigned URLs, so file bytes never pass through your server and storage credentials never reach the browser.

Features

  • S3-compatible storage (R2 or S3)
  • Presigned URLs for direct uploads
  • File metadata tracking in database
  • User file management UI
  • Image optimization support

Configuration

Cloudflare R2

R2_ACCOUNT_ID=your-account-id
R2_ACCESS_KEY_ID=your-access-key
R2_SECRET_ACCESS_KEY=your-secret-key
R2_BUCKET_NAME=your-bucket
R2_PUBLIC_URL=https://pub-xxx.r2.dev

AWS S3

AWS_ACCESS_KEY_ID=your-access-key
AWS_SECRET_ACCESS_KEY=your-secret-key
AWS_REGION=us-east-1
S3_BUCKET_NAME=your-bucket
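
The server actions below construct a client for R2. If you target AWS S3 instead, only the client construction changes; a minimal sketch using the variables above:

import { S3Client } from "@aws-sdk/client-s3";

// AWS S3 variant: a real region, no custom endpoint.
const s3 = new S3Client({
  region: process.env.AWS_REGION,
  credentials: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
  },
});

Everything else stays the same; just pass S3_BUCKET_NAME instead of R2_BUCKET_NAME as the Bucket in each command.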

Database Schema

// src/lib/db/schema.ts
import { pgTable, text, integer, timestamp } from "drizzle-orm/pg-core";

export const files = pgTable("files", {
  id: text("id").primaryKey(),
  userId: text("user_id").references(() => users.id),
  name: text("name").notNull(),
  key: text("key").notNull(),           // S3 object key
  size: integer("size").notNull(),
  mimeType: text("mime_type"),
  url: text("url"),                     // Public URL
  createdAt: timestamp("created_at").defaultNow(),
});
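
The getMyFiles and deleteFile actions below both filter on user_id, so an index on that column keeps those queries fast as the table grows. A sketch of the same table with Drizzle's index helper (assuming a recent Drizzle version; the index name is illustrative):

import { pgTable, text, integer, timestamp, index } from "drizzle-orm/pg-core";

export const files = pgTable(
  "files",
  {
    id: text("id").primaryKey(),
    userId: text("user_id").references(() => users.id), // users is defined elsewhere in schema.ts
    name: text("name").notNull(),
    key: text("key").notNull(),
    size: integer("size").notNull(),
    mimeType: text("mime_type"),
    url: text("url"),
    createdAt: timestamp("created_at").defaultNow(),
  },
  (table) => ({
    // Speeds up per-user listing and the ownership check in deleteFile.
    userIdIdx: index("files_user_id_idx").on(table.userId),
  })
);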

Server Actions

Get Upload URL

// src/server/actions/files.ts
"use server";

import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { and, eq, desc, count } from "drizzle-orm";
import { db } from "@/lib/db";            // adjust these paths to your project layout
import { files } from "@/lib/db/schema";
import { requireAuth } from "@/lib/auth";

const s3 = new S3Client({
  region: "auto", // R2 ignores the region; "auto" is the conventional value
  endpoint: `https://${process.env.R2_ACCOUNT_ID}.r2.cloudflarestorage.com`,
  credentials: {
    accessKeyId: process.env.R2_ACCESS_KEY_ID!,
    secretAccessKey: process.env.R2_SECRET_ACCESS_KEY!,
  },
});
 
export async function getUploadUrl(params: {
  filename: string;
  contentType: string;
}) {
  const session = await requireAuth();
 
  const key = `${session.user.id}/${crypto.randomUUID()}-${params.filename}`;
 
  const command = new PutObjectCommand({
    Bucket: process.env.R2_BUCKET_NAME,
    Key: key,
    ContentType: params.contentType,
  });
 
  const uploadUrl = await getSignedUrl(s3, command, { expiresIn: 3600 });
 
  return {
    uploadUrl,
    key,
    publicUrl: `${process.env.R2_PUBLIC_URL}/${key}`,
  };
}
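
Per the best practices at the end of this page, it's worth rejecting unexpected content types before signing anything. A minimal sketch of a validating wrapper (the wrapper name, allowlist, and error shape are all illustrative):

// Hypothetical allowlist; tailor it to what your app actually accepts.
const ALLOWED_UPLOAD_TYPES = new Set([
  "image/png",
  "image/jpeg",
  "image/webp",
  "application/pdf",
]);

// Rejects unsupported types before any URL is signed.
export async function getUploadUrlChecked(params: {
  filename: string;
  contentType: string;
}) {
  if (!ALLOWED_UPLOAD_TYPES.has(params.contentType)) {
    return { error: { code: "INVALID_TYPE", message: "Unsupported file type" } };
  }
  return getUploadUrl(params);
}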

Save File Metadata

export async function saveFile(params: {
  name: string;
  key: string;
  size: number;
  mimeType: string;
  url: string;
}) {
  const session = await requireAuth();
 
  const [file] = await db.insert(files).values({
    id: crypto.randomUUID(),
    userId: session.user.id,
    ...params,
  }).returning();
 
  return { data: file };
}
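
Note that saveFile trusts the key the client hands back. Because getUploadUrl prefixes every key with the caller's user id, a cheap ownership check is possible. A sketch of the guard, placed right after requireAuth() in saveFile (the error shape is illustrative):

  // Keys are minted as `${userId}/...` in getUploadUrl, so reject anything else.
  if (!params.key.startsWith(`${session.user.id}/`)) {
    return { error: { code: "FORBIDDEN", message: "Key does not belong to you" } };
  }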

List User Files

export async function getMyFiles(params: {
  page: number;
  limit: number;
}) {
  const session = await requireAuth();
 
  const userFiles = await db.query.files.findMany({
    where: eq(files.userId, session.user.id),
    orderBy: desc(files.createdAt),
    limit: params.limit,
    offset: (params.page - 1) * params.limit,
  });
 
  const total = await db
    .select({ count: count() })
    .from(files)
    .where(eq(files.userId, session.user.id));
 
  return {
    items: userFiles,
    total: total[0].count,
  };
}

Delete File

import { DeleteObjectCommand } from "@aws-sdk/client-s3";
 
export async function deleteFile(fileId: string) {
  const session = await requireAuth();
 
  // Get file to verify ownership
  const file = await db.query.files.findFirst({
    where: and(
      eq(files.id, fileId),
      eq(files.userId, session.user.id)
    ),
  });
 
  if (!file) {
    return { success: false, error: { code: "NOT_FOUND", message: "File not found" } };
  }
 
  // Delete from S3
  await s3.send(new DeleteObjectCommand({
    Bucket: process.env.R2_BUCKET_NAME,
    Key: file.key,
  }));
 
  // Delete from database
  await db.delete(files).where(eq(files.id, fileId));
 
  return { success: true, data: { deleted: true } };
}

Client Usage

Upload Component

"use client";
 
import { getUploadUrl, saveFile } from "@/server/actions/files";
 
export function FileUpload() {
  const [uploading, setUploading] = useState(false);
 
  const handleUpload = async (file: File) => {
    setUploading(true);
 
    try {
      // 1. Get presigned URL
      const { uploadUrl, key, publicUrl } = await getUploadUrl({
        filename: file.name,
        contentType: file.type,
      });
 
      // 2. Upload the bytes directly to S3/R2 (they never touch your server)
      const res = await fetch(uploadUrl, {
        method: "PUT",
        body: file,
        headers: {
          "Content-Type": file.type,
        },
      });
      if (!res.ok) throw new Error(`Upload failed: ${res.status}`);
 
      // 3. Save metadata to database
      await saveFile({
        name: file.name,
        key,
        size: file.size,
        mimeType: file.type,
        url: publicUrl,
      });
    } finally {
      setUploading(false);
    }
  };
 
  return (
    <input
      type="file"
      onChange={(e) => e.target.files?.[0] && handleUpload(e.target.files[0])}
      disabled={uploading}
    />
  );
}

File List Component

"use client";
 
import { getMyFiles, deleteFile } from "@/server/actions/files";
 
export function FileList() {
  const [files, setFiles] = useState([]);
 
  useEffect(() => {
    getMyFiles({ page: 1, limit: 20 }).then(setFiles);
  }, []);
 
  const handleDelete = async (fileId: string) => {
    await deleteFile(fileId);
    setFiles(files.filter(f => f.id !== fileId));
  };
 
  return (
    <ul>
      {files.map(file => (
        <li key={file.id}>
          <a href={file.url ?? undefined}>{file.name}</a>
          <button onClick={() => handleDelete(file.id)}>Delete</button>
        </li>
      ))}
    </ul>
  );
}

File Management Page

The files page (/files) provides:

  • File upload with drag-and-drop
  • File listing with pagination
  • File preview (images)
  • Delete functionality
  • Storage usage display (see the sketch below)
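
The storage usage figure can come from summing size per user. A minimal sketch alongside the other server actions, assuming Drizzle's sum aggregate (the action name is illustrative):

import { sum } from "drizzle-orm";

// Total bytes stored by the current user.
export async function getStorageUsage() {
  const session = await requireAuth();

  const [row] = await db
    .select({ bytes: sum(files.size) })
    .from(files)
    .where(eq(files.userId, session.user.id));

  // Drizzle returns SQL sums as strings (null when there are no rows).
  return { data: { bytes: Number(row?.bytes ?? 0) } };
}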

Best Practices

  1. Validate file types - Check MIME types before upload
  2. Set size limits - Prevent oversized uploads
  3. Use presigned URLs - Never expose credentials to client
  4. Clean up orphans - Delete S3 objects when DB records are deleted
  5. Set CORS properly - Configure bucket CORS for your domain (a sample policy follows)
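
Browser PUTs to a presigned URL are cross-origin, so the bucket must allow your app's origin. A sketch of a one-off setup script that applies a CORS rule through the AWS SDK, which works against both S3 and R2 (the script path and origin are placeholders; you can also paste the equivalent rule into the S3 or R2 dashboard):

// scripts/setup-cors.ts — hypothetical one-off setup script
import { S3Client, PutBucketCorsCommand } from "@aws-sdk/client-s3";

const s3 = new S3Client({
  region: "auto",
  endpoint: `https://${process.env.R2_ACCOUNT_ID}.r2.cloudflarestorage.com`,
  credentials: {
    accessKeyId: process.env.R2_ACCESS_KEY_ID!,
    secretAccessKey: process.env.R2_SECRET_ACCESS_KEY!,
  },
});

await s3.send(new PutBucketCorsCommand({
  Bucket: process.env.R2_BUCKET_NAME,
  CORSConfiguration: {
    CORSRules: [
      {
        AllowedOrigins: ["https://app.example.com"], // swap in your real domain
        AllowedMethods: ["PUT", "GET"],
        AllowedHeaders: ["Content-Type"],
        MaxAgeSeconds: 3600,
      },
    ],
  },
}));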