Move to single .env file
This commit is contained in:
10
.claude/settings.json
Normal file
10
.claude/settings.json
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"enabledPlugins": {
|
||||
"payload@payload-marketplace": true,
|
||||
"frontend-design@claude-plugins-official": true,
|
||||
"superpowers@claude-plugins-official": true,
|
||||
"context7@claude-plugins-official": true,
|
||||
"claude-md-management@claude-plugins-official": true,
|
||||
"waynesutton-convex-skills@cpd-waynesutton-convex-skills": true
|
||||
}
|
||||
}
|
||||
369
.claude/skills/convex-best-practices/SKILL.md
Normal file
369
.claude/skills/convex-best-practices/SKILL.md
Normal file
@@ -0,0 +1,369 @@
|
||||
---
|
||||
name: convex-best-practices
|
||||
description: Guidelines for building production-ready Convex apps covering function organization, query patterns, validation, TypeScript usage, error handling, and the Zen of Convex design philosophy
|
||||
---
|
||||
|
||||
# Convex Best Practices
|
||||
|
||||
Build production-ready Convex applications by following established patterns for function organization, query optimization, validation, TypeScript usage, and error handling.
|
||||
|
||||
## Code Quality
|
||||
|
||||
All patterns in this skill comply with `@convex-dev/eslint-plugin`. Install it for build-time validation:
|
||||
|
||||
```bash
|
||||
npm i @convex-dev/eslint-plugin --save-dev
|
||||
```
|
||||
|
||||
```js
|
||||
// eslint.config.js
|
||||
import { defineConfig } from "eslint/config";
|
||||
import convexPlugin from "@convex-dev/eslint-plugin";
|
||||
|
||||
export default defineConfig([
|
||||
...convexPlugin.configs.recommended,
|
||||
]);
|
||||
```
|
||||
|
||||
The plugin enforces four rules:
|
||||
|
||||
| Rule | What it enforces |
|
||||
| ----------------------------------- | --------------------------------- |
|
||||
| `no-old-registered-function-syntax` | Object syntax with `handler` |
|
||||
| `require-argument-validators` | `args: {}` on all functions |
|
||||
| `explicit-table-ids` | Table name in db operations |
|
||||
| `import-wrong-runtime` | No Node imports in Convex runtime |
|
||||
|
||||
Docs: https://docs.convex.dev/eslint
|
||||
|
||||
## Documentation Sources
|
||||
|
||||
Before implementing, do not assume; fetch the latest documentation:
|
||||
|
||||
- Primary: https://docs.convex.dev/understanding/best-practices/
|
||||
- Error Handling: https://docs.convex.dev/functions/error-handling
|
||||
- Write Conflicts: https://docs.convex.dev/error#1
|
||||
- For broader context: https://docs.convex.dev/llms.txt
|
||||
|
||||
## Instructions
|
||||
|
||||
### The Zen of Convex
|
||||
|
||||
1. **Convex manages the hard parts** - Let Convex handle caching, real-time sync, and consistency
|
||||
2. **Functions are the API** - Design your functions as your application's interface
|
||||
3. **Schema is truth** - Define your data model explicitly in schema.ts
|
||||
4. **TypeScript everywhere** - Leverage end-to-end type safety
|
||||
5. **Queries are reactive** - Think in terms of subscriptions, not requests
|
||||
|
||||
### Function Organization
|
||||
|
||||
Organize your Convex functions by domain:
|
||||
|
||||
```typescript
|
||||
// convex/users.ts - User-related functions
|
||||
import { query, mutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const get = query({
|
||||
args: { userId: v.id("users") },
|
||||
returns: v.union(
|
||||
v.object({
|
||||
_id: v.id("users"),
|
||||
_creationTime: v.number(),
|
||||
name: v.string(),
|
||||
email: v.string(),
|
||||
}),
|
||||
v.null(),
|
||||
),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db.get("users", args.userId);
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Argument and Return Validation
|
||||
|
||||
Always define validators for arguments AND return types:
|
||||
|
||||
```typescript
|
||||
export const createTask = mutation({
|
||||
args: {
|
||||
title: v.string(),
|
||||
description: v.optional(v.string()),
|
||||
priority: v.union(v.literal("low"), v.literal("medium"), v.literal("high")),
|
||||
},
|
||||
returns: v.id("tasks"),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db.insert("tasks", {
|
||||
title: args.title,
|
||||
description: args.description,
|
||||
priority: args.priority,
|
||||
completed: false,
|
||||
createdAt: Date.now(),
|
||||
});
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Query Patterns
|
||||
|
||||
Use indexes instead of filters for efficient queries:
|
||||
|
||||
```typescript
|
||||
// Schema with index
|
||||
export default defineSchema({
|
||||
tasks: defineTable({
|
||||
userId: v.id("users"),
|
||||
status: v.string(),
|
||||
createdAt: v.number(),
|
||||
})
|
||||
.index("by_user", ["userId"])
|
||||
.index("by_user_and_status", ["userId", "status"]),
|
||||
});
|
||||
|
||||
// Query using index
|
||||
export const getTasksByUser = query({
|
||||
args: { userId: v.id("users") },
|
||||
returns: v.array(
|
||||
v.object({
|
||||
_id: v.id("tasks"),
|
||||
_creationTime: v.number(),
|
||||
userId: v.id("users"),
|
||||
status: v.string(),
|
||||
createdAt: v.number(),
|
||||
}),
|
||||
),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db
|
||||
.query("tasks")
|
||||
.withIndex("by_user", (q) => q.eq("userId", args.userId))
|
||||
.order("desc")
|
||||
.collect();
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
Use ConvexError for user-facing errors:
|
||||
|
||||
```typescript
|
||||
import { ConvexError } from "convex/values";
|
||||
|
||||
export const updateTask = mutation({
|
||||
args: {
|
||||
taskId: v.id("tasks"),
|
||||
title: v.string(),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const task = await ctx.db.get("tasks", args.taskId);
|
||||
|
||||
if (!task) {
|
||||
throw new ConvexError({
|
||||
code: "NOT_FOUND",
|
||||
message: "Task not found",
|
||||
});
|
||||
}
|
||||
|
||||
await ctx.db.patch("tasks", args.taskId, { title: args.title });
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Avoiding Write Conflicts (Optimistic Concurrency Control)
|
||||
|
||||
Convex uses OCC. Follow these patterns to minimize conflicts:
|
||||
|
||||
```typescript
|
||||
// GOOD: Make mutations idempotent
|
||||
export const completeTask = mutation({
|
||||
args: { taskId: v.id("tasks") },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const task = await ctx.db.get("tasks", args.taskId);
|
||||
|
||||
// Early return if already complete (idempotent)
|
||||
if (!task || task.status === "completed") {
|
||||
return null;
|
||||
}
|
||||
|
||||
await ctx.db.patch("tasks", args.taskId, {
|
||||
status: "completed",
|
||||
completedAt: Date.now(),
|
||||
});
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
// GOOD: Patch directly without reading first when possible
|
||||
export const updateNote = mutation({
|
||||
args: { id: v.id("notes"), content: v.string() },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
// Patch directly - ctx.db.patch throws if document doesn't exist
|
||||
await ctx.db.patch("notes", args.id, { content: args.content });
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
// GOOD: Use Promise.all for parallel independent updates
|
||||
export const reorderItems = mutation({
|
||||
args: { itemIds: v.array(v.id("items")) },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const updates = args.itemIds.map((id, index) =>
|
||||
ctx.db.patch("items", id, { order: index }),
|
||||
);
|
||||
await Promise.all(updates);
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### TypeScript Best Practices
|
||||
|
||||
```typescript
|
||||
import { Id, Doc } from "./_generated/dataModel";
|
||||
|
||||
// Use Id type for document references
|
||||
type UserId = Id<"users">;
|
||||
|
||||
// Use Doc type for full documents
|
||||
type User = Doc<"users">;
|
||||
|
||||
// Define Record types properly
|
||||
const userScores: Record<Id<"users">, number> = {};
|
||||
```
|
||||
|
||||
### Internal vs Public Functions
|
||||
|
||||
```typescript
|
||||
// Public function - exposed to clients
|
||||
export const getUser = query({
|
||||
args: { userId: v.id("users") },
|
||||
returns: v.union(
|
||||
v.null(),
|
||||
v.object({
|
||||
/* ... */
|
||||
}),
|
||||
),
|
||||
handler: async (ctx, args) => {
|
||||
// ...
|
||||
},
|
||||
});
|
||||
|
||||
// Internal function - only callable from other Convex functions
|
||||
export const updateUserStats = internalMutation({
|
||||
args: { userId: v.id("users") },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
// ...
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Complete CRUD Pattern
|
||||
|
||||
```typescript
|
||||
// convex/tasks.ts
|
||||
import { query, mutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { ConvexError } from "convex/values";
|
||||
|
||||
const taskValidator = v.object({
|
||||
_id: v.id("tasks"),
|
||||
_creationTime: v.number(),
|
||||
title: v.string(),
|
||||
completed: v.boolean(),
|
||||
userId: v.id("users"),
|
||||
});
|
||||
|
||||
export const list = query({
|
||||
args: { userId: v.id("users") },
|
||||
returns: v.array(taskValidator),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db
|
||||
.query("tasks")
|
||||
.withIndex("by_user", (q) => q.eq("userId", args.userId))
|
||||
.collect();
|
||||
},
|
||||
});
|
||||
|
||||
export const create = mutation({
|
||||
args: {
|
||||
title: v.string(),
|
||||
userId: v.id("users"),
|
||||
},
|
||||
returns: v.id("tasks"),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db.insert("tasks", {
|
||||
title: args.title,
|
||||
completed: false,
|
||||
userId: args.userId,
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export const update = mutation({
|
||||
args: {
|
||||
taskId: v.id("tasks"),
|
||||
title: v.optional(v.string()),
|
||||
completed: v.optional(v.boolean()),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const { taskId, ...updates } = args;
|
||||
|
||||
// Remove undefined values
|
||||
const cleanUpdates = Object.fromEntries(
|
||||
Object.entries(updates).filter(([_, v]) => v !== undefined),
|
||||
);
|
||||
|
||||
if (Object.keys(cleanUpdates).length > 0) {
|
||||
await ctx.db.patch("tasks", taskId, cleanUpdates);
|
||||
}
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
export const remove = mutation({
|
||||
args: { taskId: v.id("tasks") },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
await ctx.db.delete("tasks", args.taskId);
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Never run `npx convex deploy` unless explicitly instructed
|
||||
- Never run any git commands unless explicitly instructed
|
||||
- Always define return validators for functions
|
||||
- Use indexes for all queries that filter data
|
||||
- Make mutations idempotent to handle retries gracefully
|
||||
- Use ConvexError for user-facing error messages
|
||||
- Organize functions by domain (users.ts, tasks.ts, etc.)
|
||||
- Use internal functions for sensitive operations
|
||||
- Leverage TypeScript's Id and Doc types
|
||||
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Using filter instead of withIndex** - Always define indexes and use withIndex
|
||||
2. **Missing return validators** - Always specify the returns field
|
||||
3. **Non-idempotent mutations** - Check current state before updating
|
||||
4. **Reading before patching unnecessarily** - Patch directly when possible
|
||||
5. **Not handling null returns** - Document IDs might not exist
|
||||
|
||||
## References
|
||||
|
||||
- Convex Documentation: https://docs.convex.dev/
|
||||
- Convex LLMs.txt: https://docs.convex.dev/llms.txt
|
||||
- Best Practices: https://docs.convex.dev/understanding/best-practices/
|
||||
- Error Handling: https://docs.convex.dev/functions/error-handling
|
||||
- Write Conflicts: https://docs.convex.dev/error#1
|
||||
457
.claude/skills/convex-component-authoring/SKILL.md
Normal file
457
.claude/skills/convex-component-authoring/SKILL.md
Normal file
@@ -0,0 +1,457 @@
|
||||
---
|
||||
name: convex-component-authoring
|
||||
displayName: Convex Component Authoring
|
||||
description: How to create, structure, and publish self-contained Convex components with proper isolation, exports, and dependency management
|
||||
version: 1.0.0
|
||||
author: Convex
|
||||
tags: [convex, components, reusable, packages, npm]
|
||||
---
|
||||
|
||||
# Convex Component Authoring
|
||||
|
||||
Create self-contained, reusable Convex components with proper isolation, exports, and dependency management for sharing across projects.
|
||||
|
||||
## Documentation Sources
|
||||
|
||||
Before implementing, do not assume; fetch the latest documentation:
|
||||
|
||||
- Primary: https://docs.convex.dev/components
|
||||
- Component Authoring: https://docs.convex.dev/components/authoring
|
||||
- For broader context: https://docs.convex.dev/llms.txt
|
||||
|
||||
## Instructions
|
||||
|
||||
### What Are Convex Components?
|
||||
|
||||
Convex components are self-contained packages that include:
|
||||
- Database tables (isolated from the main app)
|
||||
- Functions (queries, mutations, actions)
|
||||
- TypeScript types and validators
|
||||
- Optional frontend hooks
|
||||
|
||||
### Component Structure
|
||||
|
||||
```
|
||||
my-convex-component/
|
||||
├── package.json
|
||||
├── tsconfig.json
|
||||
├── README.md
|
||||
├── src/
|
||||
│ ├── index.ts # Main exports
|
||||
│ ├── component.ts # Component definition
|
||||
│ ├── schema.ts # Component schema
|
||||
│ └── functions/
|
||||
│ ├── queries.ts
|
||||
│ ├── mutations.ts
|
||||
│ └── actions.ts
|
||||
└── convex.config.ts # Component configuration
|
||||
```
|
||||
|
||||
### Creating a Component
|
||||
|
||||
#### 1. Component Configuration
|
||||
|
||||
```typescript
|
||||
// convex.config.ts
|
||||
import { defineComponent } from "convex/server";
|
||||
|
||||
export default defineComponent("myComponent");
|
||||
```
|
||||
|
||||
#### 2. Component Schema
|
||||
|
||||
```typescript
|
||||
// src/schema.ts
|
||||
import { defineSchema, defineTable } from "convex/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export default defineSchema({
|
||||
// Tables are isolated to this component
|
||||
items: defineTable({
|
||||
name: v.string(),
|
||||
data: v.any(),
|
||||
createdAt: v.number(),
|
||||
}).index("by_name", ["name"]),
|
||||
|
||||
config: defineTable({
|
||||
key: v.string(),
|
||||
value: v.any(),
|
||||
}).index("by_key", ["key"]),
|
||||
});
|
||||
```
|
||||
|
||||
#### 3. Component Definition
|
||||
|
||||
```typescript
|
||||
// src/component.ts
|
||||
import { defineComponent, ComponentDefinition } from "convex/server";
|
||||
import schema from "./schema";
|
||||
import * as queries from "./functions/queries";
|
||||
import * as mutations from "./functions/mutations";
|
||||
|
||||
const component = defineComponent("myComponent", {
|
||||
schema,
|
||||
functions: {
|
||||
...queries,
|
||||
...mutations,
|
||||
},
|
||||
});
|
||||
|
||||
export default component;
|
||||
```
|
||||
|
||||
#### 4. Component Functions
|
||||
|
||||
```typescript
|
||||
// src/functions/queries.ts
|
||||
import { query } from "../_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const list = query({
|
||||
args: {
|
||||
limit: v.optional(v.number()),
|
||||
},
|
||||
returns: v.array(v.object({
|
||||
_id: v.id("items"),
|
||||
name: v.string(),
|
||||
data: v.any(),
|
||||
createdAt: v.number(),
|
||||
})),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db
|
||||
.query("items")
|
||||
.order("desc")
|
||||
.take(args.limit ?? 10);
|
||||
},
|
||||
});
|
||||
|
||||
export const get = query({
|
||||
args: { name: v.string() },
|
||||
returns: v.union(v.object({
|
||||
_id: v.id("items"),
|
||||
name: v.string(),
|
||||
data: v.any(),
|
||||
}), v.null()),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db
|
||||
.query("items")
|
||||
.withIndex("by_name", (q) => q.eq("name", args.name))
|
||||
.unique();
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
```typescript
|
||||
// src/functions/mutations.ts
|
||||
import { mutation } from "../_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const create = mutation({
|
||||
args: {
|
||||
name: v.string(),
|
||||
data: v.any(),
|
||||
},
|
||||
returns: v.id("items"),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db.insert("items", {
|
||||
name: args.name,
|
||||
data: args.data,
|
||||
createdAt: Date.now(),
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export const update = mutation({
|
||||
args: {
|
||||
id: v.id("items"),
|
||||
data: v.any(),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
await ctx.db.patch(args.id, { data: args.data });
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
export const remove = mutation({
|
||||
args: { id: v.id("items") },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
await ctx.db.delete(args.id);
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
#### 5. Main Exports
|
||||
|
||||
```typescript
|
||||
// src/index.ts
|
||||
export { default as component } from "./component";
|
||||
export * from "./functions/queries";
|
||||
export * from "./functions/mutations";
|
||||
|
||||
// Export types for consumers
|
||||
export type { Id } from "./_generated/dataModel";
|
||||
```
|
||||
|
||||
### Using a Component
|
||||
|
||||
```typescript
|
||||
// In the consuming app's convex/convex.config.ts
|
||||
import { defineApp } from "convex/server";
|
||||
import myComponent from "my-convex-component";
|
||||
|
||||
const app = defineApp();
|
||||
|
||||
app.use(myComponent, { name: "myComponent" });
|
||||
|
||||
export default app;
|
||||
```
|
||||
|
||||
```typescript
|
||||
// In the consuming app's code
|
||||
import { useQuery, useMutation } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
|
||||
function MyApp() {
|
||||
// Access component functions through the app's API
|
||||
const items = useQuery(api.myComponent.list, { limit: 10 });
|
||||
const createItem = useMutation(api.myComponent.create);
|
||||
|
||||
return (
|
||||
<div>
|
||||
{items?.map((item) => (
|
||||
<div key={item._id}>{item.name}</div>
|
||||
))}
|
||||
<button onClick={() => createItem({ name: "New", data: {} })}>
|
||||
Add Item
|
||||
</button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Component Configuration Options
|
||||
|
||||
```typescript
|
||||
// convex/convex.config.ts
|
||||
import { defineApp } from "convex/server";
|
||||
import myComponent from "my-convex-component";
|
||||
|
||||
const app = defineApp();
|
||||
|
||||
// Basic usage
|
||||
app.use(myComponent);
|
||||
|
||||
// With custom name
|
||||
app.use(myComponent, { name: "customName" });
|
||||
|
||||
// Multiple instances
|
||||
app.use(myComponent, { name: "instance1" });
|
||||
app.use(myComponent, { name: "instance2" });
|
||||
|
||||
export default app;
|
||||
```
|
||||
|
||||
### Providing Component Hooks
|
||||
|
||||
```typescript
|
||||
// src/hooks.ts
|
||||
import { useQuery, useMutation } from "convex/react";
|
||||
import { FunctionReference } from "convex/server";
|
||||
|
||||
// Type-safe hooks for component consumers
|
||||
export function useMyComponent(api: {
|
||||
list: FunctionReference<"query">;
|
||||
create: FunctionReference<"mutation">;
|
||||
}) {
|
||||
const items = useQuery(api.list, {});
|
||||
const createItem = useMutation(api.create);
|
||||
|
||||
return {
|
||||
items,
|
||||
createItem,
|
||||
isLoading: items === undefined,
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### Publishing a Component
|
||||
|
||||
#### package.json
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "my-convex-component",
|
||||
"version": "1.0.0",
|
||||
"description": "A reusable Convex component",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"files": [
|
||||
"dist",
|
||||
"convex.config.ts"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"prepublishOnly": "npm run build"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"convex": "^1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"convex": "^1.17.0",
|
||||
"typescript": "^5.0.0"
|
||||
},
|
||||
"keywords": [
|
||||
"convex",
|
||||
"component"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### tsconfig.json
|
||||
|
||||
```json
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2020",
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "bundler",
|
||||
"declaration": true,
|
||||
"outDir": "dist",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Rate Limiter Component
|
||||
|
||||
```typescript
|
||||
// rate-limiter/src/schema.ts
|
||||
import { defineSchema, defineTable } from "convex/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export default defineSchema({
|
||||
requests: defineTable({
|
||||
key: v.string(),
|
||||
timestamp: v.number(),
|
||||
})
|
||||
.index("by_key", ["key"])
|
||||
.index("by_key_and_time", ["key", "timestamp"]),
|
||||
});
|
||||
```
|
||||
|
||||
```typescript
|
||||
// rate-limiter/src/functions/mutations.ts
|
||||
import { mutation } from "../_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const checkLimit = mutation({
|
||||
args: {
|
||||
key: v.string(),
|
||||
limit: v.number(),
|
||||
windowMs: v.number(),
|
||||
},
|
||||
returns: v.object({
|
||||
allowed: v.boolean(),
|
||||
remaining: v.number(),
|
||||
resetAt: v.number(),
|
||||
}),
|
||||
handler: async (ctx, args) => {
|
||||
const now = Date.now();
|
||||
const windowStart = now - args.windowMs;
|
||||
|
||||
// Clean old entries
|
||||
const oldEntries = await ctx.db
|
||||
.query("requests")
|
||||
.withIndex("by_key_and_time", (q) =>
|
||||
q.eq("key", args.key).lt("timestamp", windowStart)
|
||||
)
|
||||
.collect();
|
||||
|
||||
for (const entry of oldEntries) {
|
||||
await ctx.db.delete(entry._id);
|
||||
}
|
||||
|
||||
// Count current window
|
||||
const currentRequests = await ctx.db
|
||||
.query("requests")
|
||||
.withIndex("by_key", (q) => q.eq("key", args.key))
|
||||
.collect();
|
||||
|
||||
const remaining = Math.max(0, args.limit - currentRequests.length);
|
||||
const allowed = remaining > 0;
|
||||
|
||||
if (allowed) {
|
||||
await ctx.db.insert("requests", {
|
||||
key: args.key,
|
||||
timestamp: now,
|
||||
});
|
||||
}
|
||||
|
||||
const oldestRequest = currentRequests[0];
|
||||
const resetAt = oldestRequest
|
||||
? oldestRequest.timestamp + args.windowMs
|
||||
: now + args.windowMs;
|
||||
|
||||
return { allowed, remaining: remaining - (allowed ? 1 : 0), resetAt };
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
```typescript
|
||||
// Usage in consuming app
|
||||
import { useMutation } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
|
||||
function useRateLimitedAction() {
|
||||
const checkLimit = useMutation(api.rateLimiter.checkLimit);
|
||||
|
||||
return async (action: () => Promise<void>) => {
|
||||
const result = await checkLimit({
|
||||
key: "user-action",
|
||||
limit: 10,
|
||||
windowMs: 60000,
|
||||
});
|
||||
|
||||
if (!result.allowed) {
|
||||
throw new Error(`Rate limited. Try again at ${new Date(result.resetAt)}`);
|
||||
}
|
||||
|
||||
await action();
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Never run `npx convex deploy` unless explicitly instructed
|
||||
- Never run any git commands unless explicitly instructed
|
||||
- Keep component tables isolated (don't reference main app tables)
|
||||
- Export clear TypeScript types for consumers
|
||||
- Document all public functions and their arguments
|
||||
- Use semantic versioning for component releases
|
||||
- Include comprehensive README with examples
|
||||
- Test components in isolation before publishing
|
||||
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Cross-referencing tables** - Component tables should be self-contained
|
||||
2. **Missing type exports** - Export all necessary types
|
||||
3. **Hardcoded configuration** - Use component options for customization
|
||||
4. **No versioning** - Follow semantic versioning
|
||||
5. **Poor documentation** - Document all public APIs
|
||||
|
||||
## References
|
||||
|
||||
- Convex Documentation: https://docs.convex.dev/
|
||||
- Convex LLMs.txt: https://docs.convex.dev/llms.txt
|
||||
- Components: https://docs.convex.dev/components
|
||||
- Component Authoring: https://docs.convex.dev/components/authoring
|
||||
604
.claude/skills/convex-cron-jobs/SKILL.md
Normal file
604
.claude/skills/convex-cron-jobs/SKILL.md
Normal file
@@ -0,0 +1,604 @@
|
||||
---
|
||||
name: convex-cron-jobs
|
||||
displayName: Convex Cron Jobs
|
||||
description: Scheduled function patterns for background tasks including interval scheduling, cron expressions, job monitoring, retry strategies, and best practices for long-running tasks
|
||||
version: 1.0.0
|
||||
author: Convex
|
||||
tags: [convex, cron, scheduling, background-jobs, automation]
|
||||
---
|
||||
|
||||
# Convex Cron Jobs
|
||||
|
||||
Schedule recurring functions for background tasks, cleanup jobs, data syncing, and automated workflows in Convex applications.
|
||||
|
||||
## Documentation Sources
|
||||
|
||||
Before implementing, do not assume; fetch the latest documentation:
|
||||
|
||||
- Primary: https://docs.convex.dev/scheduling/cron-jobs
|
||||
- Scheduling Overview: https://docs.convex.dev/scheduling
|
||||
- Scheduled Functions: https://docs.convex.dev/scheduling/scheduled-functions
|
||||
- For broader context: https://docs.convex.dev/llms.txt
|
||||
|
||||
## Instructions
|
||||
|
||||
### Cron Jobs Overview
|
||||
|
||||
Convex cron jobs allow you to schedule functions to run at regular intervals or specific times. Key features:
|
||||
|
||||
- Run functions on a fixed schedule
|
||||
- Support for interval-based and cron expression scheduling
|
||||
- Automatic retries on failure
|
||||
- Monitoring via the Convex dashboard
|
||||
|
||||
### Basic Cron Setup
|
||||
|
||||
```typescript
|
||||
// convex/crons.ts
|
||||
import { cronJobs } from "convex/server";
|
||||
import { internal } from "./_generated/api";
|
||||
|
||||
const crons = cronJobs();
|
||||
|
||||
// Run every hour
|
||||
crons.interval(
|
||||
"cleanup expired sessions",
|
||||
{ hours: 1 },
|
||||
internal.tasks.cleanupExpiredSessions,
|
||||
{}
|
||||
);
|
||||
|
||||
// Run every day at midnight UTC
|
||||
crons.cron(
|
||||
"daily report",
|
||||
"0 0 * * *",
|
||||
internal.reports.generateDailyReport,
|
||||
{}
|
||||
);
|
||||
|
||||
export default crons;
|
||||
```
|
||||
|
||||
### Interval-Based Scheduling
|
||||
|
||||
Use `crons.interval` for simple recurring tasks:
|
||||
|
||||
```typescript
|
||||
// convex/crons.ts
|
||||
import { cronJobs } from "convex/server";
|
||||
import { internal } from "./_generated/api";
|
||||
|
||||
const crons = cronJobs();
|
||||
|
||||
// Every 5 minutes
|
||||
crons.interval(
|
||||
"sync external data",
|
||||
{ minutes: 5 },
|
||||
internal.sync.fetchExternalData,
|
||||
{}
|
||||
);
|
||||
|
||||
// Every 2 hours
|
||||
crons.interval(
|
||||
"cleanup temp files",
|
||||
{ hours: 2 },
|
||||
internal.files.cleanupTempFiles,
|
||||
{}
|
||||
);
|
||||
|
||||
// Every 30 seconds
|
||||
crons.interval(
|
||||
"health check",
|
||||
{ seconds: 30 },
|
||||
internal.monitoring.healthCheck,
|
||||
{}
|
||||
);
|
||||
|
||||
export default crons;
|
||||
```
|
||||
|
||||
### Cron Expression Scheduling
|
||||
|
||||
Use `crons.cron` for precise scheduling with cron expressions:
|
||||
|
||||
```typescript
|
||||
// convex/crons.ts
|
||||
import { cronJobs } from "convex/server";
|
||||
import { internal } from "./_generated/api";
|
||||
|
||||
const crons = cronJobs();
|
||||
|
||||
// Every day at 9 AM UTC
|
||||
crons.cron(
|
||||
"morning notifications",
|
||||
"0 9 * * *",
|
||||
internal.notifications.sendMorningDigest,
|
||||
{}
|
||||
);
|
||||
|
||||
// Every Monday at 8 AM UTC
|
||||
crons.cron(
|
||||
"weekly summary",
|
||||
"0 8 * * 1",
|
||||
internal.reports.generateWeeklySummary,
|
||||
{}
|
||||
);
|
||||
|
||||
// First day of every month at midnight
|
||||
crons.cron(
|
||||
"monthly billing",
|
||||
"0 0 1 * *",
|
||||
internal.billing.processMonthlyBilling,
|
||||
{}
|
||||
);
|
||||
|
||||
// Every 15 minutes
|
||||
crons.cron(
|
||||
"frequent sync",
|
||||
"*/15 * * * *",
|
||||
internal.sync.syncData,
|
||||
{}
|
||||
);
|
||||
|
||||
export default crons;
|
||||
```
|
||||
|
||||
### Cron Expression Reference
|
||||
|
||||
```
|
||||
┌───────────── minute (0-59)
|
||||
│ ┌───────────── hour (0-23)
|
||||
│ │ ┌───────────── day of month (1-31)
|
||||
│ │ │ ┌───────────── month (1-12)
|
||||
│ │ │ │ ┌───────────── day of week (0-6, Sunday=0)
|
||||
│ │ │ │ │
|
||||
* * * * *
|
||||
```
|
||||
|
||||
Common patterns:
|
||||
- `* * * * *` - Every minute
|
||||
- `0 * * * *` - Every hour
|
||||
- `0 0 * * *` - Every day at midnight
|
||||
- `0 0 * * 0` - Every Sunday at midnight
|
||||
- `0 0 1 * *` - First day of every month
|
||||
- `*/5 * * * *` - Every 5 minutes
|
||||
- `0 9-17 * * 1-5` - Every hour from 9 AM to 5 PM, Monday through Friday
|
||||
|
||||
### Internal Functions for Crons
|
||||
|
||||
Cron jobs should call internal functions for security:
|
||||
|
||||
```typescript
|
||||
// convex/tasks.ts
|
||||
import { internalMutation, internalQuery } from "./_generated/server";
import { internal } from "./_generated/api";
|
||||
import { v } from "convex/values";
|
||||
|
||||
// Cleanup expired sessions
|
||||
export const cleanupExpiredSessions = internalMutation({
|
||||
args: {},
|
||||
returns: v.number(),
|
||||
handler: async (ctx) => {
|
||||
const oneHourAgo = Date.now() - 60 * 60 * 1000;
|
||||
|
||||
const expiredSessions = await ctx.db
|
||||
.query("sessions")
|
||||
.withIndex("by_lastActive", (q) => q.lt("lastActive", oneHourAgo))
|
||||
.collect();
|
||||
|
||||
for (const session of expiredSessions) {
|
||||
await ctx.db.delete(session._id);
|
||||
}
|
||||
|
||||
return expiredSessions.length;
|
||||
},
|
||||
});
|
||||
|
||||
// Process pending tasks
|
||||
export const processPendingTasks = internalMutation({
|
||||
args: {},
|
||||
returns: v.null(),
|
||||
handler: async (ctx) => {
|
||||
const pendingTasks = await ctx.db
|
||||
.query("tasks")
|
||||
.withIndex("by_status", (q) => q.eq("status", "pending"))
|
||||
.take(100);
|
||||
|
||||
for (const task of pendingTasks) {
|
||||
await ctx.db.patch(task._id, {
|
||||
status: "processing",
|
||||
startedAt: Date.now(),
|
||||
});
|
||||
|
||||
// Schedule the actual processing
|
||||
await ctx.scheduler.runAfter(0, internal.tasks.processTask, {
|
||||
taskId: task._id,
|
||||
});
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Cron Jobs with Arguments
|
||||
|
||||
Pass static arguments to cron jobs:
|
||||
|
||||
```typescript
|
||||
// convex/crons.ts
|
||||
import { cronJobs } from "convex/server";
|
||||
import { internal } from "./_generated/api";
|
||||
|
||||
const crons = cronJobs();
|
||||
|
||||
// Different cleanup intervals for different types
|
||||
crons.interval(
|
||||
"cleanup temp files",
|
||||
{ hours: 1 },
|
||||
internal.cleanup.cleanupByType,
|
||||
{ fileType: "temp", maxAge: 3600000 }
|
||||
);
|
||||
|
||||
crons.interval(
|
||||
"cleanup cache files",
|
||||
{ hours: 24 },
|
||||
internal.cleanup.cleanupByType,
|
||||
{ fileType: "cache", maxAge: 86400000 }
|
||||
);
|
||||
|
||||
export default crons;
|
||||
```
|
||||
|
||||
```typescript
|
||||
// convex/cleanup.ts
|
||||
import { internalMutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const cleanupByType = internalMutation({
|
||||
args: {
|
||||
fileType: v.string(),
|
||||
maxAge: v.number(),
|
||||
},
|
||||
returns: v.number(),
|
||||
handler: async (ctx, args) => {
|
||||
const cutoff = Date.now() - args.maxAge;
|
||||
|
||||
const oldFiles = await ctx.db
|
||||
.query("files")
|
||||
.withIndex("by_type_and_created", (q) =>
|
||||
q.eq("type", args.fileType).lt("createdAt", cutoff)
|
||||
)
|
||||
.collect();
|
||||
|
||||
for (const file of oldFiles) {
|
||||
await ctx.storage.delete(file.storageId);
|
||||
await ctx.db.delete(file._id);
|
||||
}
|
||||
|
||||
return oldFiles.length;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Monitoring and Logging
|
||||
|
||||
Add logging to track cron job execution:
|
||||
|
||||
```typescript
|
||||
// convex/tasks.ts
|
||||
import { internalMutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const cleanupWithLogging = internalMutation({
|
||||
args: {},
|
||||
returns: v.null(),
|
||||
handler: async (ctx) => {
|
||||
const startTime = Date.now();
|
||||
let processedCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
try {
|
||||
const expiredItems = await ctx.db
.query("items")
// Express the cutoff as an index range, not a post-filter, so only
// expired rows are scanned.
.withIndex("by_expiresAt", (q) => q.lt("expiresAt", Date.now()))
.collect();
|
||||
|
||||
for (const item of expiredItems) {
|
||||
try {
|
||||
await ctx.db.delete(item._id);
|
||||
processedCount++;
|
||||
} catch (error) {
|
||||
errorCount++;
|
||||
console.error(`Failed to delete item ${item._id}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
// Log job completion
|
||||
await ctx.db.insert("cronLogs", {
|
||||
jobName: "cleanup",
|
||||
startTime,
|
||||
endTime: Date.now(),
|
||||
duration: Date.now() - startTime,
|
||||
processedCount,
|
||||
errorCount,
|
||||
status: errorCount === 0 ? "success" : "partial",
|
||||
});
|
||||
} catch (error) {
// NOTE: rethrowing from a mutation rolls back the entire transaction,
// which would also discard this failure-log insert. Log and swallow the
// error here so the "failed" record actually persists; the status field
// still surfaces the failure to monitoring.
console.error("cleanup job failed:", error);
// Log job failure
await ctx.db.insert("cronLogs", {
jobName: "cleanup",
startTime,
endTime: Date.now(),
duration: Date.now() - startTime,
processedCount,
errorCount,
status: "failed",
error: String(error),
});
}
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Batching for Large Datasets
|
||||
|
||||
Handle large datasets in batches to avoid timeouts:
|
||||
|
||||
```typescript
|
||||
// convex/tasks.ts
|
||||
import { internalMutation } from "./_generated/server";
|
||||
import { internal } from "./_generated/api";
|
||||
import { v } from "convex/values";
|
||||
|
||||
const BATCH_SIZE = 100;
|
||||
|
||||
export const processBatch = internalMutation({
|
||||
args: {
|
||||
cursor: v.optional(v.string()),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const result = await ctx.db
|
||||
.query("items")
|
||||
.withIndex("by_status", (q) => q.eq("status", "pending"))
|
||||
.paginate({ numItems: BATCH_SIZE, cursor: args.cursor ?? null });
|
||||
|
||||
for (const item of result.page) {
|
||||
await ctx.db.patch(item._id, {
|
||||
status: "processed",
|
||||
processedAt: Date.now(),
|
||||
});
|
||||
}
|
||||
|
||||
// Schedule next batch if there are more items
|
||||
if (!result.isDone) {
|
||||
await ctx.scheduler.runAfter(0, internal.tasks.processBatch, {
|
||||
cursor: result.continueCursor,
|
||||
});
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### External API Calls in Crons
|
||||
|
||||
Use actions for external API calls:
|
||||
|
||||
```typescript
|
||||
// convex/sync.ts
|
||||
"use node";
|
||||
|
||||
import { internalAction } from "./_generated/server";
|
||||
import { internal } from "./_generated/api";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const syncExternalData = internalAction({
|
||||
args: {},
|
||||
returns: v.null(),
|
||||
handler: async (ctx) => {
|
||||
// Fetch from external API
|
||||
const response = await fetch("https://api.example.com/data", {
|
||||
headers: {
|
||||
Authorization: `Bearer ${process.env.API_KEY}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`API request failed: ${response.status}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
// Store the data using a mutation
|
||||
await ctx.runMutation(internal.sync.storeExternalData, {
|
||||
data,
|
||||
syncedAt: Date.now(),
|
||||
});
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
// NOTE(review): `internalMutation` cannot be defined in a "use node"
// module — such files may only contain actions. Move this mutation to a
// separate file without the "use node" directive, import
// `internalMutation` from "./_generated/server" there, and reference it
// from the action via `internal.<thatFile>.storeExternalData`.
export const storeExternalData = internalMutation({
|
||||
args: {
|
||||
data: v.any(),
|
||||
syncedAt: v.number(),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
await ctx.db.insert("externalData", {
|
||||
data: args.data,
|
||||
syncedAt: args.syncedAt,
|
||||
});
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
```typescript
|
||||
// convex/crons.ts
|
||||
import { cronJobs } from "convex/server";
|
||||
import { internal } from "./_generated/api";
|
||||
|
||||
const crons = cronJobs();
|
||||
|
||||
crons.interval(
|
||||
"sync external data",
|
||||
{ minutes: 15 },
|
||||
internal.sync.syncExternalData,
|
||||
{}
|
||||
);
|
||||
|
||||
export default crons;
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Schema for Cron Job Logging
|
||||
|
||||
```typescript
|
||||
// convex/schema.ts
|
||||
import { defineSchema, defineTable } from "convex/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export default defineSchema({
|
||||
cronLogs: defineTable({
|
||||
jobName: v.string(),
|
||||
startTime: v.number(),
|
||||
endTime: v.number(),
|
||||
duration: v.number(),
|
||||
processedCount: v.number(),
|
||||
errorCount: v.number(),
|
||||
status: v.union(
|
||||
v.literal("success"),
|
||||
v.literal("partial"),
|
||||
v.literal("failed")
|
||||
),
|
||||
error: v.optional(v.string()),
|
||||
})
|
||||
.index("by_job", ["jobName"])
|
||||
.index("by_status", ["status"])
|
||||
.index("by_startTime", ["startTime"]),
|
||||
|
||||
sessions: defineTable({
|
||||
userId: v.id("users"),
|
||||
token: v.string(),
|
||||
lastActive: v.number(),
|
||||
expiresAt: v.number(),
|
||||
})
|
||||
.index("by_user", ["userId"])
|
||||
.index("by_lastActive", ["lastActive"])
|
||||
.index("by_expiresAt", ["expiresAt"]),
|
||||
|
||||
tasks: defineTable({
|
||||
type: v.string(),
|
||||
status: v.union(
|
||||
v.literal("pending"),
|
||||
v.literal("processing"),
|
||||
v.literal("completed"),
|
||||
v.literal("failed")
|
||||
),
|
||||
data: v.any(),
|
||||
createdAt: v.number(),
|
||||
startedAt: v.optional(v.number()),
|
||||
completedAt: v.optional(v.number()),
|
||||
})
|
||||
.index("by_status", ["status"])
|
||||
.index("by_type_and_status", ["type", "status"]),
|
||||
});
|
||||
```
|
||||
|
||||
### Complete Cron Configuration Example
|
||||
|
||||
```typescript
|
||||
// convex/crons.ts
|
||||
import { cronJobs } from "convex/server";
|
||||
import { internal } from "./_generated/api";
|
||||
|
||||
const crons = cronJobs();
|
||||
|
||||
// Cleanup jobs
|
||||
crons.interval(
|
||||
"cleanup expired sessions",
|
||||
{ hours: 1 },
|
||||
internal.cleanup.expiredSessions,
|
||||
{}
|
||||
);
|
||||
|
||||
crons.interval(
|
||||
"cleanup old logs",
|
||||
{ hours: 24 },
|
||||
internal.cleanup.oldLogs,
|
||||
{ maxAgeDays: 30 }
|
||||
);
|
||||
|
||||
// Sync jobs
|
||||
crons.interval(
|
||||
"sync user data",
|
||||
{ minutes: 15 },
|
||||
internal.sync.userData,
|
||||
{}
|
||||
);
|
||||
|
||||
// Report jobs
|
||||
crons.cron(
|
||||
"daily analytics",
|
||||
"0 1 * * *",
|
||||
internal.reports.dailyAnalytics,
|
||||
{}
|
||||
);
|
||||
|
||||
crons.cron(
|
||||
"weekly summary",
|
||||
"0 9 * * 1",
|
||||
internal.reports.weeklySummary,
|
||||
{}
|
||||
);
|
||||
|
||||
// Health checks
|
||||
crons.interval(
|
||||
"service health check",
|
||||
{ minutes: 5 },
|
||||
internal.monitoring.healthCheck,
|
||||
{}
|
||||
);
|
||||
|
||||
export default crons;
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Never run `npx convex deploy` unless explicitly instructed
|
||||
- Never run any git commands unless explicitly instructed
|
||||
- Only use `crons.interval` or `crons.cron` methods, not deprecated helpers
|
||||
- Always call internal functions from cron jobs for security
|
||||
- Import `internal` from `_generated/api` even for functions in the same file
|
||||
- Add logging and monitoring for production cron jobs
|
||||
- Use batching for operations that process large datasets
|
||||
- Handle errors gracefully to prevent job failures
|
||||
- Use meaningful job names for dashboard visibility
|
||||
- Consider timezone when using cron expressions (Convex uses UTC)
|
||||
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Using public functions** - Cron jobs should call internal functions only
|
||||
2. **Long-running mutations** - Break large operations into batches
|
||||
3. **Missing error handling** - Unhandled errors will fail the entire job
|
||||
4. **Forgetting timezone** - All cron expressions use UTC
|
||||
5. **Using deprecated helpers** - Avoid `crons.hourly`, `crons.daily`, etc.
|
||||
6. **Not logging execution** - Makes debugging production issues difficult
|
||||
|
||||
## References
|
||||
|
||||
- Convex Documentation: https://docs.convex.dev/
|
||||
- Convex LLMs.txt: https://docs.convex.dev/llms.txt
|
||||
- Cron Jobs: https://docs.convex.dev/scheduling/cron-jobs
|
||||
- Scheduling Overview: https://docs.convex.dev/scheduling
|
||||
- Scheduled Functions: https://docs.convex.dev/scheduling/scheduled-functions
|
||||
<!-- File: .claude/skills/convex-file-storage/SKILL.md (new file, 467 lines) -->
|
||||
---
|
||||
name: convex-file-storage
|
||||
displayName: Convex File Storage
|
||||
description: Complete file handling including upload flows, serving files via URL, storing generated files from actions, deletion, and accessing file metadata from system tables
|
||||
version: 1.0.0
|
||||
author: Convex
|
||||
tags: [convex, file-storage, uploads, images, files]
|
||||
---
|
||||
|
||||
# Convex File Storage
|
||||
|
||||
Handle file uploads, storage, serving, and management in Convex applications with proper patterns for images, documents, and generated files.
|
||||
|
||||
## Documentation Sources
|
||||
|
||||
Before implementing, do not assume; fetch the latest documentation:
|
||||
|
||||
- Primary: https://docs.convex.dev/file-storage
|
||||
- Upload Files: https://docs.convex.dev/file-storage/upload-files
|
||||
- Serve Files: https://docs.convex.dev/file-storage/serve-files
|
||||
- For broader context: https://docs.convex.dev/llms.txt
|
||||
|
||||
## Instructions
|
||||
|
||||
### File Storage Overview
|
||||
|
||||
Convex provides built-in file storage with:
|
||||
- Automatic URL generation for serving files
|
||||
- Support for any file type (images, PDFs, videos, etc.)
|
||||
- File metadata via the `_storage` system table
|
||||
- Integration with mutations and actions
|
||||
|
||||
### Generating Upload URLs
|
||||
|
||||
```typescript
|
||||
// convex/files.ts
|
||||
import { mutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const generateUploadUrl = mutation({
|
||||
args: {},
|
||||
returns: v.string(),
|
||||
handler: async (ctx) => {
|
||||
return await ctx.storage.generateUploadUrl();
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Client-Side Upload
|
||||
|
||||
```typescript
|
||||
// React component
|
||||
import { useMutation } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
import { useState } from "react";
|
||||
|
||||
function FileUploader() {
|
||||
const generateUploadUrl = useMutation(api.files.generateUploadUrl);
|
||||
const saveFile = useMutation(api.files.saveFile);
|
||||
const [uploading, setUploading] = useState(false);
|
||||
|
||||
const handleUpload = async (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
const file = e.target.files?.[0];
|
||||
if (!file) return;
|
||||
|
||||
setUploading(true);
|
||||
try {
|
||||
// Step 1: Get upload URL
|
||||
const uploadUrl = await generateUploadUrl();
|
||||
|
||||
// Step 2: Upload file to storage
|
||||
const result = await fetch(uploadUrl, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": file.type },
|
||||
body: file,
|
||||
});
|
||||
|
||||
const { storageId } = await result.json();
|
||||
|
||||
// Step 3: Save file reference to database
|
||||
await saveFile({
|
||||
storageId,
|
||||
fileName: file.name,
|
||||
fileType: file.type,
|
||||
fileSize: file.size,
|
||||
});
|
||||
} finally {
|
||||
setUploading(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
<input
|
||||
type="file"
|
||||
onChange={handleUpload}
|
||||
disabled={uploading}
|
||||
/>
|
||||
{uploading && <p>Uploading...</p>}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Saving File References
|
||||
|
||||
```typescript
|
||||
// convex/files.ts
|
||||
import { mutation, query } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const saveFile = mutation({
|
||||
args: {
|
||||
storageId: v.id("_storage"),
|
||||
fileName: v.string(),
|
||||
fileType: v.string(),
|
||||
fileSize: v.number(),
|
||||
},
|
||||
returns: v.id("files"),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db.insert("files", {
|
||||
storageId: args.storageId,
|
||||
fileName: args.fileName,
|
||||
fileType: args.fileType,
|
||||
fileSize: args.fileSize,
|
||||
uploadedAt: Date.now(),
|
||||
});
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Serving Files via URL
|
||||
|
||||
```typescript
|
||||
// convex/files.ts
|
||||
export const getFileUrl = query({
|
||||
args: { storageId: v.id("_storage") },
|
||||
returns: v.union(v.string(), v.null()),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.storage.getUrl(args.storageId);
|
||||
},
|
||||
});
|
||||
|
||||
// Get file with URL
|
||||
export const getFile = query({
|
||||
args: { fileId: v.id("files") },
|
||||
returns: v.union(
|
||||
v.object({
|
||||
_id: v.id("files"),
|
||||
fileName: v.string(),
|
||||
fileType: v.string(),
|
||||
fileSize: v.number(),
|
||||
url: v.union(v.string(), v.null()),
|
||||
}),
|
||||
v.null()
|
||||
),
|
||||
handler: async (ctx, args) => {
|
||||
const file = await ctx.db.get(args.fileId);
|
||||
if (!file) return null;
|
||||
|
||||
const url = await ctx.storage.getUrl(file.storageId);
|
||||
|
||||
return {
|
||||
_id: file._id,
|
||||
fileName: file.fileName,
|
||||
fileType: file.fileType,
|
||||
fileSize: file.fileSize,
|
||||
url,
|
||||
};
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Displaying Files in React
|
||||
|
||||
```typescript
|
||||
import { useQuery } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
|
||||
function FileDisplay({ fileId }: { fileId: Id<"files"> }) {
|
||||
const file = useQuery(api.files.getFile, { fileId });
|
||||
|
||||
if (!file) return <div>Loading...</div>;
|
||||
if (!file.url) return <div>File not found</div>;
|
||||
|
||||
// Handle different file types
|
||||
if (file.fileType.startsWith("image/")) {
|
||||
return <img src={file.url} alt={file.fileName} />;
|
||||
}
|
||||
|
||||
if (file.fileType === "application/pdf") {
|
||||
return (
|
||||
<iframe
|
||||
src={file.url}
|
||||
title={file.fileName}
|
||||
width="100%"
|
||||
height="600px"
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<a href={file.url} download={file.fileName}>
|
||||
Download {file.fileName}
|
||||
</a>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Storing Generated Files from Actions
|
||||
|
||||
```typescript
|
||||
// convex/generate.ts
|
||||
"use node";
|
||||
|
||||
import { action } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { api } from "./_generated/api";
|
||||
|
||||
export const generatePDF = action({
|
||||
args: { content: v.string() },
|
||||
returns: v.id("_storage"),
|
||||
handler: async (ctx, args) => {
|
||||
// Generate PDF (example using a library)
|
||||
const pdfBuffer = await generatePDFFromContent(args.content);
|
||||
|
||||
// Convert to Blob
|
||||
const blob = new Blob([pdfBuffer], { type: "application/pdf" });
|
||||
|
||||
// Store in Convex
|
||||
const storageId = await ctx.storage.store(blob);
|
||||
|
||||
return storageId;
|
||||
},
|
||||
});
|
||||
|
||||
// Generate and save image
|
||||
export const generateImage = action({
|
||||
args: { prompt: v.string() },
|
||||
returns: v.id("_storage"),
|
||||
handler: async (ctx, args) => {
|
||||
// Call external API to generate image
|
||||
const response = await fetch("https://api.example.com/generate", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ prompt: args.prompt }),
|
||||
});
|
||||
|
||||
const imageBuffer = await response.arrayBuffer();
|
||||
const blob = new Blob([imageBuffer], { type: "image/png" });
|
||||
|
||||
return await ctx.storage.store(blob);
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Accessing File Metadata
|
||||
|
||||
```typescript
|
||||
// convex/files.ts
|
||||
import { query } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { Id } from "./_generated/dataModel";
|
||||
|
||||
type FileMetadata = {
|
||||
_id: Id<"_storage">;
|
||||
_creationTime: number;
|
||||
contentType?: string;
|
||||
sha256: string;
|
||||
size: number;
|
||||
};
|
||||
|
||||
export const getFileMetadata = query({
|
||||
args: { storageId: v.id("_storage") },
|
||||
returns: v.union(
|
||||
v.object({
|
||||
_id: v.id("_storage"),
|
||||
_creationTime: v.number(),
|
||||
contentType: v.optional(v.string()),
|
||||
sha256: v.string(),
|
||||
size: v.number(),
|
||||
}),
|
||||
v.null()
|
||||
),
|
||||
handler: async (ctx, args) => {
|
||||
const metadata = await ctx.db.system.get(args.storageId);
|
||||
return metadata as FileMetadata | null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Deleting Files
|
||||
|
||||
```typescript
|
||||
// convex/files.ts
|
||||
import { mutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const deleteFile = mutation({
|
||||
args: { fileId: v.id("files") },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const file = await ctx.db.get(args.fileId);
|
||||
if (!file) return null;
|
||||
|
||||
// Delete from storage
|
||||
await ctx.storage.delete(file.storageId);
|
||||
|
||||
// Delete database record
|
||||
await ctx.db.delete(args.fileId);
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Image Upload with Preview
|
||||
|
||||
```typescript
|
||||
import { useMutation } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
import { useState, useRef } from "react";
|
||||
|
||||
function ImageUploader({ onUpload }: { onUpload: (id: Id<"files">) => void }) {
|
||||
const generateUploadUrl = useMutation(api.files.generateUploadUrl);
|
||||
const saveFile = useMutation(api.files.saveFile);
|
||||
const [preview, setPreview] = useState<string | null>(null);
|
||||
const [uploading, setUploading] = useState(false);
|
||||
const inputRef = useRef<HTMLInputElement>(null);
|
||||
|
||||
const handleFileSelect = async (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
const file = e.target.files?.[0];
|
||||
if (!file) return;
|
||||
|
||||
// Validate file type
|
||||
if (!file.type.startsWith("image/")) {
|
||||
alert("Please select an image file");
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate file size (max 10MB)
|
||||
if (file.size > 10 * 1024 * 1024) {
|
||||
alert("File size must be less than 10MB");
|
||||
return;
|
||||
}
|
||||
|
||||
// Show preview
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e) => setPreview(e.target?.result as string);
|
||||
reader.readAsDataURL(file);
|
||||
|
||||
// Upload
|
||||
setUploading(true);
|
||||
try {
|
||||
const uploadUrl = await generateUploadUrl();
|
||||
const result = await fetch(uploadUrl, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": file.type },
|
||||
body: file,
|
||||
});
|
||||
|
||||
const { storageId } = await result.json();
|
||||
const fileId = await saveFile({
|
||||
storageId,
|
||||
fileName: file.name,
|
||||
fileType: file.type,
|
||||
fileSize: file.size,
|
||||
});
|
||||
|
||||
onUpload(fileId);
|
||||
} finally {
|
||||
setUploading(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
<input
|
||||
ref={inputRef}
|
||||
type="file"
|
||||
accept="image/*"
|
||||
onChange={handleFileSelect}
|
||||
style={{ display: "none" }}
|
||||
/>
|
||||
|
||||
<button
|
||||
onClick={() => inputRef.current?.click()}
|
||||
disabled={uploading}
|
||||
>
|
||||
{uploading ? "Uploading..." : "Select Image"}
|
||||
</button>
|
||||
|
||||
{preview && (
|
||||
<img
|
||||
src={preview}
|
||||
alt="Preview"
|
||||
style={{ maxWidth: 200, marginTop: 10 }}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Schema for File Storage
|
||||
|
||||
```typescript
|
||||
// convex/schema.ts
|
||||
import { defineSchema, defineTable } from "convex/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export default defineSchema({
|
||||
files: defineTable({
|
||||
storageId: v.id("_storage"),
|
||||
fileName: v.string(),
|
||||
fileType: v.string(),
|
||||
fileSize: v.number(),
|
||||
uploadedBy: v.id("users"),
|
||||
uploadedAt: v.number(),
|
||||
})
|
||||
.index("by_user", ["uploadedBy"])
|
||||
.index("by_type", ["fileType"]),
|
||||
|
||||
// User avatars
|
||||
users: defineTable({
|
||||
name: v.string(),
|
||||
email: v.string(),
|
||||
avatarStorageId: v.optional(v.id("_storage")),
|
||||
}),
|
||||
|
||||
// Posts with images
|
||||
posts: defineTable({
|
||||
authorId: v.id("users"),
|
||||
content: v.string(),
|
||||
imageStorageIds: v.array(v.id("_storage")),
|
||||
createdAt: v.number(),
|
||||
}).index("by_author", ["authorId"]),
|
||||
});
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Never run `npx convex deploy` unless explicitly instructed
|
||||
- Never run any git commands unless explicitly instructed
|
||||
- Validate file types and sizes on the client before uploading
|
||||
- Store file metadata (name, type, size) in your own table
|
||||
- Use the `_storage` system table only for Convex metadata
|
||||
- Delete storage files when deleting database references
|
||||
- Use appropriate Content-Type headers when uploading
|
||||
- Consider image optimization for large images
|
||||
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Not setting Content-Type header** - Files may not serve correctly
|
||||
2. **Forgetting to delete storage** - Orphaned files waste storage
|
||||
3. **Not validating file types** - Security risk for malicious uploads
|
||||
4. **Large file uploads without progress** - Poor UX for users
|
||||
5. **Using deprecated getMetadata** - Use ctx.db.system.get instead
|
||||
|
||||
## References
|
||||
|
||||
- Convex Documentation: https://docs.convex.dev/
|
||||
- Convex LLMs.txt: https://docs.convex.dev/llms.txt
|
||||
- File Storage: https://docs.convex.dev/file-storage
|
||||
- Upload Files: https://docs.convex.dev/file-storage/upload-files
|
||||
- Serve Files: https://docs.convex.dev/file-storage/serve-files
|
||||
<!-- File: .claude/skills/convex-functions/SKILL.md (new file, 458 lines) -->
|
||||
---
|
||||
name: convex-functions
|
||||
displayName: Convex Functions
|
||||
description: Writing queries, mutations, actions, and HTTP actions with proper argument validation, error handling, internal functions, and runtime considerations
|
||||
version: 1.0.0
|
||||
author: Convex
|
||||
tags: [convex, functions, queries, mutations, actions, http]
|
||||
---
|
||||
|
||||
# Convex Functions
|
||||
|
||||
Master Convex functions including queries, mutations, actions, and HTTP endpoints with proper validation, error handling, and runtime considerations.
|
||||
|
||||
## Code Quality
|
||||
|
||||
All examples in this skill comply with @convex-dev/eslint-plugin rules:
|
||||
|
||||
- Object syntax with `handler` property
|
||||
- Argument validators on all functions
|
||||
- Explicit table names in database operations
|
||||
|
||||
See the Code Quality section in [convex-best-practices](../convex-best-practices/SKILL.md) for linting setup.
|
||||
|
||||
## Documentation Sources
|
||||
|
||||
Before implementing, do not assume; fetch the latest documentation:
|
||||
|
||||
- Primary: https://docs.convex.dev/functions
|
||||
- Query Functions: https://docs.convex.dev/functions/query-functions
|
||||
- Mutation Functions: https://docs.convex.dev/functions/mutation-functions
|
||||
- Actions: https://docs.convex.dev/functions/actions
|
||||
- HTTP Actions: https://docs.convex.dev/functions/http-actions
|
||||
- For broader context: https://docs.convex.dev/llms.txt
|
||||
|
||||
## Instructions
|
||||
|
||||
### Function Types Overview
|
||||
|
||||
| Type | Database Access | External APIs | Caching | Use Case |
|
||||
| ----------- | ------------------------ | ------------- | ------------- | --------------------- |
|
||||
| Query | Read-only | No | Yes, reactive | Fetching data |
|
||||
| Mutation | Read/Write | No | No | Modifying data |
|
||||
| Action | Via runQuery/runMutation | Yes | No | External integrations |
|
||||
| HTTP Action | Via runQuery/runMutation | Yes | No | Webhooks, APIs |
|
||||
|
||||
### Queries
|
||||
|
||||
Queries are reactive, cached, and read-only:
|
||||
|
||||
```typescript
|
||||
import { query } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const getUser = query({
|
||||
args: { userId: v.id("users") },
|
||||
returns: v.union(
|
||||
v.object({
|
||||
_id: v.id("users"),
|
||||
_creationTime: v.number(),
|
||||
name: v.string(),
|
||||
email: v.string(),
|
||||
}),
|
||||
v.null(),
|
||||
),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db.get("users", args.userId);
|
||||
},
|
||||
});
|
||||
|
||||
// Query with index
|
||||
export const listUserTasks = query({
|
||||
args: { userId: v.id("users") },
|
||||
returns: v.array(
|
||||
v.object({
|
||||
_id: v.id("tasks"),
|
||||
_creationTime: v.number(),
|
||||
title: v.string(),
|
||||
completed: v.boolean(),
|
||||
}),
|
||||
),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db
|
||||
.query("tasks")
|
||||
.withIndex("by_user", (q) => q.eq("userId", args.userId))
|
||||
.order("desc")
|
||||
.collect();
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Mutations
|
||||
|
||||
Mutations modify the database and are transactional:
|
||||
|
||||
```typescript
|
||||
import { mutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { ConvexError } from "convex/values";
|
||||
|
||||
export const createTask = mutation({
|
||||
args: {
|
||||
title: v.string(),
|
||||
userId: v.id("users"),
|
||||
},
|
||||
returns: v.id("tasks"),
|
||||
handler: async (ctx, args) => {
|
||||
// Validate user exists
|
||||
const user = await ctx.db.get("users", args.userId);
|
||||
if (!user) {
|
||||
throw new ConvexError("User not found");
|
||||
}
|
||||
|
||||
return await ctx.db.insert("tasks", {
|
||||
title: args.title,
|
||||
userId: args.userId,
|
||||
completed: false,
|
||||
createdAt: Date.now(),
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
export const deleteTask = mutation({
|
||||
args: { taskId: v.id("tasks") },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
await ctx.db.delete("tasks", args.taskId);
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Actions
|
||||
|
||||
Actions can call external APIs but have no direct database access:
|
||||
|
||||
```typescript
|
||||
"use node";
|
||||
|
||||
import { action } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { api, internal } from "./_generated/api";
|
||||
|
||||
export const sendEmail = action({
|
||||
args: {
|
||||
to: v.string(),
|
||||
subject: v.string(),
|
||||
body: v.string(),
|
||||
},
|
||||
returns: v.object({ success: v.boolean() }),
|
||||
handler: async (ctx, args) => {
|
||||
// Call external API
|
||||
const response = await fetch("https://api.email.com/send", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify(args),
|
||||
});
|
||||
|
||||
return { success: response.ok };
|
||||
},
|
||||
});
|
||||
|
||||
// Action calling queries and mutations
|
||||
export const processOrder = action({
|
||||
args: { orderId: v.id("orders") },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
// Read data via query
|
||||
const order = await ctx.runQuery(api.orders.get, { orderId: args.orderId });
|
||||
|
||||
if (!order) {
|
||||
throw new Error("Order not found");
|
||||
}
|
||||
|
||||
// Call external payment API
|
||||
const paymentResult = await processPayment(order);
|
||||
|
||||
// Update database via mutation
|
||||
await ctx.runMutation(internal.orders.updateStatus, {
|
||||
orderId: args.orderId,
|
||||
status: paymentResult.success ? "paid" : "failed",
|
||||
});
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### HTTP Actions
|
||||
|
||||
HTTP actions handle webhooks and external requests:
|
||||
|
||||
```typescript
|
||||
// convex/http.ts
|
||||
import { httpRouter } from "convex/server";
|
||||
import { httpAction } from "./_generated/server";
|
||||
import { api, internal } from "./_generated/api";
|
||||
|
||||
const http = httpRouter();
|
||||
|
||||
// Webhook endpoint
|
||||
http.route({
|
||||
path: "/webhooks/stripe",
|
||||
method: "POST",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const signature = request.headers.get("stripe-signature");
|
||||
const body = await request.text();
|
||||
|
||||
// Verify webhook signature
|
||||
if (!verifyStripeSignature(body, signature)) {
|
||||
return new Response("Invalid signature", { status: 401 });
|
||||
}
|
||||
|
||||
const event = JSON.parse(body);
|
||||
|
||||
// Process webhook
|
||||
await ctx.runMutation(internal.payments.handleWebhook, {
|
||||
eventType: event.type,
|
||||
data: event.data,
|
||||
});
|
||||
|
||||
return new Response("OK", { status: 200 });
|
||||
}),
|
||||
});
|
||||
|
||||
// API endpoint
|
||||
http.route({
|
||||
path: "/api/users/:userId",
|
||||
method: "GET",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const url = new URL(request.url);
|
||||
const userId = url.pathname.split("/").pop();
|
||||
|
||||
const user = await ctx.runQuery(api.users.get, {
|
||||
userId: userId as Id<"users">,
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
return new Response("Not found", { status: 404 });
|
||||
}
|
||||
|
||||
return Response.json(user);
|
||||
}),
|
||||
});
|
||||
|
||||
export default http;
|
||||
```
|
||||
|
||||
### Internal Functions
|
||||
|
||||
Use internal functions for sensitive operations:
|
||||
|
||||
```typescript
|
||||
import {
|
||||
internalMutation,
|
||||
internalQuery,
|
||||
internalAction,
action,
|
||||
} from "./_generated/server";
|
||||
import { v } from "convex/values";
import { internal } from "./_generated/api";
|
||||
|
||||
// Only callable from other Convex functions
|
||||
export const _updateUserCredits = internalMutation({
|
||||
args: {
|
||||
userId: v.id("users"),
|
||||
amount: v.number(),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const user = await ctx.db.get(args.userId);
|
||||
if (!user) return null;
|
||||
|
||||
await ctx.db.patch(args.userId, {
|
||||
credits: (user.credits || 0) + args.amount,
|
||||
});
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
// Call internal function from action
|
||||
export const purchaseCredits = action({
|
||||
args: { userId: v.id("users"), amount: v.number() },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
// Process payment externally
|
||||
await processPayment(args.amount);
|
||||
|
||||
// Update credits via internal mutation
|
||||
await ctx.runMutation(internal.users._updateUserCredits, {
|
||||
userId: args.userId,
|
||||
amount: args.amount,
|
||||
});
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Scheduling Functions
|
||||
|
||||
Schedule functions to run later:
|
||||
|
||||
```typescript
|
||||
import { mutation, internalMutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { internal } from "./_generated/api";
|
||||
|
||||
export const scheduleReminder = mutation({
|
||||
args: {
|
||||
userId: v.id("users"),
|
||||
message: v.string(),
|
||||
delayMs: v.number(),
|
||||
},
|
||||
returns: v.id("_scheduled_functions"),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.scheduler.runAfter(
|
||||
args.delayMs,
|
||||
internal.notifications.sendReminder,
|
||||
{ userId: args.userId, message: args.message },
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
export const sendReminder = internalMutation({
|
||||
args: {
|
||||
userId: v.id("users"),
|
||||
message: v.string(),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
await ctx.db.insert("notifications", {
|
||||
userId: args.userId,
|
||||
message: args.message,
|
||||
sentAt: Date.now(),
|
||||
});
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Complete Function File
|
||||
|
||||
```typescript
|
||||
// convex/messages.ts
|
||||
import { query, mutation, internalMutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { ConvexError } from "convex/values";
|
||||
import { internal } from "./_generated/api";
|
||||
|
||||
const messageValidator = v.object({
|
||||
_id: v.id("messages"),
|
||||
_creationTime: v.number(),
|
||||
channelId: v.id("channels"),
|
||||
authorId: v.id("users"),
|
||||
content: v.string(),
|
||||
editedAt: v.optional(v.number()),
|
||||
});
|
||||
|
||||
// Public query
|
||||
export const list = query({
|
||||
args: {
|
||||
channelId: v.id("channels"),
|
||||
limit: v.optional(v.number()),
|
||||
},
|
||||
returns: v.array(messageValidator),
|
||||
handler: async (ctx, args) => {
|
||||
const limit = args.limit ?? 50;
|
||||
return await ctx.db
|
||||
.query("messages")
|
||||
.withIndex("by_channel", (q) => q.eq("channelId", args.channelId))
|
||||
.order("desc")
|
||||
.take(limit);
|
||||
},
|
||||
});
|
||||
|
||||
// Public mutation
|
||||
export const send = mutation({
|
||||
args: {
|
||||
channelId: v.id("channels"),
|
||||
authorId: v.id("users"),
|
||||
content: v.string(),
|
||||
},
|
||||
returns: v.id("messages"),
|
||||
handler: async (ctx, args) => {
|
||||
if (args.content.trim().length === 0) {
|
||||
throw new ConvexError("Message cannot be empty");
|
||||
}
|
||||
|
||||
const messageId = await ctx.db.insert("messages", {
|
||||
channelId: args.channelId,
|
||||
authorId: args.authorId,
|
||||
content: args.content.trim(),
|
||||
});
|
||||
|
||||
// Schedule notification
|
||||
await ctx.scheduler.runAfter(0, internal.messages.notifySubscribers, {
|
||||
channelId: args.channelId,
|
||||
messageId,
|
||||
});
|
||||
|
||||
return messageId;
|
||||
},
|
||||
});
|
||||
|
||||
// Internal mutation
|
||||
export const notifySubscribers = internalMutation({
|
||||
args: {
|
||||
channelId: v.id("channels"),
|
||||
messageId: v.id("messages"),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
// Get channel subscribers and notify them
|
||||
const subscribers = await ctx.db
|
||||
.query("subscriptions")
|
||||
.withIndex("by_channel", (q) => q.eq("channelId", args.channelId))
|
||||
.collect();
|
||||
|
||||
for (const sub of subscribers) {
|
||||
await ctx.db.insert("notifications", {
|
||||
userId: sub.userId,
|
||||
messageId: args.messageId,
|
||||
read: false,
|
||||
});
|
||||
}
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Never run `npx convex deploy` unless explicitly instructed
|
||||
- Never run any git commands unless explicitly instructed
|
||||
- Always define args and returns validators
|
||||
- Use queries for read operations (they are cached and reactive)
|
||||
- Use mutations for write operations (they are transactional)
|
||||
- Use actions only when calling external APIs
|
||||
- Use internal functions for sensitive operations
|
||||
- Add `"use node";` at the top of action files using Node.js APIs
|
||||
- Handle errors with ConvexError for user-facing messages
|
||||
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Using actions for database operations** - Use queries/mutations instead
|
||||
2. **Calling external APIs from queries/mutations** - Use actions
|
||||
3. **Forgetting to add "use node"** - Required for Node.js APIs in actions
|
||||
4. **Missing return validators** - Always specify returns
|
||||
5. **Not using internal functions for sensitive logic** - Protect with internalMutation
|
||||
|
||||
## References
|
||||
|
||||
- Convex Documentation: https://docs.convex.dev/
|
||||
- Convex LLMs.txt: https://docs.convex.dev/llms.txt
|
||||
- Functions Overview: https://docs.convex.dev/functions
|
||||
- Query Functions: https://docs.convex.dev/functions/query-functions
|
||||
- Mutation Functions: https://docs.convex.dev/functions/mutation-functions
|
||||
- Actions: https://docs.convex.dev/functions/actions
|
||||
733
.claude/skills/convex-http-actions/SKILL.md
Normal file
733
.claude/skills/convex-http-actions/SKILL.md
Normal file
@@ -0,0 +1,733 @@
|
||||
---
|
||||
name: convex-http-actions
|
||||
displayName: Convex HTTP Actions
|
||||
description: External API integration and webhook handling including HTTP endpoint routing, request/response handling, authentication, CORS configuration, and webhook signature validation
|
||||
version: 1.0.0
|
||||
author: Convex
|
||||
tags: [convex, http, actions, webhooks, api, endpoints]
|
||||
---
|
||||
|
||||
# Convex HTTP Actions
|
||||
|
||||
Build HTTP endpoints for webhooks, external API integrations, and custom routes in Convex applications.
|
||||
|
||||
## Documentation Sources
|
||||
|
||||
Before implementing, do not assume; fetch the latest documentation:
|
||||
|
||||
- Primary: https://docs.convex.dev/functions/http-actions
|
||||
- Actions Overview: https://docs.convex.dev/functions/actions
|
||||
- Authentication: https://docs.convex.dev/auth
|
||||
- For broader context: https://docs.convex.dev/llms.txt
|
||||
|
||||
## Instructions
|
||||
|
||||
### HTTP Actions Overview
|
||||
|
||||
HTTP actions allow you to define HTTP endpoints in Convex that can:
|
||||
|
||||
- Receive webhooks from third-party services
|
||||
- Create custom API routes
|
||||
- Handle file uploads
|
||||
- Integrate with external services
|
||||
- Serve dynamic content
|
||||
|
||||
### Basic HTTP Router Setup
|
||||
|
||||
```typescript
|
||||
// convex/http.ts
|
||||
import { httpRouter } from "convex/server";
|
||||
import { httpAction } from "./_generated/server";
|
||||
|
||||
const http = httpRouter();
|
||||
|
||||
// Simple GET endpoint
|
||||
http.route({
|
||||
path: "/health",
|
||||
method: "GET",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
return new Response(JSON.stringify({ status: "ok" }), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
}),
|
||||
});
|
||||
|
||||
export default http;
|
||||
```
|
||||
|
||||
### Request Handling
|
||||
|
||||
```typescript
|
||||
// convex/http.ts
|
||||
import { httpRouter } from "convex/server";
|
||||
import { httpAction } from "./_generated/server";
|
||||
|
||||
const http = httpRouter();
|
||||
|
||||
// Handle JSON body
|
||||
http.route({
|
||||
path: "/api/data",
|
||||
method: "POST",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
// Parse JSON body
|
||||
const body = await request.json();
|
||||
|
||||
// Access headers
|
||||
const authHeader = request.headers.get("Authorization");
|
||||
|
||||
// Access URL parameters
|
||||
const url = new URL(request.url);
|
||||
const queryParam = url.searchParams.get("filter");
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({ received: body, filter: queryParam }),
|
||||
{
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
}
|
||||
);
|
||||
}),
|
||||
});
|
||||
|
||||
// Handle form data
|
||||
http.route({
|
||||
path: "/api/form",
|
||||
method: "POST",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const formData = await request.formData();
|
||||
const name = formData.get("name");
|
||||
const email = formData.get("email");
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({ name, email }),
|
||||
{
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
}
|
||||
);
|
||||
}),
|
||||
});
|
||||
|
||||
// Handle raw bytes
|
||||
http.route({
|
||||
path: "/api/upload",
|
||||
method: "POST",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const bytes = await request.bytes();
|
||||
const contentType = request.headers.get("Content-Type") ?? "application/octet-stream";
|
||||
|
||||
// Store in Convex storage
|
||||
const blob = new Blob([bytes], { type: contentType });
|
||||
const storageId = await ctx.storage.store(blob);
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({ storageId }),
|
||||
{
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
}
|
||||
);
|
||||
}),
|
||||
});
|
||||
|
||||
export default http;
|
||||
```
|
||||
|
||||
### Path Parameters
|
||||
|
||||
Use path prefix matching for dynamic routes:
|
||||
|
||||
```typescript
|
||||
// convex/http.ts
|
||||
import { httpRouter } from "convex/server";
|
||||
import { httpAction } from "./_generated/server";
|
||||
|
||||
const http = httpRouter();
|
||||
|
||||
// Match /api/users/* with pathPrefix
|
||||
http.route({
|
||||
pathPrefix: "/api/users/",
|
||||
method: "GET",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const url = new URL(request.url);
|
||||
// Extract user ID from path: /api/users/123 -> "123"
|
||||
const userId = url.pathname.replace("/api/users/", "");
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({ userId }),
|
||||
{
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
}
|
||||
);
|
||||
}),
|
||||
});
|
||||
|
||||
export default http;
|
||||
```
|
||||
|
||||
### CORS Configuration
|
||||
|
||||
```typescript
|
||||
// convex/http.ts
|
||||
import { httpRouter } from "convex/server";
|
||||
import { httpAction } from "./_generated/server";
|
||||
|
||||
const http = httpRouter();
|
||||
|
||||
// CORS headers helper
|
||||
const corsHeaders = {
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Access-Control-Allow-Methods": "GET, POST, PUT, DELETE, OPTIONS",
|
||||
"Access-Control-Allow-Headers": "Content-Type, Authorization",
|
||||
"Access-Control-Max-Age": "86400",
|
||||
};
|
||||
|
||||
// Handle preflight requests
|
||||
http.route({
|
||||
path: "/api/data",
|
||||
method: "OPTIONS",
|
||||
handler: httpAction(async () => {
|
||||
return new Response(null, {
|
||||
status: 204,
|
||||
headers: corsHeaders,
|
||||
});
|
||||
}),
|
||||
});
|
||||
|
||||
// Actual endpoint with CORS
|
||||
http.route({
|
||||
path: "/api/data",
|
||||
method: "POST",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const body = await request.json();
|
||||
|
||||
return new Response(
|
||||
JSON.stringify({ success: true, data: body }),
|
||||
{
|
||||
status: 200,
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
...corsHeaders,
|
||||
},
|
||||
}
|
||||
);
|
||||
}),
|
||||
});
|
||||
|
||||
export default http;
|
||||
```
|
||||
|
||||
### Webhook Handling
|
||||
|
||||
```typescript
|
||||
// convex/http.ts
|
||||
import { httpRouter } from "convex/server";
|
||||
import { httpAction } from "./_generated/server";
|
||||
import { internal } from "./_generated/api";
|
||||
|
||||
const http = httpRouter();
|
||||
|
||||
// Stripe webhook
|
||||
http.route({
|
||||
path: "/webhooks/stripe",
|
||||
method: "POST",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const signature = request.headers.get("stripe-signature");
|
||||
if (!signature) {
|
||||
return new Response("Missing signature", { status: 400 });
|
||||
}
|
||||
|
||||
const body = await request.text();
|
||||
|
||||
// Verify webhook signature (in action with Node.js)
|
||||
try {
|
||||
await ctx.runAction(internal.stripe.verifyAndProcessWebhook, {
|
||||
body,
|
||||
signature,
|
||||
});
|
||||
return new Response("OK", { status: 200 });
|
||||
} catch (error) {
|
||||
console.error("Webhook error:", error);
|
||||
return new Response("Webhook error", { status: 400 });
|
||||
}
|
||||
}),
|
||||
});
|
||||
|
||||
// GitHub webhook
|
||||
http.route({
|
||||
path: "/webhooks/github",
|
||||
method: "POST",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const event = request.headers.get("X-GitHub-Event");
|
||||
const signature = request.headers.get("X-Hub-Signature-256");
|
||||
|
||||
if (!signature) {
|
||||
return new Response("Missing signature", { status: 400 });
|
||||
}
|
||||
|
||||
const body = await request.text();
|
||||
|
||||
await ctx.runAction(internal.github.processWebhook, {
|
||||
event: event ?? "unknown",
|
||||
body,
|
||||
signature,
|
||||
});
|
||||
|
||||
return new Response("OK", { status: 200 });
|
||||
}),
|
||||
});
|
||||
|
||||
export default http;
|
||||
```
|
||||
|
||||
### Webhook Signature Verification
|
||||
|
||||
```typescript
|
||||
// convex/stripe.ts
|
||||
"use node";
|
||||
|
||||
import { internalAction, internalMutation } from "./_generated/server";
|
||||
import { internal } from "./_generated/api";
|
||||
import { v } from "convex/values";
|
||||
import Stripe from "stripe";
|
||||
|
||||
const stripe = new Stripe(process.env.STRIPE_SECRET_KEY!);
|
||||
|
||||
export const verifyAndProcessWebhook = internalAction({
|
||||
args: {
|
||||
body: v.string(),
|
||||
signature: v.string(),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const webhookSecret = process.env.STRIPE_WEBHOOK_SECRET!;
|
||||
|
||||
// Verify signature
|
||||
const event = stripe.webhooks.constructEvent(
|
||||
args.body,
|
||||
args.signature,
|
||||
webhookSecret
|
||||
);
|
||||
|
||||
// Process based on event type
|
||||
switch (event.type) {
|
||||
case "checkout.session.completed":
|
||||
await ctx.runMutation(internal.payments.handleCheckoutComplete, {
|
||||
sessionId: event.data.object.id,
|
||||
customerId: event.data.object.customer as string,
|
||||
});
|
||||
break;
|
||||
|
||||
case "customer.subscription.updated":
|
||||
await ctx.runMutation(internal.subscriptions.handleUpdate, {
|
||||
subscriptionId: event.data.object.id,
|
||||
status: event.data.object.status,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Authentication in HTTP Actions
|
||||
|
||||
```typescript
|
||||
// convex/http.ts
|
||||
import { httpRouter } from "convex/server";
|
||||
import { httpAction } from "./_generated/server";
|
||||
import { internal } from "./_generated/api";
|
||||
|
||||
const http = httpRouter();
|
||||
|
||||
// API key authentication
|
||||
http.route({
|
||||
path: "/api/protected",
|
||||
method: "GET",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const apiKey = request.headers.get("X-API-Key");
|
||||
|
||||
if (!apiKey) {
|
||||
return new Response(
|
||||
JSON.stringify({ error: "Missing API key" }),
|
||||
{ status: 401, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}
|
||||
|
||||
// Validate API key
|
||||
const isValid = await ctx.runQuery(internal.auth.validateApiKey, {
|
||||
apiKey,
|
||||
});
|
||||
|
||||
if (!isValid) {
|
||||
return new Response(
|
||||
JSON.stringify({ error: "Invalid API key" }),
|
||||
{ status: 403, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}
|
||||
|
||||
// Process authenticated request
|
||||
const data = await ctx.runQuery(internal.data.getProtectedData, {});
|
||||
|
||||
return new Response(
|
||||
JSON.stringify(data),
|
||||
{ status: 200, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}),
|
||||
});
|
||||
|
||||
// Bearer token authentication
|
||||
http.route({
|
||||
path: "/api/user",
|
||||
method: "GET",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const authHeader = request.headers.get("Authorization");
|
||||
|
||||
if (!authHeader?.startsWith("Bearer ")) {
|
||||
return new Response(
|
||||
JSON.stringify({ error: "Missing or invalid Authorization header" }),
|
||||
{ status: 401, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}
|
||||
|
||||
const token = authHeader.slice(7);
|
||||
|
||||
// Validate token and get user
|
||||
const user = await ctx.runQuery(internal.auth.validateToken, { token });
|
||||
|
||||
if (!user) {
|
||||
return new Response(
|
||||
JSON.stringify({ error: "Invalid token" }),
|
||||
{ status: 403, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}
|
||||
|
||||
return new Response(
|
||||
JSON.stringify(user),
|
||||
{ status: 200, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}),
|
||||
});
|
||||
|
||||
export default http;
|
||||
```
|
||||
|
||||
### Calling Mutations and Queries
|
||||
|
||||
```typescript
|
||||
// convex/http.ts
|
||||
import { httpRouter } from "convex/server";
|
||||
import { httpAction } from "./_generated/server";
|
||||
import { api, internal } from "./_generated/api";
|
||||
|
||||
const http = httpRouter();
|
||||
|
||||
http.route({
|
||||
path: "/api/items",
|
||||
method: "POST",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const body = await request.json();
|
||||
|
||||
// Call a mutation
|
||||
const itemId = await ctx.runMutation(internal.items.create, {
|
||||
name: body.name,
|
||||
description: body.description,
|
||||
});
|
||||
|
||||
// Query the created item
|
||||
const item = await ctx.runQuery(internal.items.get, { id: itemId });
|
||||
|
||||
return new Response(
|
||||
JSON.stringify(item),
|
||||
{ status: 201, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}),
|
||||
});
|
||||
|
||||
http.route({
|
||||
path: "/api/items",
|
||||
method: "GET",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const url = new URL(request.url);
|
||||
const limit = parseInt(url.searchParams.get("limit") ?? "10", 10);
|
||||
|
||||
const items = await ctx.runQuery(internal.items.list, { limit });
|
||||
|
||||
return new Response(
|
||||
JSON.stringify(items),
|
||||
{ status: 200, headers: { "Content-Type": "application/json" } }
|
||||
);
|
||||
}),
|
||||
});
|
||||
|
||||
export default http;
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
```typescript
|
||||
// convex/http.ts
|
||||
import { httpRouter } from "convex/server";
|
||||
import { httpAction } from "./_generated/server";
import { internal } from "./_generated/api";
|
||||
|
||||
const http = httpRouter();
|
||||
|
||||
// Helper for JSON responses
|
||||
function jsonResponse(data: unknown, status = 200) {
|
||||
return new Response(JSON.stringify(data), {
|
||||
status,
|
||||
headers: { "Content-Type": "application/json" },
|
||||
});
|
||||
}
|
||||
|
||||
// Helper for error responses
|
||||
function errorResponse(message: string, status: number) {
|
||||
return jsonResponse({ error: message }, status);
|
||||
}
|
||||
|
||||
http.route({
|
||||
path: "/api/process",
|
||||
method: "POST",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
try {
|
||||
// Validate content type
|
||||
const contentType = request.headers.get("Content-Type");
|
||||
if (!contentType?.includes("application/json")) {
|
||||
return errorResponse("Content-Type must be application/json", 415);
|
||||
}
|
||||
|
||||
// Parse body
|
||||
let body;
|
||||
try {
|
||||
body = await request.json();
|
||||
} catch {
|
||||
return errorResponse("Invalid JSON body", 400);
|
||||
}
|
||||
|
||||
// Validate required fields
|
||||
if (!body.data) {
|
||||
return errorResponse("Missing required field: data", 400);
|
||||
}
|
||||
|
||||
// Process request
|
||||
const result = await ctx.runMutation(internal.process.handle, {
|
||||
data: body.data,
|
||||
});
|
||||
|
||||
return jsonResponse({ success: true, result }, 200);
|
||||
} catch (error) {
|
||||
console.error("Processing error:", error);
|
||||
return errorResponse("Internal server error", 500);
|
||||
}
|
||||
}),
|
||||
});
|
||||
|
||||
export default http;
|
||||
```
|
||||
|
||||
### File Downloads
|
||||
|
||||
```typescript
|
||||
// convex/http.ts
|
||||
import { httpRouter } from "convex/server";
|
||||
import { httpAction } from "./_generated/server";
|
||||
import { Id } from "./_generated/dataModel";
|
||||
|
||||
const http = httpRouter();
|
||||
|
||||
http.route({
|
||||
pathPrefix: "/files/",
|
||||
method: "GET",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const url = new URL(request.url);
|
||||
const fileId = url.pathname.replace("/files/", "") as Id<"_storage">;
|
||||
|
||||
// Get file URL from storage
|
||||
const fileUrl = await ctx.storage.getUrl(fileId);
|
||||
|
||||
if (!fileUrl) {
|
||||
return new Response("File not found", { status: 404 });
|
||||
}
|
||||
|
||||
// Redirect to the file URL
|
||||
return Response.redirect(fileUrl, 302);
|
||||
}),
|
||||
});
|
||||
|
||||
export default http;
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Complete Webhook Integration
|
||||
|
||||
```typescript
|
||||
// convex/http.ts
|
||||
import { httpRouter } from "convex/server";
|
||||
import { httpAction } from "./_generated/server";
|
||||
import { internal } from "./_generated/api";
|
||||
|
||||
const http = httpRouter();
|
||||
|
||||
// Clerk webhook for user sync
|
||||
http.route({
|
||||
path: "/webhooks/clerk",
|
||||
method: "POST",
|
||||
handler: httpAction(async (ctx, request) => {
|
||||
const svixId = request.headers.get("svix-id");
|
||||
const svixTimestamp = request.headers.get("svix-timestamp");
|
||||
const svixSignature = request.headers.get("svix-signature");
|
||||
|
||||
if (!svixId || !svixTimestamp || !svixSignature) {
|
||||
return new Response("Missing Svix headers", { status: 400 });
|
||||
}
|
||||
|
||||
const body = await request.text();
|
||||
|
||||
try {
|
||||
await ctx.runAction(internal.clerk.verifyAndProcess, {
|
||||
body,
|
||||
svixId,
|
||||
svixTimestamp,
|
||||
svixSignature,
|
||||
});
|
||||
return new Response("OK", { status: 200 });
|
||||
} catch (error) {
|
||||
console.error("Clerk webhook error:", error);
|
||||
return new Response("Webhook verification failed", { status: 400 });
|
||||
}
|
||||
}),
|
||||
});
|
||||
|
||||
export default http;
|
||||
```
|
||||
|
||||
```typescript
|
||||
// convex/clerk.ts
|
||||
"use node";
|
||||
|
||||
import { internalAction, internalMutation } from "./_generated/server";
|
||||
import { internal } from "./_generated/api";
|
||||
import { v } from "convex/values";
|
||||
import { Webhook } from "svix";
|
||||
|
||||
export const verifyAndProcess = internalAction({
|
||||
args: {
|
||||
body: v.string(),
|
||||
svixId: v.string(),
|
||||
svixTimestamp: v.string(),
|
||||
svixSignature: v.string(),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const webhookSecret = process.env.CLERK_WEBHOOK_SECRET!;
|
||||
const wh = new Webhook(webhookSecret);
|
||||
|
||||
const event = wh.verify(args.body, {
|
||||
"svix-id": args.svixId,
|
||||
"svix-timestamp": args.svixTimestamp,
|
||||
"svix-signature": args.svixSignature,
|
||||
}) as { type: string; data: Record<string, unknown> };
|
||||
|
||||
switch (event.type) {
|
||||
case "user.created":
|
||||
await ctx.runMutation(internal.users.create, {
|
||||
clerkId: event.data.id as string,
|
||||
email: (event.data.email_addresses as Array<{ email_address: string }>)[0]?.email_address,
|
||||
name: `${event.data.first_name} ${event.data.last_name}`,
|
||||
});
|
||||
break;
|
||||
|
||||
case "user.updated":
|
||||
await ctx.runMutation(internal.users.update, {
|
||||
clerkId: event.data.id as string,
|
||||
email: (event.data.email_addresses as Array<{ email_address: string }>)[0]?.email_address,
|
||||
name: `${event.data.first_name} ${event.data.last_name}`,
|
||||
});
|
||||
break;
|
||||
|
||||
case "user.deleted":
|
||||
await ctx.runMutation(internal.users.remove, {
|
||||
clerkId: event.data.id as string,
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Schema for HTTP API
|
||||
|
||||
```typescript
|
||||
// convex/schema.ts
|
||||
import { defineSchema, defineTable } from "convex/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export default defineSchema({
|
||||
apiKeys: defineTable({
|
||||
key: v.string(),
|
||||
userId: v.id("users"),
|
||||
name: v.string(),
|
||||
createdAt: v.number(),
|
||||
lastUsedAt: v.optional(v.number()),
|
||||
revokedAt: v.optional(v.number()),
|
||||
})
|
||||
.index("by_key", ["key"])
|
||||
.index("by_user", ["userId"]),
|
||||
|
||||
webhookEvents: defineTable({
|
||||
source: v.string(),
|
||||
eventType: v.string(),
|
||||
payload: v.any(),
|
||||
processedAt: v.number(),
|
||||
status: v.union(
|
||||
v.literal("success"),
|
||||
v.literal("failed")
|
||||
),
|
||||
error: v.optional(v.string()),
|
||||
})
|
||||
.index("by_source", ["source"])
|
||||
.index("by_status", ["status"]),
|
||||
|
||||
users: defineTable({
|
||||
clerkId: v.string(),
|
||||
email: v.string(),
|
||||
name: v.string(),
|
||||
}).index("by_clerk_id", ["clerkId"]),
|
||||
});
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Never run `npx convex deploy` unless explicitly instructed
|
||||
- Never run any git commands unless explicitly instructed
|
||||
- Always validate and sanitize incoming request data
|
||||
- Use internal functions for database operations
|
||||
- Implement proper error handling with appropriate status codes
|
||||
- Add CORS headers for browser-accessible endpoints
|
||||
- Verify webhook signatures before processing
|
||||
- Log webhook events for debugging
|
||||
- Use environment variables for secrets
|
||||
- Handle timeouts gracefully
|
||||
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Missing CORS preflight handler** - Browsers send OPTIONS requests first
|
||||
2. **Not validating webhook signatures** - Security vulnerability
|
||||
3. **Exposing internal functions** - Use internal functions from HTTP actions
|
||||
4. **Forgetting Content-Type headers** - Clients may not parse responses correctly
|
||||
5. **Not handling request body errors** - Invalid JSON will throw
|
||||
6. **Blocking on long operations** - Use scheduled functions for heavy processing
|
||||
|
||||
## References
|
||||
|
||||
- Convex Documentation: https://docs.convex.dev/
|
||||
- Convex LLMs.txt: https://docs.convex.dev/llms.txt
|
||||
- HTTP Actions: https://docs.convex.dev/functions/http-actions
|
||||
- Actions: https://docs.convex.dev/functions/actions
|
||||
- Authentication: https://docs.convex.dev/auth
|
||||
712
.claude/skills/convex-migrations/SKILL.md
Normal file
712
.claude/skills/convex-migrations/SKILL.md
Normal file
@@ -0,0 +1,712 @@
|
||||
---
|
||||
name: convex-migrations
|
||||
displayName: Convex Migrations
|
||||
description: Schema migration strategies for evolving applications including adding new fields, backfilling data, removing deprecated fields, index migrations, and zero-downtime migration patterns
|
||||
version: 1.0.0
|
||||
author: Convex
|
||||
tags: [convex, migrations, schema, database, data-modeling]
|
||||
---
|
||||
|
||||
# Convex Migrations
|
||||
|
||||
Evolve your Convex database schema safely with patterns for adding fields, backfilling data, removing deprecated fields, and maintaining zero-downtime deployments.
|
||||
|
||||
## Documentation Sources
|
||||
|
||||
Before implementing, do not assume; fetch the latest documentation:
|
||||
|
||||
- Primary: https://docs.convex.dev/database/schemas
|
||||
- Schema Overview: https://docs.convex.dev/database
|
||||
- Migration Patterns: https://stack.convex.dev/migrate-data-postgres-to-convex
|
||||
- For broader context: https://docs.convex.dev/llms.txt
|
||||
|
||||
## Instructions
|
||||
|
||||
### Migration Philosophy
|
||||
|
||||
Convex handles schema evolution differently than traditional databases:
|
||||
|
||||
- No explicit migration files or commands
|
||||
- Schema changes deploy instantly with `npx convex dev`
|
||||
- Existing data is not automatically transformed
|
||||
- Use optional fields and backfill mutations for safe migrations
|
||||
|
||||
### Adding New Fields
|
||||
|
||||
Start with optional fields, then backfill:
|
||||
|
||||
```typescript
|
||||
// Step 1: Add optional field to schema
|
||||
// convex/schema.ts
|
||||
import { defineSchema, defineTable } from "convex/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export default defineSchema({
|
||||
users: defineTable({
|
||||
name: v.string(),
|
||||
email: v.string(),
|
||||
// New field - start as optional
|
||||
avatarUrl: v.optional(v.string()),
|
||||
}),
|
||||
});
|
||||
```
|
||||
|
||||
```typescript
|
||||
// Step 2: Update code to handle both cases
|
||||
// convex/users.ts
|
||||
import { query } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const getUser = query({
|
||||
args: { userId: v.id("users") },
|
||||
returns: v.union(
|
||||
v.object({
|
||||
_id: v.id("users"),
|
||||
name: v.string(),
|
||||
email: v.string(),
|
||||
avatarUrl: v.union(v.string(), v.null()),
|
||||
}),
|
||||
v.null()
|
||||
),
|
||||
handler: async (ctx, args) => {
|
||||
const user = await ctx.db.get(args.userId);
|
||||
if (!user) return null;
|
||||
|
||||
return {
|
||||
_id: user._id,
|
||||
name: user.name,
|
||||
email: user.email,
|
||||
// Handle missing field gracefully
|
||||
avatarUrl: user.avatarUrl ?? null,
|
||||
};
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
```typescript
|
||||
// Step 3: Backfill existing documents
|
||||
// convex/migrations.ts
|
||||
import { internalMutation } from "./_generated/server";
|
||||
import { internal } from "./_generated/api";
|
||||
import { v } from "convex/values";
|
||||
|
||||
const BATCH_SIZE = 100;
|
||||
|
||||
export const backfillAvatarUrl = internalMutation({
|
||||
args: {
|
||||
cursor: v.optional(v.string()),
|
||||
},
|
||||
returns: v.object({
|
||||
processed: v.number(),
|
||||
hasMore: v.boolean(),
|
||||
}),
|
||||
handler: async (ctx, args) => {
|
||||
const result = await ctx.db
|
||||
.query("users")
|
||||
.paginate({ numItems: BATCH_SIZE, cursor: args.cursor ?? null });
|
||||
|
||||
let processed = 0;
|
||||
for (const user of result.page) {
|
||||
// Only update if field is missing
|
||||
if (user.avatarUrl === undefined) {
|
||||
await ctx.db.patch(user._id, {
|
||||
avatarUrl: generateDefaultAvatar(user.name),
|
||||
});
|
||||
processed++;
|
||||
}
|
||||
}
|
||||
|
||||
// Schedule next batch if needed
|
||||
if (!result.isDone) {
|
||||
await ctx.scheduler.runAfter(0, internal.migrations.backfillAvatarUrl, {
|
||||
cursor: result.continueCursor,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
processed,
|
||||
hasMore: !result.isDone,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
function generateDefaultAvatar(name: string): string {
|
||||
return `https://api.dicebear.com/7.x/initials/svg?seed=${encodeURIComponent(name)}`;
|
||||
}
|
||||
```
|
||||
|
||||
```typescript
|
||||
// Step 4: After backfill completes, make field required
|
||||
// convex/schema.ts
|
||||
export default defineSchema({
|
||||
users: defineTable({
|
||||
name: v.string(),
|
||||
email: v.string(),
|
||||
avatarUrl: v.string(), // Now required
|
||||
}),
|
||||
});
|
||||
```
|
||||
|
||||
### Removing Fields
|
||||
|
||||
Remove all usage of a field from your code before removing it from the schema:
|
||||
|
||||
```typescript
|
||||
// Step 1: Stop using the field in queries and mutations
|
||||
// Mark as deprecated in code comments
|
||||
|
||||
// Step 2: Remove field from schema (make optional first if needed)
|
||||
// convex/schema.ts
|
||||
export default defineSchema({
|
||||
posts: defineTable({
|
||||
title: v.string(),
|
||||
content: v.string(),
|
||||
authorId: v.id("users"),
|
||||
// legacyField: v.optional(v.string()), // Remove this line
|
||||
}),
|
||||
});
|
||||
|
||||
// Step 3: Optionally clean up existing data
|
||||
// convex/migrations.ts
|
||||
export const removeDeprecatedField = internalMutation({
|
||||
args: {
|
||||
cursor: v.optional(v.string()),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const result = await ctx.db
|
||||
.query("posts")
|
||||
.paginate({ numItems: 100, cursor: args.cursor ?? null });
|
||||
|
||||
for (const post of result.page) {
|
||||
// Use replace to remove the field entirely
|
||||
const { legacyField, ...rest } = post as typeof post & { legacyField?: string };
|
||||
if (legacyField !== undefined) {
|
||||
await ctx.db.replace(post._id, rest);
|
||||
}
|
||||
}
|
||||
|
||||
if (!result.isDone) {
|
||||
await ctx.scheduler.runAfter(0, internal.migrations.removeDeprecatedField, {
|
||||
cursor: result.continueCursor,
|
||||
});
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Renaming Fields
|
||||
|
||||
Renaming requires copying the data to the new field, then removing the old one:
|
||||
|
||||
```typescript
|
||||
// Step 1: Add new field as optional
|
||||
// convex/schema.ts
|
||||
export default defineSchema({
|
||||
users: defineTable({
|
||||
userName: v.string(), // Old field
|
||||
displayName: v.optional(v.string()), // New field
|
||||
}),
|
||||
});
|
||||
|
||||
// Step 2: Update code to read from new field with fallback
|
||||
export const getUser = query({
|
||||
args: { userId: v.id("users") },
|
||||
returns: v.object({
|
||||
_id: v.id("users"),
|
||||
displayName: v.string(),
|
||||
}),
|
||||
handler: async (ctx, args) => {
|
||||
const user = await ctx.db.get(args.userId);
|
||||
if (!user) throw new Error("User not found");
|
||||
|
||||
return {
|
||||
_id: user._id,
|
||||
// Read new field, fall back to old
|
||||
displayName: user.displayName ?? user.userName,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
// Step 3: Backfill to copy data
|
||||
export const backfillDisplayName = internalMutation({
|
||||
args: { cursor: v.optional(v.string()) },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const result = await ctx.db
|
||||
.query("users")
|
||||
.paginate({ numItems: 100, cursor: args.cursor ?? null });
|
||||
|
||||
for (const user of result.page) {
|
||||
if (user.displayName === undefined) {
|
||||
await ctx.db.patch(user._id, {
|
||||
displayName: user.userName,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (!result.isDone) {
|
||||
await ctx.scheduler.runAfter(0, internal.migrations.backfillDisplayName, {
|
||||
cursor: result.continueCursor,
|
||||
});
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
// Step 4: After backfill, update schema to make new field required
|
||||
// and remove old field
|
||||
export default defineSchema({
|
||||
users: defineTable({
|
||||
// userName removed
|
||||
displayName: v.string(),
|
||||
}),
|
||||
});
|
||||
```
|
||||
|
||||
### Adding Indexes
|
||||
|
||||
Add indexes before using them in queries:
|
||||
|
||||
```typescript
|
||||
// Step 1: Add index to schema
|
||||
// convex/schema.ts
|
||||
export default defineSchema({
|
||||
posts: defineTable({
|
||||
title: v.string(),
|
||||
authorId: v.id("users"),
|
||||
publishedAt: v.optional(v.number()),
|
||||
status: v.string(),
|
||||
})
|
||||
.index("by_author", ["authorId"])
|
||||
// New index
|
||||
.index("by_status_and_published", ["status", "publishedAt"]),
|
||||
});
|
||||
|
||||
// Step 2: Deploy schema change
|
||||
// Run: npx convex dev
|
||||
|
||||
// Step 3: Now use the index in queries
|
||||
export const getPublishedPosts = query({
|
||||
args: {},
|
||||
returns: v.array(v.object({
|
||||
_id: v.id("posts"),
|
||||
title: v.string(),
|
||||
publishedAt: v.number(),
|
||||
})),
|
||||
handler: async (ctx) => {
|
||||
const posts = await ctx.db
|
||||
.query("posts")
|
||||
.withIndex("by_status_and_published", (q) =>
|
||||
q.eq("status", "published")
|
||||
)
|
||||
.order("desc")
|
||||
.take(10);
|
||||
|
||||
return posts
|
||||
.filter((p) => p.publishedAt !== undefined)
|
||||
.map((p) => ({
|
||||
_id: p._id,
|
||||
title: p.title,
|
||||
publishedAt: p.publishedAt!,
|
||||
}));
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Changing Field Types
|
||||
|
||||
Type changes require careful migration:
|
||||
|
||||
```typescript
|
||||
// Example: Change from string to number for a "priority" field
|
||||
|
||||
// Step 1: Add new field with new type
|
||||
// convex/schema.ts
|
||||
export default defineSchema({
|
||||
tasks: defineTable({
|
||||
title: v.string(),
|
||||
priority: v.string(), // Old: "low", "medium", "high"
|
||||
priorityLevel: v.optional(v.number()), // New: 1, 2, 3
|
||||
}),
|
||||
});
|
||||
|
||||
// Step 2: Backfill with type conversion
|
||||
export const migratePriorityToNumber = internalMutation({
|
||||
args: { cursor: v.optional(v.string()) },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const result = await ctx.db
|
||||
.query("tasks")
|
||||
.paginate({ numItems: 100, cursor: args.cursor ?? null });
|
||||
|
||||
const priorityMap: Record<string, number> = {
|
||||
low: 1,
|
||||
medium: 2,
|
||||
high: 3,
|
||||
};
|
||||
|
||||
for (const task of result.page) {
|
||||
if (task.priorityLevel === undefined) {
|
||||
await ctx.db.patch(task._id, {
|
||||
priorityLevel: priorityMap[task.priority] ?? 1,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (!result.isDone) {
|
||||
await ctx.scheduler.runAfter(0, internal.migrations.migratePriorityToNumber, {
|
||||
cursor: result.continueCursor,
|
||||
});
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
// Step 3: Update code to use new field
|
||||
export const getTask = query({
|
||||
args: { taskId: v.id("tasks") },
|
||||
returns: v.object({
|
||||
_id: v.id("tasks"),
|
||||
title: v.string(),
|
||||
priorityLevel: v.number(),
|
||||
}),
|
||||
handler: async (ctx, args) => {
|
||||
const task = await ctx.db.get(args.taskId);
|
||||
if (!task) throw new Error("Task not found");
|
||||
|
||||
const priorityMap: Record<string, number> = {
|
||||
low: 1,
|
||||
medium: 2,
|
||||
high: 3,
|
||||
};
|
||||
|
||||
return {
|
||||
_id: task._id,
|
||||
title: task.title,
|
||||
priorityLevel: task.priorityLevel ?? priorityMap[task.priority] ?? 1,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
// Step 4: After backfill, update schema
|
||||
export default defineSchema({
|
||||
tasks: defineTable({
|
||||
title: v.string(),
|
||||
// priority field removed
|
||||
priorityLevel: v.number(),
|
||||
}),
|
||||
});
|
||||
```
|
||||
|
||||
### Migration Runner Pattern
|
||||
|
||||
Create a reusable migration system:
|
||||
|
||||
```typescript
|
||||
// convex/schema.ts
|
||||
import { defineSchema, defineTable } from "convex/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export default defineSchema({
|
||||
migrations: defineTable({
|
||||
name: v.string(),
|
||||
startedAt: v.number(),
|
||||
completedAt: v.optional(v.number()),
|
||||
status: v.union(
|
||||
v.literal("running"),
|
||||
v.literal("completed"),
|
||||
v.literal("failed")
|
||||
),
|
||||
error: v.optional(v.string()),
|
||||
processed: v.number(),
|
||||
}).index("by_name", ["name"]),
|
||||
|
||||
// Your other tables...
|
||||
});
|
||||
```
|
||||
|
||||
```typescript
|
||||
// convex/migrations.ts
|
||||
import { internalMutation, internalQuery } from "./_generated/server";
|
||||
import { internal } from "./_generated/api";
|
||||
import { v } from "convex/values";
|
||||
|
||||
// Check if migration has run
|
||||
export const hasMigrationRun = internalQuery({
|
||||
args: { name: v.string() },
|
||||
returns: v.boolean(),
|
||||
handler: async (ctx, args) => {
|
||||
const migration = await ctx.db
|
||||
.query("migrations")
|
||||
.withIndex("by_name", (q) => q.eq("name", args.name))
|
||||
.first();
|
||||
return migration?.status === "completed";
|
||||
},
|
||||
});
|
||||
|
||||
// Start a migration
|
||||
export const startMigration = internalMutation({
|
||||
args: { name: v.string() },
|
||||
returns: v.id("migrations"),
|
||||
handler: async (ctx, args) => {
|
||||
// Check if already exists
|
||||
const existing = await ctx.db
|
||||
.query("migrations")
|
||||
.withIndex("by_name", (q) => q.eq("name", args.name))
|
||||
.first();
|
||||
|
||||
if (existing) {
|
||||
if (existing.status === "completed") {
|
||||
throw new Error(`Migration ${args.name} already completed`);
|
||||
}
|
||||
if (existing.status === "running") {
|
||||
throw new Error(`Migration ${args.name} already running`);
|
||||
}
|
||||
// Reset failed migration
|
||||
await ctx.db.patch(existing._id, {
|
||||
status: "running",
|
||||
startedAt: Date.now(),
|
||||
error: undefined,
|
||||
processed: 0,
|
||||
});
|
||||
return existing._id;
|
||||
}
|
||||
|
||||
return await ctx.db.insert("migrations", {
|
||||
name: args.name,
|
||||
startedAt: Date.now(),
|
||||
status: "running",
|
||||
processed: 0,
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
// Update migration progress
|
||||
export const updateMigrationProgress = internalMutation({
|
||||
args: {
|
||||
migrationId: v.id("migrations"),
|
||||
processed: v.number(),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const migration = await ctx.db.get(args.migrationId);
|
||||
if (!migration) return null;
|
||||
|
||||
await ctx.db.patch(args.migrationId, {
|
||||
processed: migration.processed + args.processed,
|
||||
});
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
// Complete a migration
|
||||
export const completeMigration = internalMutation({
|
||||
args: { migrationId: v.id("migrations") },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
await ctx.db.patch(args.migrationId, {
|
||||
status: "completed",
|
||||
completedAt: Date.now(),
|
||||
});
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
// Fail a migration
|
||||
export const failMigration = internalMutation({
|
||||
args: {
|
||||
migrationId: v.id("migrations"),
|
||||
error: v.string(),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
await ctx.db.patch(args.migrationId, {
|
||||
status: "failed",
|
||||
error: args.error,
|
||||
});
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
```typescript
|
||||
// convex/migrations/addUserTimestamps.ts
|
||||
import { internalMutation } from "../_generated/server";
|
||||
import { internal } from "../_generated/api";
|
||||
import { v } from "convex/values";
|
||||
|
||||
const MIGRATION_NAME = "add_user_timestamps_v1";
|
||||
const BATCH_SIZE = 100;
|
||||
|
||||
export const run = internalMutation({
|
||||
args: {
|
||||
migrationId: v.optional(v.id("migrations")),
|
||||
cursor: v.optional(v.string()),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
// Initialize migration on first run
|
||||
let migrationId = args.migrationId;
|
||||
if (!migrationId) {
|
||||
const hasRun = await ctx.runQuery(internal.migrations.hasMigrationRun, {
|
||||
name: MIGRATION_NAME,
|
||||
});
|
||||
if (hasRun) {
|
||||
console.log(`Migration ${MIGRATION_NAME} already completed`);
|
||||
return null;
|
||||
}
|
||||
migrationId = await ctx.runMutation(internal.migrations.startMigration, {
|
||||
name: MIGRATION_NAME,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await ctx.db
|
||||
.query("users")
|
||||
.paginate({ numItems: BATCH_SIZE, cursor: args.cursor ?? null });
|
||||
|
||||
let processed = 0;
|
||||
for (const user of result.page) {
|
||||
if (user.createdAt === undefined) {
|
||||
await ctx.db.patch(user._id, {
|
||||
createdAt: user._creationTime,
|
||||
updatedAt: user._creationTime,
|
||||
});
|
||||
processed++;
|
||||
}
|
||||
}
|
||||
|
||||
// Update progress
|
||||
await ctx.runMutation(internal.migrations.updateMigrationProgress, {
|
||||
migrationId,
|
||||
processed,
|
||||
});
|
||||
|
||||
// Continue or complete
|
||||
if (!result.isDone) {
|
||||
await ctx.scheduler.runAfter(0, internal.migrations.addUserTimestamps.run, {
|
||||
migrationId,
|
||||
cursor: result.continueCursor,
|
||||
});
|
||||
} else {
|
||||
await ctx.runMutation(internal.migrations.completeMigration, {
|
||||
migrationId,
|
||||
});
|
||||
console.log(`Migration ${MIGRATION_NAME} completed`);
|
||||
}
|
||||
} catch (error) {
|
||||
await ctx.runMutation(internal.migrations.failMigration, {
|
||||
migrationId,
|
||||
error: String(error),
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Schema with Migration Support
|
||||
|
||||
```typescript
|
||||
// convex/schema.ts
|
||||
import { defineSchema, defineTable } from "convex/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export default defineSchema({
|
||||
// Migration tracking
|
||||
migrations: defineTable({
|
||||
name: v.string(),
|
||||
startedAt: v.number(),
|
||||
completedAt: v.optional(v.number()),
|
||||
status: v.union(
|
||||
v.literal("running"),
|
||||
v.literal("completed"),
|
||||
v.literal("failed")
|
||||
),
|
||||
error: v.optional(v.string()),
|
||||
processed: v.number(),
|
||||
}).index("by_name", ["name"]),
|
||||
|
||||
// Users table with evolved schema
|
||||
users: defineTable({
|
||||
// Original fields
|
||||
name: v.string(),
|
||||
email: v.string(),
|
||||
|
||||
// Added in migration v1
|
||||
createdAt: v.optional(v.number()),
|
||||
updatedAt: v.optional(v.number()),
|
||||
|
||||
// Added in migration v2
|
||||
avatarUrl: v.optional(v.string()),
|
||||
|
||||
// Added in migration v3
|
||||
settings: v.optional(v.object({
|
||||
theme: v.string(),
|
||||
notifications: v.boolean(),
|
||||
})),
|
||||
})
|
||||
.index("by_email", ["email"])
|
||||
.index("by_createdAt", ["createdAt"]),
|
||||
|
||||
// Posts table with indexes for common queries
|
||||
posts: defineTable({
|
||||
title: v.string(),
|
||||
content: v.string(),
|
||||
authorId: v.id("users"),
|
||||
status: v.union(
|
||||
v.literal("draft"),
|
||||
v.literal("published"),
|
||||
v.literal("archived")
|
||||
),
|
||||
publishedAt: v.optional(v.number()),
|
||||
createdAt: v.number(),
|
||||
updatedAt: v.number(),
|
||||
})
|
||||
.index("by_author", ["authorId"])
|
||||
.index("by_status", ["status"])
|
||||
.index("by_author_and_status", ["authorId", "status"])
|
||||
.index("by_publishedAt", ["publishedAt"]),
|
||||
});
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Never run `npx convex deploy` unless explicitly instructed
|
||||
- Never run any git commands unless explicitly instructed
|
||||
- Always start with optional fields when adding new data
|
||||
- Backfill data in batches to avoid timeouts
|
||||
- Test migrations on development before production
|
||||
- Keep track of completed migrations to avoid re-running
|
||||
- Update code to handle both old and new data during transition
|
||||
- Remove deprecated fields only after all code stops using them
|
||||
- Use pagination for large datasets
|
||||
- Add appropriate indexes before running queries on new fields
|
||||
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Making new fields required immediately** - Breaks existing documents
|
||||
2. **Not handling undefined values** - Causes runtime errors
|
||||
3. **Large batch sizes** - Causes function timeouts
|
||||
4. **Forgetting to update indexes** - Queries fail or perform poorly
|
||||
5. **Running migrations without tracking** - May run multiple times
|
||||
6. **Removing fields before code update** - Breaks existing functionality
|
||||
7. **Not testing on development** - Production data issues
|
||||
|
||||
## References
|
||||
|
||||
- Convex Documentation: https://docs.convex.dev/
|
||||
- Convex LLMs.txt: https://docs.convex.dev/llms.txt
|
||||
- Schemas: https://docs.convex.dev/database/schemas
|
||||
- Database Overview: https://docs.convex.dev/database
|
||||
- Migration Patterns: https://stack.convex.dev/migrate-data-postgres-to-convex
|
||||
443
.claude/skills/convex-realtime/SKILL.md
Normal file
443
.claude/skills/convex-realtime/SKILL.md
Normal file
@@ -0,0 +1,443 @@
|
||||
---
|
||||
name: convex-realtime
|
||||
displayName: Convex Realtime
|
||||
description: Patterns for building reactive apps including subscription management, optimistic updates, cache behavior, and paginated queries with cursor-based loading
|
||||
version: 1.0.0
|
||||
author: Convex
|
||||
tags: [convex, realtime, subscriptions, optimistic-updates, pagination]
|
||||
---
|
||||
|
||||
# Convex Realtime
|
||||
|
||||
Build reactive applications with Convex's real-time subscriptions, optimistic updates, intelligent caching, and cursor-based pagination.
|
||||
|
||||
## Documentation Sources
|
||||
|
||||
Before implementing, do not rely on assumptions — fetch the latest documentation:
|
||||
|
||||
- Primary: https://docs.convex.dev/client/react
|
||||
- Optimistic Updates: https://docs.convex.dev/client/react/optimistic-updates
|
||||
- Pagination: https://docs.convex.dev/database/pagination
|
||||
- For broader context: https://docs.convex.dev/llms.txt
|
||||
|
||||
## Instructions
|
||||
|
||||
### How Convex Realtime Works
|
||||
|
||||
1. **Automatic Subscriptions** - useQuery creates a subscription that updates automatically
|
||||
2. **Smart Caching** - Query results are cached and shared across components
|
||||
3. **Consistency** - All subscriptions see a consistent view of the database
|
||||
4. **Efficient Updates** - Only re-renders when relevant data changes
|
||||
|
||||
### Basic Subscriptions
|
||||
|
||||
```typescript
|
||||
// React component with real-time data
|
||||
import { useQuery } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
|
||||
function TaskList({ userId }: { userId: Id<"users"> }) {
|
||||
// Automatically subscribes and updates in real-time
|
||||
const tasks = useQuery(api.tasks.list, { userId });
|
||||
|
||||
if (tasks === undefined) {
|
||||
return <div>Loading...</div>;
|
||||
}
|
||||
|
||||
return (
|
||||
<ul>
|
||||
{tasks.map((task) => (
|
||||
<li key={task._id}>{task.title}</li>
|
||||
))}
|
||||
</ul>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Conditional Queries
|
||||
|
||||
```typescript
|
||||
import { useQuery } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
|
||||
function UserProfile({ userId }: { userId: Id<"users"> | null }) {
|
||||
// Skip query when userId is null
|
||||
const user = useQuery(
|
||||
api.users.get,
|
||||
userId ? { userId } : "skip"
|
||||
);
|
||||
|
||||
if (userId === null) {
|
||||
return <div>Select a user</div>;
|
||||
}
|
||||
|
||||
if (user === undefined) {
|
||||
return <div>Loading...</div>;
|
||||
}
|
||||
|
||||
return <div>{user.name}</div>;
|
||||
}
|
||||
```
|
||||
|
||||
### Mutations with Real-time Updates
|
||||
|
||||
```typescript
|
||||
import { useMutation, useQuery } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
|
||||
function TaskManager({ userId }: { userId: Id<"users"> }) {
|
||||
const tasks = useQuery(api.tasks.list, { userId });
|
||||
const createTask = useMutation(api.tasks.create);
|
||||
const toggleTask = useMutation(api.tasks.toggle);
|
||||
|
||||
const handleCreate = async (title: string) => {
|
||||
// Mutation triggers automatic re-render when data changes
|
||||
await createTask({ title, userId });
|
||||
};
|
||||
|
||||
const handleToggle = async (taskId: Id<"tasks">) => {
|
||||
await toggleTask({ taskId });
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
<button onClick={() => handleCreate("New Task")}>Add Task</button>
|
||||
<ul>
|
||||
{tasks?.map((task) => (
|
||||
<li key={task._id} onClick={() => handleToggle(task._id)}>
|
||||
{task.completed ? "✓" : "○"} {task.title}
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Optimistic Updates
|
||||
|
||||
Show changes immediately before server confirmation:
|
||||
|
||||
```typescript
|
||||
import { useMutation, useQuery } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
import { Id } from "../convex/_generated/dataModel";
|
||||
|
||||
function TaskItem({ task }: { task: Task }) {
|
||||
const toggleTask = useMutation(api.tasks.toggle).withOptimisticUpdate(
|
||||
(localStore, args) => {
|
||||
const { taskId } = args;
|
||||
const currentValue = localStore.getQuery(api.tasks.get, { taskId });
|
||||
|
||||
if (currentValue !== undefined) {
|
||||
localStore.setQuery(api.tasks.get, { taskId }, {
|
||||
...currentValue,
|
||||
completed: !currentValue.completed,
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
return (
|
||||
<div onClick={() => toggleTask({ taskId: task._id })}>
|
||||
{task.completed ? "✓" : "○"} {task.title}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Optimistic Updates for Lists
|
||||
|
||||
```typescript
|
||||
import { useMutation } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
|
||||
function useCreateTask(userId: Id<"users">) {
|
||||
return useMutation(api.tasks.create).withOptimisticUpdate(
|
||||
(localStore, args) => {
|
||||
const { title, userId } = args;
|
||||
const currentTasks = localStore.getQuery(api.tasks.list, { userId });
|
||||
|
||||
if (currentTasks !== undefined) {
|
||||
// Add optimistic task to the list
|
||||
const optimisticTask = {
|
||||
_id: crypto.randomUUID() as Id<"tasks">,
|
||||
_creationTime: Date.now(),
|
||||
title,
|
||||
userId,
|
||||
completed: false,
|
||||
};
|
||||
|
||||
localStore.setQuery(api.tasks.list, { userId }, [
|
||||
optimisticTask,
|
||||
...currentTasks,
|
||||
]);
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Cursor-Based Pagination
|
||||
|
||||
```typescript
|
||||
// convex/messages.ts
|
||||
import { query } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { paginationOptsValidator } from "convex/server";
|
||||
|
||||
export const listPaginated = query({
|
||||
args: {
|
||||
channelId: v.id("channels"),
|
||||
paginationOpts: paginationOptsValidator,
|
||||
},
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db
|
||||
.query("messages")
|
||||
.withIndex("by_channel", (q) => q.eq("channelId", args.channelId))
|
||||
.order("desc")
|
||||
.paginate(args.paginationOpts);
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
```typescript
|
||||
// React component with pagination
|
||||
import { usePaginatedQuery } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
|
||||
function MessageList({ channelId }: { channelId: Id<"channels"> }) {
|
||||
const { results, status, loadMore } = usePaginatedQuery(
|
||||
api.messages.listPaginated,
|
||||
{ channelId },
|
||||
{ initialNumItems: 20 }
|
||||
);
|
||||
|
||||
return (
|
||||
<div>
|
||||
{results.map((message) => (
|
||||
<div key={message._id}>{message.content}</div>
|
||||
))}
|
||||
|
||||
{status === "CanLoadMore" && (
|
||||
<button onClick={() => loadMore(20)}>Load More</button>
|
||||
)}
|
||||
|
||||
{status === "LoadingMore" && <div>Loading...</div>}
|
||||
|
||||
{status === "Exhausted" && <div>No more messages</div>}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Infinite Scroll Pattern
|
||||
|
||||
```typescript
|
||||
import { usePaginatedQuery } from "convex/react";
|
||||
import { useEffect, useRef } from "react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
|
||||
function InfiniteMessageList({ channelId }: { channelId: Id<"channels"> }) {
|
||||
const { results, status, loadMore } = usePaginatedQuery(
|
||||
api.messages.listPaginated,
|
||||
{ channelId },
|
||||
{ initialNumItems: 20 }
|
||||
);
|
||||
|
||||
const observerRef = useRef<IntersectionObserver>();
|
||||
const loadMoreRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (observerRef.current) {
|
||||
observerRef.current.disconnect();
|
||||
}
|
||||
|
||||
observerRef.current = new IntersectionObserver((entries) => {
|
||||
if (entries[0].isIntersecting && status === "CanLoadMore") {
|
||||
loadMore(20);
|
||||
}
|
||||
});
|
||||
|
||||
if (loadMoreRef.current) {
|
||||
observerRef.current.observe(loadMoreRef.current);
|
||||
}
|
||||
|
||||
return () => observerRef.current?.disconnect();
|
||||
}, [status, loadMore]);
|
||||
|
||||
return (
|
||||
<div>
|
||||
{results.map((message) => (
|
||||
<div key={message._id}>{message.content}</div>
|
||||
))}
|
||||
<div ref={loadMoreRef} style={{ height: 1 }} />
|
||||
{status === "LoadingMore" && <div>Loading...</div>}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
### Multiple Subscriptions
|
||||
|
||||
```typescript
|
||||
import { useQuery } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
|
||||
function Dashboard({ userId }: { userId: Id<"users"> }) {
|
||||
// Multiple subscriptions update independently
|
||||
const user = useQuery(api.users.get, { userId });
|
||||
const tasks = useQuery(api.tasks.list, { userId });
|
||||
const notifications = useQuery(api.notifications.unread, { userId });
|
||||
|
||||
const isLoading = user === undefined ||
|
||||
tasks === undefined ||
|
||||
notifications === undefined;
|
||||
|
||||
if (isLoading) {
|
||||
return <div>Loading...</div>;
|
||||
}
|
||||
|
||||
return (
|
||||
<div>
|
||||
<h1>Welcome, {user.name}</h1>
|
||||
<p>You have {tasks.length} tasks</p>
|
||||
<p>{notifications.length} unread notifications</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Real-time Chat Application
|
||||
|
||||
```typescript
|
||||
// convex/messages.ts
|
||||
import { query, mutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const list = query({
|
||||
args: { channelId: v.id("channels") },
|
||||
returns: v.array(v.object({
|
||||
_id: v.id("messages"),
|
||||
_creationTime: v.number(),
|
||||
content: v.string(),
|
||||
authorId: v.id("users"),
|
||||
authorName: v.string(),
|
||||
})),
|
||||
handler: async (ctx, args) => {
|
||||
const messages = await ctx.db
|
||||
.query("messages")
|
||||
.withIndex("by_channel", (q) => q.eq("channelId", args.channelId))
|
||||
.order("desc")
|
||||
.take(100);
|
||||
|
||||
// Enrich with author names
|
||||
return Promise.all(
|
||||
messages.map(async (msg) => {
|
||||
const author = await ctx.db.get(msg.authorId);
|
||||
return {
|
||||
...msg,
|
||||
authorName: author?.name ?? "Unknown",
|
||||
};
|
||||
})
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
export const send = mutation({
|
||||
args: {
|
||||
channelId: v.id("channels"),
|
||||
authorId: v.id("users"),
|
||||
content: v.string(),
|
||||
},
|
||||
returns: v.id("messages"),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db.insert("messages", {
|
||||
channelId: args.channelId,
|
||||
authorId: args.authorId,
|
||||
content: args.content,
|
||||
});
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
```typescript
|
||||
// ChatRoom.tsx
|
||||
import { useQuery, useMutation } from "convex/react";
|
||||
import { api } from "../convex/_generated/api";
|
||||
import { useState, useRef, useEffect } from "react";
|
||||
|
||||
function ChatRoom({ channelId, userId }: Props) {
|
||||
const messages = useQuery(api.messages.list, { channelId });
|
||||
const sendMessage = useMutation(api.messages.send);
|
||||
const [input, setInput] = useState("");
|
||||
const messagesEndRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
// Auto-scroll to bottom on new messages
|
||||
useEffect(() => {
|
||||
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
|
||||
}, [messages]);
|
||||
|
||||
const handleSend = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
if (!input.trim()) return;
|
||||
|
||||
await sendMessage({
|
||||
channelId,
|
||||
authorId: userId,
|
||||
content: input.trim(),
|
||||
});
|
||||
setInput("");
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="chat-room">
|
||||
<div className="messages">
|
||||
{messages?.map((msg) => (
|
||||
<div key={msg._id} className="message">
|
||||
<strong>{msg.authorName}:</strong> {msg.content}
|
||||
</div>
|
||||
))}
|
||||
<div ref={messagesEndRef} />
|
||||
</div>
|
||||
|
||||
<form onSubmit={handleSend}>
|
||||
<input
|
||||
value={input}
|
||||
onChange={(e) => setInput(e.target.value)}
|
||||
placeholder="Type a message..."
|
||||
/>
|
||||
<button type="submit">Send</button>
|
||||
</form>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Never run `npx convex deploy` unless explicitly instructed
|
||||
- Never run any git commands unless explicitly instructed
|
||||
- Use "skip" for conditional queries instead of conditionally calling hooks
|
||||
- Implement optimistic updates for better perceived performance
|
||||
- Use usePaginatedQuery for large datasets
|
||||
- Handle undefined state (loading) explicitly
|
||||
- Avoid unnecessary re-renders by memoizing derived data
|
||||
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Conditional hook calls** - Use "skip" instead of if statements
|
||||
2. **Not handling loading state** - Always check for undefined
|
||||
3. **Writing manual rollback logic for optimistic updates** - Convex automatically rolls back optimistic updates when the mutation fails
|
||||
4. **Over-fetching with pagination** - Use appropriate page sizes
|
||||
5. **Ignoring subscription cleanup** - React handles this automatically
|
||||
|
||||
## References
|
||||
|
||||
- Convex Documentation: https://docs.convex.dev/
|
||||
- Convex LLMs.txt: https://docs.convex.dev/llms.txt
|
||||
- React Client: https://docs.convex.dev/client/react
|
||||
- Optimistic Updates: https://docs.convex.dev/client/react/optimistic-updates
|
||||
- Pagination: https://docs.convex.dev/database/pagination
|
||||
400
.claude/skills/convex-schema-validator/SKILL.md
Normal file
400
.claude/skills/convex-schema-validator/SKILL.md
Normal file
@@ -0,0 +1,400 @@
|
||||
---
|
||||
name: convex-schema-validator
|
||||
displayName: Convex Schema Validator
|
||||
description: Defining and validating database schemas with proper typing, index configuration, optional fields, unions, and migration strategies for schema changes
|
||||
version: 1.0.0
|
||||
author: Convex
|
||||
tags: [convex, schema, validation, typescript, indexes, migrations]
|
||||
---
|
||||
|
||||
# Convex Schema Validator
|
||||
|
||||
Define and validate database schemas in Convex with proper typing, index configuration, optional fields, unions, and strategies for schema migrations.
|
||||
|
||||
## Documentation Sources
|
||||
|
||||
Before implementing, do not assume; fetch the latest documentation:
|
||||
|
||||
- Primary: https://docs.convex.dev/database/schemas
|
||||
- Indexes: https://docs.convex.dev/database/indexes
|
||||
- Data Types: https://docs.convex.dev/database/types
|
||||
- For broader context: https://docs.convex.dev/llms.txt
|
||||
|
||||
## Instructions
|
||||
|
||||
### Basic Schema Definition
|
||||
|
||||
```typescript
|
||||
// convex/schema.ts
|
||||
import { defineSchema, defineTable } from "convex/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export default defineSchema({
|
||||
users: defineTable({
|
||||
name: v.string(),
|
||||
email: v.string(),
|
||||
avatarUrl: v.optional(v.string()),
|
||||
createdAt: v.number(),
|
||||
}),
|
||||
|
||||
tasks: defineTable({
|
||||
title: v.string(),
|
||||
description: v.optional(v.string()),
|
||||
completed: v.boolean(),
|
||||
userId: v.id("users"),
|
||||
priority: v.union(
|
||||
v.literal("low"),
|
||||
v.literal("medium"),
|
||||
v.literal("high")
|
||||
),
|
||||
}),
|
||||
});
|
||||
```
|
||||
|
||||
### Validator Types
|
||||
|
||||
| Validator | TypeScript Type | Example |
|
||||
|-----------|----------------|---------|
|
||||
| `v.string()` | `string` | `"hello"` |
|
||||
| `v.number()` | `number` | `42`, `3.14` |
|
||||
| `v.boolean()` | `boolean` | `true`, `false` |
|
||||
| `v.null()` | `null` | `null` |
|
||||
| `v.int64()` | `bigint` | `9007199254740993n` |
|
||||
| `v.bytes()` | `ArrayBuffer` | Binary data |
|
||||
| `v.id("table")` | `Id<"table">` | Document reference |
|
||||
| `v.array(v)` | `T[]` | `[1, 2, 3]` |
|
||||
| `v.object({})` | `{ ... }` | `{ name: "..." }` |
|
||||
| `v.optional(v)` | `T \| undefined` | Optional field |
|
||||
| `v.union(...)` | `T1 \| T2` | Multiple types |
|
||||
| `v.literal(x)` | `"x"` | Exact value |
|
||||
| `v.any()` | `any` | Any value |
|
||||
| `v.record(k, v)` | `Record<K, V>` | Dynamic keys |
|
||||
|
||||
### Index Configuration
|
||||
|
||||
```typescript
|
||||
export default defineSchema({
|
||||
messages: defineTable({
|
||||
channelId: v.id("channels"),
|
||||
authorId: v.id("users"),
|
||||
content: v.string(),
|
||||
sentAt: v.number(),
|
||||
})
|
||||
// Single field index
|
||||
.index("by_channel", ["channelId"])
|
||||
// Compound index
|
||||
.index("by_channel_and_author", ["channelId", "authorId"])
|
||||
// Index for sorting
|
||||
.index("by_channel_and_time", ["channelId", "sentAt"]),
|
||||
|
||||
// Full-text search index
|
||||
articles: defineTable({
|
||||
title: v.string(),
|
||||
body: v.string(),
|
||||
category: v.string(),
|
||||
})
|
||||
.searchIndex("search_content", {
|
||||
searchField: "body",
|
||||
filterFields: ["category"],
|
||||
}),
|
||||
});
|
||||
```
|
||||
|
||||
### Complex Types
|
||||
|
||||
```typescript
|
||||
export default defineSchema({
|
||||
// Nested objects
|
||||
profiles: defineTable({
|
||||
userId: v.id("users"),
|
||||
settings: v.object({
|
||||
theme: v.union(v.literal("light"), v.literal("dark")),
|
||||
notifications: v.object({
|
||||
email: v.boolean(),
|
||||
push: v.boolean(),
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
|
||||
// Arrays of objects
|
||||
orders: defineTable({
|
||||
customerId: v.id("users"),
|
||||
items: v.array(v.object({
|
||||
productId: v.id("products"),
|
||||
quantity: v.number(),
|
||||
price: v.number(),
|
||||
})),
|
||||
status: v.union(
|
||||
v.literal("pending"),
|
||||
v.literal("processing"),
|
||||
v.literal("shipped"),
|
||||
v.literal("delivered")
|
||||
),
|
||||
}),
|
||||
|
||||
// Record type for dynamic keys
|
||||
analytics: defineTable({
|
||||
date: v.string(),
|
||||
metrics: v.record(v.string(), v.number()),
|
||||
}),
|
||||
});
|
||||
```
|
||||
|
||||
### Discriminated Unions
|
||||
|
||||
```typescript
|
||||
export default defineSchema({
|
||||
events: defineTable(
|
||||
v.union(
|
||||
v.object({
|
||||
type: v.literal("user_signup"),
|
||||
userId: v.id("users"),
|
||||
email: v.string(),
|
||||
}),
|
||||
v.object({
|
||||
type: v.literal("purchase"),
|
||||
userId: v.id("users"),
|
||||
orderId: v.id("orders"),
|
||||
amount: v.number(),
|
||||
}),
|
||||
v.object({
|
||||
type: v.literal("page_view"),
|
||||
sessionId: v.string(),
|
||||
path: v.string(),
|
||||
})
|
||||
)
|
||||
).index("by_type", ["type"]),
|
||||
});
|
||||
```
|
||||
|
||||
### Optional vs Nullable Fields
|
||||
|
||||
```typescript
|
||||
export default defineSchema({
|
||||
items: defineTable({
|
||||
// Optional: field may not exist
|
||||
description: v.optional(v.string()),
|
||||
|
||||
// Nullable: field exists but can be null
|
||||
deletedAt: v.union(v.number(), v.null()),
|
||||
|
||||
// Optional and nullable
|
||||
notes: v.optional(v.union(v.string(), v.null())),
|
||||
}),
|
||||
});
|
||||
```
|
||||
|
||||
### Index Naming Convention
|
||||
|
||||
Always include all indexed fields in the index name:
|
||||
|
||||
```typescript
|
||||
export default defineSchema({
|
||||
posts: defineTable({
|
||||
authorId: v.id("users"),
|
||||
categoryId: v.id("categories"),
|
||||
publishedAt: v.number(),
|
||||
status: v.string(),
|
||||
})
|
||||
// Good: descriptive names
|
||||
.index("by_author", ["authorId"])
|
||||
.index("by_author_and_category", ["authorId", "categoryId"])
|
||||
.index("by_category_and_status", ["categoryId", "status"])
|
||||
.index("by_status_and_published", ["status", "publishedAt"]),
|
||||
});
|
||||
```
|
||||
|
||||
### Schema Migration Strategies
|
||||
|
||||
#### Adding New Fields
|
||||
|
||||
```typescript
|
||||
// Before
|
||||
users: defineTable({
|
||||
name: v.string(),
|
||||
email: v.string(),
|
||||
})
|
||||
|
||||
// After - add as optional first
|
||||
users: defineTable({
|
||||
name: v.string(),
|
||||
email: v.string(),
|
||||
avatarUrl: v.optional(v.string()), // New optional field
|
||||
})
|
||||
```
|
||||
|
||||
#### Backfilling Data
|
||||
|
||||
```typescript
|
||||
// convex/migrations.ts
|
||||
import { internalMutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const backfillAvatars = internalMutation({
|
||||
args: {},
|
||||
returns: v.number(),
|
||||
handler: async (ctx) => {
|
||||
const users = await ctx.db
|
||||
.query("users")
|
||||
.filter((q) => q.eq(q.field("avatarUrl"), undefined))
|
||||
.take(100);
|
||||
|
||||
for (const user of users) {
|
||||
await ctx.db.patch(user._id, {
|
||||
avatarUrl: `https://api.dicebear.com/7.x/initials/svg?seed=${user.name}`,
|
||||
});
|
||||
}
|
||||
|
||||
return users.length;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
#### Making Optional Fields Required
|
||||
|
||||
```typescript
|
||||
// Step 1: Backfill all null values
|
||||
// Step 2: Update schema to required
|
||||
users: defineTable({
|
||||
name: v.string(),
|
||||
email: v.string(),
|
||||
avatarUrl: v.string(), // Now required after backfill
|
||||
})
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Complete E-commerce Schema
|
||||
|
||||
```typescript
|
||||
// convex/schema.ts
|
||||
import { defineSchema, defineTable } from "convex/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export default defineSchema({
|
||||
users: defineTable({
|
||||
email: v.string(),
|
||||
name: v.string(),
|
||||
role: v.union(v.literal("customer"), v.literal("admin")),
|
||||
createdAt: v.number(),
|
||||
})
|
||||
.index("by_email", ["email"])
|
||||
.index("by_role", ["role"]),
|
||||
|
||||
products: defineTable({
|
||||
name: v.string(),
|
||||
description: v.string(),
|
||||
price: v.number(),
|
||||
category: v.string(),
|
||||
inventory: v.number(),
|
||||
isActive: v.boolean(),
|
||||
})
|
||||
.index("by_category", ["category"])
|
||||
.index("by_active_and_category", ["isActive", "category"])
|
||||
.searchIndex("search_products", {
|
||||
searchField: "name",
|
||||
filterFields: ["category", "isActive"],
|
||||
}),
|
||||
|
||||
orders: defineTable({
|
||||
userId: v.id("users"),
|
||||
items: v.array(v.object({
|
||||
productId: v.id("products"),
|
||||
quantity: v.number(),
|
||||
priceAtPurchase: v.number(),
|
||||
})),
|
||||
total: v.number(),
|
||||
status: v.union(
|
||||
v.literal("pending"),
|
||||
v.literal("paid"),
|
||||
v.literal("shipped"),
|
||||
v.literal("delivered"),
|
||||
v.literal("cancelled")
|
||||
),
|
||||
shippingAddress: v.object({
|
||||
street: v.string(),
|
||||
city: v.string(),
|
||||
state: v.string(),
|
||||
zip: v.string(),
|
||||
country: v.string(),
|
||||
}),
|
||||
createdAt: v.number(),
|
||||
updatedAt: v.number(),
|
||||
})
|
||||
.index("by_user", ["userId"])
|
||||
.index("by_user_and_status", ["userId", "status"])
|
||||
.index("by_status", ["status"]),
|
||||
|
||||
reviews: defineTable({
|
||||
productId: v.id("products"),
|
||||
userId: v.id("users"),
|
||||
rating: v.number(),
|
||||
comment: v.optional(v.string()),
|
||||
createdAt: v.number(),
|
||||
})
|
||||
.index("by_product", ["productId"])
|
||||
.index("by_user", ["userId"]),
|
||||
});
|
||||
```
|
||||
|
||||
### Using Schema Types in Functions
|
||||
|
||||
```typescript
|
||||
// convex/products.ts
|
||||
import { query, mutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { Doc, Id } from "./_generated/dataModel";
|
||||
|
||||
// Use Doc type for full documents
|
||||
type Product = Doc<"products">;
|
||||
|
||||
// Use Id type for references
|
||||
type ProductId = Id<"products">;
|
||||
|
||||
export const get = query({
|
||||
args: { productId: v.id("products") },
|
||||
returns: v.union(
|
||||
v.object({
|
||||
_id: v.id("products"),
|
||||
_creationTime: v.number(),
|
||||
name: v.string(),
|
||||
description: v.string(),
|
||||
price: v.number(),
|
||||
category: v.string(),
|
||||
inventory: v.number(),
|
||||
isActive: v.boolean(),
|
||||
}),
|
||||
v.null()
|
||||
),
|
||||
handler: async (ctx, args): Promise<Product | null> => {
|
||||
return await ctx.db.get(args.productId);
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Never run `npx convex deploy` unless explicitly instructed
|
||||
- Never run any git commands unless explicitly instructed
|
||||
- Always define explicit schemas rather than relying on inference
|
||||
- Use descriptive index names that include all indexed fields
|
||||
- Start with optional fields when adding new columns
|
||||
- Use discriminated unions for polymorphic data
|
||||
- Validate data at the schema level, not just in functions
|
||||
- Plan index strategy based on query patterns
|
||||
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Missing indexes for queries** - Every withIndex needs a corresponding schema index
|
||||
2. **Wrong index field order** - Index fields must be queried in the order they are defined in the schema
|
||||
3. **Using v.any() excessively** - Lose type safety benefits
|
||||
4. **Not making new fields optional** - Breaks existing data
|
||||
5. **Forgetting system fields** - `_id` and `_creationTime` are added automatically; do not define them in your schema
|
||||
|
||||
## References
|
||||
|
||||
- Convex Documentation: https://docs.convex.dev/
|
||||
- Convex LLMs.txt: https://docs.convex.dev/llms.txt
|
||||
- Schemas: https://docs.convex.dev/database/schemas
|
||||
- Indexes: https://docs.convex.dev/database/indexes
|
||||
- Data Types: https://docs.convex.dev/database/types
|
||||
539
.claude/skills/convex-security-audit/SKILL.md
Normal file
539
.claude/skills/convex-security-audit/SKILL.md
Normal file
@@ -0,0 +1,539 @@
|
||||
---
|
||||
name: convex-security-audit
|
||||
displayName: Convex Security Audit
|
||||
description: Deep security review patterns for authorization logic, data access boundaries, action isolation, rate limiting, and protecting sensitive operations
|
||||
version: 1.0.0
|
||||
author: Convex
|
||||
tags: [convex, security, audit, authorization, rate-limiting, protection]
|
||||
---
|
||||
|
||||
# Convex Security Audit
|
||||
|
||||
Comprehensive security review patterns for Convex applications including authorization logic, data access boundaries, action isolation, rate limiting, and protecting sensitive operations.
|
||||
|
||||
## Documentation Sources
|
||||
|
||||
Before implementing, do not assume; fetch the latest documentation:
|
||||
|
||||
- Primary: https://docs.convex.dev/auth/functions-auth
|
||||
- Production Security: https://docs.convex.dev/production
|
||||
- For broader context: https://docs.convex.dev/llms.txt
|
||||
|
||||
## Instructions
|
||||
|
||||
### Security Audit Areas
|
||||
|
||||
1. **Authorization Logic** - Who can do what
|
||||
2. **Data Access Boundaries** - What data users can see
|
||||
3. **Action Isolation** - Protecting external API calls
|
||||
4. **Rate Limiting** - Preventing abuse
|
||||
5. **Sensitive Operations** - Protecting critical functions
|
||||
|
||||
### Authorization Logic Audit
|
||||
|
||||
#### Role-Based Access Control (RBAC)
|
||||
|
||||
```typescript
|
||||
// convex/lib/auth.ts
|
||||
import { QueryCtx, MutationCtx } from "./_generated/server";
|
||||
import { ConvexError } from "convex/values";
|
||||
import { Doc } from "./_generated/dataModel";
|
||||
|
||||
type UserRole = "user" | "moderator" | "admin" | "superadmin";
|
||||
|
||||
const roleHierarchy: Record<UserRole, number> = {
|
||||
user: 0,
|
||||
moderator: 1,
|
||||
admin: 2,
|
||||
superadmin: 3,
|
||||
};
|
||||
|
||||
export async function getUser(ctx: QueryCtx | MutationCtx): Promise<Doc<"users"> | null> {
|
||||
const identity = await ctx.auth.getUserIdentity();
|
||||
if (!identity) return null;
|
||||
|
||||
return await ctx.db
|
||||
.query("users")
|
||||
.withIndex("by_tokenIdentifier", (q) =>
|
||||
q.eq("tokenIdentifier", identity.tokenIdentifier)
|
||||
)
|
||||
.unique();
|
||||
}
|
||||
|
||||
export async function requireRole(
|
||||
ctx: QueryCtx | MutationCtx,
|
||||
minRole: UserRole
|
||||
): Promise<Doc<"users">> {
|
||||
const user = await getUser(ctx);
|
||||
|
||||
if (!user) {
|
||||
throw new ConvexError({
|
||||
code: "UNAUTHENTICATED",
|
||||
message: "Authentication required",
|
||||
});
|
||||
}
|
||||
|
||||
const userRoleLevel = roleHierarchy[user.role as UserRole] ?? 0;
|
||||
const requiredLevel = roleHierarchy[minRole];
|
||||
|
||||
if (userRoleLevel < requiredLevel) {
|
||||
throw new ConvexError({
|
||||
code: "FORBIDDEN",
|
||||
message: `Role '${minRole}' or higher required`,
|
||||
});
|
||||
}
|
||||
|
||||
return user;
|
||||
}
|
||||
|
||||
// Permission-based check
|
||||
type Permission = "read:users" | "write:users" | "delete:users" | "admin:system";
|
||||
|
||||
const rolePermissions: Record<UserRole, Permission[]> = {
|
||||
user: ["read:users"],
|
||||
moderator: ["read:users", "write:users"],
|
||||
admin: ["read:users", "write:users", "delete:users"],
|
||||
superadmin: ["read:users", "write:users", "delete:users", "admin:system"],
|
||||
};
|
||||
|
||||
export async function requirePermission(
|
||||
ctx: QueryCtx | MutationCtx,
|
||||
permission: Permission
|
||||
): Promise<Doc<"users">> {
|
||||
const user = await getUser(ctx);
|
||||
|
||||
if (!user) {
|
||||
throw new ConvexError({ code: "UNAUTHENTICATED", message: "Authentication required" });
|
||||
}
|
||||
|
||||
const userRole = user.role as UserRole;
|
||||
const permissions = rolePermissions[userRole] ?? [];
|
||||
|
||||
if (!permissions.includes(permission)) {
|
||||
throw new ConvexError({
|
||||
code: "FORBIDDEN",
|
||||
message: `Permission '${permission}' required`,
|
||||
});
|
||||
}
|
||||
|
||||
return user;
|
||||
}
|
||||
```
|
||||
|
||||
### Data Access Boundaries Audit
|
||||
|
||||
```typescript
|
||||
// convex/data.ts
|
||||
import { query, mutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { getUser, requireRole } from "./lib/auth";
|
||||
import { ConvexError } from "convex/values";
|
||||
|
||||
// Audit: Users can only see their own data
|
||||
export const getMyData = query({
|
||||
args: {},
|
||||
returns: v.array(v.object({
|
||||
_id: v.id("userData"),
|
||||
content: v.string(),
|
||||
})),
|
||||
handler: async (ctx) => {
|
||||
const user = await getUser(ctx);
|
||||
if (!user) return [];
|
||||
|
||||
// SECURITY: Filter by userId
|
||||
return await ctx.db
|
||||
.query("userData")
|
||||
.withIndex("by_user", (q) => q.eq("userId", user._id))
|
||||
.collect();
|
||||
},
|
||||
});
|
||||
|
||||
// Audit: Verify ownership before returning sensitive data
|
||||
export const getSensitiveItem = query({
|
||||
args: { itemId: v.id("sensitiveItems") },
|
||||
returns: v.union(v.object({
|
||||
_id: v.id("sensitiveItems"),
|
||||
secret: v.string(),
|
||||
}), v.null()),
|
||||
handler: async (ctx, args) => {
|
||||
const user = await getUser(ctx);
|
||||
if (!user) return null;
|
||||
|
||||
const item = await ctx.db.get(args.itemId);
|
||||
|
||||
// SECURITY: Verify ownership
|
||||
if (!item || item.ownerId !== user._id) {
|
||||
return null; // Don't reveal if item exists
|
||||
}
|
||||
|
||||
return item;
|
||||
},
|
||||
});
|
||||
|
||||
// Audit: Shared resources with access list
|
||||
export const getSharedDocument = query({
|
||||
args: { docId: v.id("documents") },
|
||||
returns: v.union(v.object({
|
||||
_id: v.id("documents"),
|
||||
content: v.string(),
|
||||
accessLevel: v.string(),
|
||||
}), v.null()),
|
||||
handler: async (ctx, args) => {
|
||||
const user = await getUser(ctx);
|
||||
const doc = await ctx.db.get(args.docId);
|
||||
|
||||
if (!doc) return null;
|
||||
|
||||
// Public documents
|
||||
if (doc.visibility === "public") {
|
||||
return { ...doc, accessLevel: "public" };
|
||||
}
|
||||
|
||||
// Must be authenticated for non-public
|
||||
if (!user) return null;
|
||||
|
||||
// Owner has full access
|
||||
if (doc.ownerId === user._id) {
|
||||
return { ...doc, accessLevel: "owner" };
|
||||
}
|
||||
|
||||
// Check shared access
|
||||
const access = await ctx.db
|
||||
.query("documentAccess")
|
||||
.withIndex("by_doc_and_user", (q) =>
|
||||
q.eq("documentId", args.docId).eq("userId", user._id)
|
||||
)
|
||||
.unique();
|
||||
|
||||
if (!access) return null;
|
||||
|
||||
return { ...doc, accessLevel: access.level };
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Action Isolation Audit
|
||||
|
||||
```typescript
|
||||
// convex/actions.ts
|
||||
"use node";
|
||||
|
||||
import { action, internalAction } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { api, internal } from "./_generated/api";
|
||||
import { ConvexError } from "convex/values";
|
||||
|
||||
// SECURITY: Never expose API keys in responses
|
||||
export const callExternalAPI = action({
|
||||
args: { query: v.string() },
|
||||
returns: v.object({ result: v.string() }),
|
||||
handler: async (ctx, args) => {
|
||||
// Verify user is authenticated
|
||||
const identity = await ctx.auth.getUserIdentity();
|
||||
if (!identity) {
|
||||
throw new ConvexError("Authentication required");
|
||||
}
|
||||
|
||||
// Get API key from environment (not hardcoded)
|
||||
const apiKey = process.env.EXTERNAL_API_KEY;
|
||||
if (!apiKey) {
|
||||
throw new Error("API key not configured");
|
||||
}
|
||||
|
||||
// Log usage for audit trail
|
||||
await ctx.runMutation(internal.audit.logAPICall, {
|
||||
userId: identity.tokenIdentifier,
|
||||
endpoint: "external-api",
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
|
||||
const response = await fetch("https://api.example.com/query", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Authorization": `Bearer ${apiKey}`,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({ query: args.query }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
// Don't expose external API error details
|
||||
throw new ConvexError("External service unavailable");
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
// Sanitize response before returning
|
||||
return { result: sanitizeResponse(data) };
|
||||
},
|
||||
});
|
||||
|
||||
// Internal action - not exposed to clients
|
||||
export const _processPayment = internalAction({
|
||||
args: {
|
||||
userId: v.id("users"),
|
||||
amount: v.number(),
|
||||
paymentMethodId: v.string(),
|
||||
},
|
||||
returns: v.object({ success: v.boolean(), transactionId: v.optional(v.string()) }),
|
||||
handler: async (ctx, args) => {
|
||||
const stripeKey = process.env.STRIPE_SECRET_KEY;
|
||||
|
||||
// Process payment with Stripe
|
||||
// This should NEVER be exposed as a public action
|
||||
|
||||
return { success: true, transactionId: "txn_xxx" };
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Rate Limiting Audit
|
||||
|
||||
```typescript
|
||||
// convex/rateLimit.ts
|
||||
import { mutation, query } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { ConvexError } from "convex/values";
|
||||
|
||||
const RATE_LIMITS = {
|
||||
message: { requests: 10, windowMs: 60000 }, // 10 per minute
|
||||
upload: { requests: 5, windowMs: 300000 }, // 5 per 5 minutes
|
||||
api: { requests: 100, windowMs: 3600000 }, // 100 per hour
|
||||
};
|
||||
|
||||
export const checkRateLimit = mutation({
|
||||
args: {
|
||||
userId: v.string(),
|
||||
action: v.union(v.literal("message"), v.literal("upload"), v.literal("api")),
|
||||
},
|
||||
returns: v.object({ allowed: v.boolean(), retryAfter: v.optional(v.number()) }),
|
||||
handler: async (ctx, args) => {
|
||||
const limit = RATE_LIMITS[args.action];
|
||||
const now = Date.now();
|
||||
const windowStart = now - limit.windowMs;
|
||||
|
||||
// Count requests in window
|
||||
const requests = await ctx.db
|
||||
.query("rateLimits")
|
||||
.withIndex("by_user_and_action", (q) =>
|
||||
q.eq("userId", args.userId).eq("action", args.action)
|
||||
)
|
||||
.filter((q) => q.gt(q.field("timestamp"), windowStart))
|
||||
.collect();
|
||||
|
||||
if (requests.length >= limit.requests) {
|
||||
const oldestRequest = requests[0];
|
||||
const retryAfter = oldestRequest.timestamp + limit.windowMs - now;
|
||||
|
||||
return { allowed: false, retryAfter };
|
||||
}
|
||||
|
||||
// Record this request
|
||||
await ctx.db.insert("rateLimits", {
|
||||
userId: args.userId,
|
||||
action: args.action,
|
||||
timestamp: now,
|
||||
});
|
||||
|
||||
return { allowed: true };
|
||||
},
|
||||
});
|
||||
|
||||
// Use in mutations
|
||||
export const sendMessage = mutation({
|
||||
args: { content: v.string() },
|
||||
returns: v.id("messages"),
|
||||
handler: async (ctx, args) => {
|
||||
const identity = await ctx.auth.getUserIdentity();
|
||||
if (!identity) throw new ConvexError("Authentication required");
|
||||
|
||||
// Check rate limit
|
||||
const rateCheck = await checkRateLimit(ctx, {
|
||||
userId: identity.tokenIdentifier,
|
||||
action: "message",
|
||||
});
|
||||
|
||||
if (!rateCheck.allowed) {
|
||||
throw new ConvexError({
|
||||
code: "RATE_LIMITED",
|
||||
message: `Too many requests. Try again in ${Math.ceil(rateCheck.retryAfter! / 1000)} seconds`,
|
||||
});
|
||||
}
|
||||
|
||||
return await ctx.db.insert("messages", {
|
||||
content: args.content,
|
||||
authorId: identity.tokenIdentifier,
|
||||
createdAt: Date.now(),
|
||||
});
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Sensitive Operations Protection
|
||||
|
||||
```typescript
|
||||
// convex/admin.ts
|
||||
import { mutation, internalMutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { requireRole, requirePermission } from "./lib/auth";
|
||||
import { internal } from "./_generated/api";
|
||||
|
||||
// Two-factor confirmation for dangerous operations
|
||||
export const deleteAllUserData = mutation({
|
||||
args: {
|
||||
userId: v.id("users"),
|
||||
confirmationCode: v.string(),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
// Require superadmin
|
||||
const admin = await requireRole(ctx, "superadmin");
|
||||
|
||||
// Verify confirmation code
|
||||
const confirmation = await ctx.db
|
||||
.query("confirmations")
|
||||
.withIndex("by_admin_and_code", (q) =>
|
||||
q.eq("adminId", admin._id).eq("code", args.confirmationCode)
|
||||
)
|
||||
.filter((q) => q.gt(q.field("expiresAt"), Date.now()))
|
||||
.unique();
|
||||
|
||||
if (!confirmation || confirmation.action !== "delete_user_data") {
|
||||
throw new ConvexError("Invalid or expired confirmation code");
|
||||
}
|
||||
|
||||
// Delete confirmation to prevent reuse
|
||||
await ctx.db.delete(confirmation._id);
|
||||
|
||||
// Schedule deletion (don't do it inline)
|
||||
await ctx.scheduler.runAfter(0, internal.admin._performDeletion, {
|
||||
userId: args.userId,
|
||||
requestedBy: admin._id,
|
||||
});
|
||||
|
||||
// Audit log
|
||||
await ctx.db.insert("auditLogs", {
|
||||
action: "delete_user_data",
|
||||
targetUserId: args.userId,
|
||||
performedBy: admin._id,
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
// Generate confirmation code for sensitive action
|
||||
export const requestDeletionConfirmation = mutation({
|
||||
args: { userId: v.id("users") },
|
||||
returns: v.string(),
|
||||
handler: async (ctx, args) => {
|
||||
const admin = await requireRole(ctx, "superadmin");
|
||||
|
||||
const code = generateSecureCode();
|
||||
|
||||
await ctx.db.insert("confirmations", {
|
||||
adminId: admin._id,
|
||||
code,
|
||||
action: "delete_user_data",
|
||||
targetUserId: args.userId,
|
||||
expiresAt: Date.now() + 5 * 60 * 1000, // 5 minutes
|
||||
});
|
||||
|
||||
// In production, send code via secure channel (email, SMS)
|
||||
return code;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Complete Audit Trail System
|
||||
|
||||
```typescript
|
||||
// convex/audit.ts
|
||||
import { mutation, query, internalMutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { getUser, requireRole } from "./lib/auth";
|
||||
|
||||
const auditEventValidator = v.object({
|
||||
_id: v.id("auditLogs"),
|
||||
_creationTime: v.number(),
|
||||
action: v.string(),
|
||||
userId: v.optional(v.string()),
|
||||
resourceType: v.string(),
|
||||
resourceId: v.string(),
|
||||
details: v.optional(v.any()),
|
||||
ipAddress: v.optional(v.string()),
|
||||
timestamp: v.number(),
|
||||
});
|
||||
|
||||
// Internal: Log audit event
|
||||
export const logEvent = internalMutation({
|
||||
args: {
|
||||
action: v.string(),
|
||||
userId: v.optional(v.string()),
|
||||
resourceType: v.string(),
|
||||
resourceId: v.string(),
|
||||
details: v.optional(v.any()),
|
||||
},
|
||||
returns: v.id("auditLogs"),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db.insert("auditLogs", {
|
||||
...args,
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
// Admin: View audit logs
|
||||
export const getAuditLogs = query({
|
||||
args: {
|
||||
resourceType: v.optional(v.string()),
|
||||
userId: v.optional(v.string()),
|
||||
limit: v.optional(v.number()),
|
||||
},
|
||||
returns: v.array(auditEventValidator),
|
||||
handler: async (ctx, args) => {
|
||||
await requireRole(ctx, "admin");
|
||||
|
||||
let query = ctx.db.query("auditLogs");
|
||||
|
||||
if (args.resourceType) {
|
||||
query = query.withIndex("by_resource_type", (q) =>
|
||||
q.eq("resourceType", args.resourceType)
|
||||
);
|
||||
}
|
||||
|
||||
return await query
|
||||
.order("desc")
|
||||
.take(args.limit ?? 100);
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Never run `npx convex deploy` unless explicitly instructed
|
||||
- Never run any git commands unless explicitly instructed
|
||||
- Implement defense in depth (multiple security layers)
|
||||
- Log all sensitive operations for audit trails
|
||||
- Use confirmation codes for destructive actions
|
||||
- Rate limit all user-facing endpoints
|
||||
- Never expose internal API keys or errors
|
||||
- Review access patterns regularly
|
||||
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Single point of failure** - Implement multiple auth checks
|
||||
2. **Missing audit logs** - Log all sensitive operations
|
||||
3. **Trusting client data** - Always validate server-side
|
||||
4. **Exposing error details** - Sanitize error messages
|
||||
5. **No rate limiting** - Always implement rate limits
|
||||
|
||||
## References
|
||||
|
||||
- Convex Documentation: https://docs.convex.dev/
|
||||
- Convex LLMs.txt: https://docs.convex.dev/llms.txt
|
||||
- Functions Auth: https://docs.convex.dev/auth/functions-auth
|
||||
- Production Security: https://docs.convex.dev/production
|
||||
378
.claude/skills/convex-security-check/SKILL.md
Normal file
378
.claude/skills/convex-security-check/SKILL.md
Normal file
@@ -0,0 +1,378 @@
|
||||
---
|
||||
name: convex-security-check
|
||||
displayName: Convex Security Check
|
||||
description: Quick security audit checklist covering authentication, function exposure, argument validation, row-level access control, and environment variable handling
|
||||
version: 1.0.0
|
||||
author: Convex
|
||||
tags: [convex, security, authentication, authorization, checklist]
|
||||
---
|
||||
|
||||
# Convex Security Check
|
||||
|
||||
A quick security audit checklist for Convex applications covering authentication, function exposure, argument validation, row-level access control, and environment variable handling.
|
||||
|
||||
## Documentation Sources
|
||||
|
||||
Before implementing, do not assume; fetch the latest documentation:
|
||||
|
||||
- Primary: https://docs.convex.dev/auth
|
||||
- Production Security: https://docs.convex.dev/production
|
||||
- Functions Auth: https://docs.convex.dev/auth/functions-auth
|
||||
- For broader context: https://docs.convex.dev/llms.txt
|
||||
|
||||
## Instructions
|
||||
|
||||
### Security Checklist
|
||||
|
||||
Use this checklist to quickly audit your Convex application's security:
|
||||
|
||||
#### 1. Authentication
|
||||
|
||||
- [ ] Authentication provider configured (Clerk, Auth0, etc.)
|
||||
- [ ] All sensitive queries check `ctx.auth.getUserIdentity()`
|
||||
- [ ] Unauthenticated access explicitly allowed where intended
|
||||
- [ ] Session tokens properly validated
|
||||
|
||||
#### 2. Function Exposure
|
||||
|
||||
- [ ] Public functions (`query`, `mutation`, `action`) reviewed
|
||||
- [ ] Internal functions use `internalQuery`, `internalMutation`, `internalAction`
|
||||
- [ ] No sensitive operations exposed as public functions
|
||||
- [ ] HTTP actions validate origin/authentication
|
||||
|
||||
#### 3. Argument Validation
|
||||
|
||||
- [ ] All functions have explicit `args` validators
|
||||
- [ ] All functions have explicit `returns` validators
|
||||
- [ ] No `v.any()` used for sensitive data
|
||||
- [ ] ID validators use correct table names
|
||||
|
||||
#### 4. Row-Level Access Control
|
||||
|
||||
- [ ] Users can only access their own data
|
||||
- [ ] Admin functions check user roles
|
||||
- [ ] Shared resources have proper access checks
|
||||
- [ ] Deletion functions verify ownership
|
||||
|
||||
#### 5. Environment Variables
|
||||
|
||||
- [ ] API keys stored in environment variables
|
||||
- [ ] No secrets in code or schema
|
||||
- [ ] Different keys for dev/prod environments
|
||||
- [ ] Environment variables accessed only in actions
|
||||
|
||||
### Authentication Check
|
||||
|
||||
```typescript
|
||||
// convex/auth.ts
|
||||
import { query, mutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { ConvexError } from "convex/values";
|
||||
|
||||
// Helper to require authentication
|
||||
async function requireAuth(ctx: QueryCtx | MutationCtx) {
|
||||
const identity = await ctx.auth.getUserIdentity();
|
||||
if (!identity) {
|
||||
throw new ConvexError("Authentication required");
|
||||
}
|
||||
return identity;
|
||||
}
|
||||
|
||||
// Secure query pattern
|
||||
export const getMyProfile = query({
|
||||
args: {},
|
||||
returns: v.union(v.object({
|
||||
_id: v.id("users"),
|
||||
name: v.string(),
|
||||
email: v.string(),
|
||||
}), v.null()),
|
||||
handler: async (ctx) => {
|
||||
const identity = await requireAuth(ctx);
|
||||
|
||||
return await ctx.db
|
||||
.query("users")
|
||||
.withIndex("by_tokenIdentifier", (q) =>
|
||||
q.eq("tokenIdentifier", identity.tokenIdentifier)
|
||||
)
|
||||
.unique();
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Function Exposure Check
|
||||
|
||||
```typescript
|
||||
// PUBLIC - Exposed to clients (review carefully!)
|
||||
export const listPublicPosts = query({
|
||||
args: {},
|
||||
returns: v.array(v.object({ /* ... */ })),
|
||||
handler: async (ctx) => {
|
||||
// Anyone can call this - intentionally public
|
||||
return await ctx.db
|
||||
.query("posts")
|
||||
.withIndex("by_public", (q) => q.eq("isPublic", true))
|
||||
.collect();
|
||||
},
|
||||
});
|
||||
|
||||
// INTERNAL - Only callable from other Convex functions
|
||||
export const _updateUserCredits = internalMutation({
|
||||
args: { userId: v.id("users"), amount: v.number() },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
// This cannot be called directly from clients
|
||||
await ctx.db.patch(args.userId, {
|
||||
credits: args.amount,
|
||||
});
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Argument Validation Check
|
||||
|
||||
```typescript
|
||||
// GOOD: Strict validation
|
||||
export const createPost = mutation({
|
||||
args: {
|
||||
title: v.string(),
|
||||
content: v.string(),
|
||||
category: v.union(
|
||||
v.literal("tech"),
|
||||
v.literal("news"),
|
||||
v.literal("other")
|
||||
),
|
||||
},
|
||||
returns: v.id("posts"),
|
||||
handler: async (ctx, args) => {
|
||||
const identity = await requireAuth(ctx);
|
||||
return await ctx.db.insert("posts", {
|
||||
...args,
|
||||
authorId: identity.tokenIdentifier,
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
// BAD: Weak validation
|
||||
export const createPostUnsafe = mutation({
|
||||
args: {
|
||||
data: v.any(), // DANGEROUS: Allows any data
|
||||
},
|
||||
returns: v.id("posts"),
|
||||
handler: async (ctx, args) => {
|
||||
return await ctx.db.insert("posts", args.data);
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Row-Level Access Control Check
|
||||
|
||||
```typescript
|
||||
// Verify ownership before update
|
||||
export const updateTask = mutation({
|
||||
args: {
|
||||
taskId: v.id("tasks"),
|
||||
title: v.string(),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const identity = await requireAuth(ctx);
|
||||
|
||||
const task = await ctx.db.get(args.taskId);
|
||||
|
||||
// Check ownership
|
||||
if (!task || task.userId !== identity.tokenIdentifier) {
|
||||
throw new ConvexError("Not authorized to update this task");
|
||||
}
|
||||
|
||||
await ctx.db.patch(args.taskId, { title: args.title });
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
// Verify ownership before delete
|
||||
export const deleteTask = mutation({
|
||||
args: { taskId: v.id("tasks") },
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
const identity = await requireAuth(ctx);
|
||||
|
||||
const task = await ctx.db.get(args.taskId);
|
||||
|
||||
if (!task || task.userId !== identity.tokenIdentifier) {
|
||||
throw new ConvexError("Not authorized to delete this task");
|
||||
}
|
||||
|
||||
await ctx.db.delete(args.taskId);
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Environment Variables Check
|
||||
|
||||
```typescript
|
||||
// convex/actions.ts
|
||||
"use node";
|
||||
|
||||
import { action } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
|
||||
export const sendEmail = action({
|
||||
args: {
|
||||
to: v.string(),
|
||||
subject: v.string(),
|
||||
body: v.string(),
|
||||
},
|
||||
returns: v.object({ success: v.boolean() }),
|
||||
handler: async (ctx, args) => {
|
||||
// Access API key from environment
|
||||
const apiKey = process.env.RESEND_API_KEY;
|
||||
|
||||
if (!apiKey) {
|
||||
throw new Error("RESEND_API_KEY not configured");
|
||||
}
|
||||
|
||||
const response = await fetch("https://api.resend.com/emails", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Authorization": `Bearer ${apiKey}`,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
from: "noreply@example.com",
|
||||
to: args.to,
|
||||
subject: args.subject,
|
||||
html: args.body,
|
||||
}),
|
||||
});
|
||||
|
||||
return { success: response.ok };
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Complete Security Pattern
|
||||
|
||||
```typescript
|
||||
// convex/secure.ts
|
||||
import { query, mutation, internalMutation } from "./_generated/server";
|
||||
import { v } from "convex/values";
|
||||
import { ConvexError } from "convex/values";
|
||||
|
||||
// Authentication helper
|
||||
async function getAuthenticatedUser(ctx: QueryCtx | MutationCtx) {
|
||||
const identity = await ctx.auth.getUserIdentity();
|
||||
if (!identity) {
|
||||
throw new ConvexError({
|
||||
code: "UNAUTHENTICATED",
|
||||
message: "You must be logged in",
|
||||
});
|
||||
}
|
||||
|
||||
const user = await ctx.db
|
||||
.query("users")
|
||||
.withIndex("by_tokenIdentifier", (q) =>
|
||||
q.eq("tokenIdentifier", identity.tokenIdentifier)
|
||||
)
|
||||
.unique();
|
||||
|
||||
if (!user) {
|
||||
throw new ConvexError({
|
||||
code: "USER_NOT_FOUND",
|
||||
message: "User profile not found",
|
||||
});
|
||||
}
|
||||
|
||||
return user;
|
||||
}
|
||||
|
||||
// Check admin role
|
||||
async function requireAdmin(ctx: QueryCtx | MutationCtx) {
|
||||
const user = await getAuthenticatedUser(ctx);
|
||||
|
||||
if (user.role !== "admin") {
|
||||
throw new ConvexError({
|
||||
code: "FORBIDDEN",
|
||||
message: "Admin access required",
|
||||
});
|
||||
}
|
||||
|
||||
return user;
|
||||
}
|
||||
|
||||
// Public: List own tasks
|
||||
export const listMyTasks = query({
|
||||
args: {},
|
||||
returns: v.array(v.object({
|
||||
_id: v.id("tasks"),
|
||||
title: v.string(),
|
||||
completed: v.boolean(),
|
||||
})),
|
||||
handler: async (ctx) => {
|
||||
const user = await getAuthenticatedUser(ctx);
|
||||
|
||||
return await ctx.db
|
||||
.query("tasks")
|
||||
.withIndex("by_user", (q) => q.eq("userId", user._id))
|
||||
.collect();
|
||||
},
|
||||
});
|
||||
|
||||
// Admin only: List all users
|
||||
export const listAllUsers = query({
|
||||
args: {},
|
||||
returns: v.array(v.object({
|
||||
_id: v.id("users"),
|
||||
name: v.string(),
|
||||
role: v.string(),
|
||||
})),
|
||||
handler: async (ctx) => {
|
||||
await requireAdmin(ctx);
|
||||
|
||||
return await ctx.db.query("users").collect();
|
||||
},
|
||||
});
|
||||
|
||||
// Internal: Update user role (never exposed)
|
||||
export const _setUserRole = internalMutation({
|
||||
args: {
|
||||
userId: v.id("users"),
|
||||
role: v.union(v.literal("user"), v.literal("admin")),
|
||||
},
|
||||
returns: v.null(),
|
||||
handler: async (ctx, args) => {
|
||||
await ctx.db.patch(args.userId, { role: args.role });
|
||||
return null;
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
- Never run `npx convex deploy` unless explicitly instructed
|
||||
- Never run any git commands unless explicitly instructed
|
||||
- Always verify user identity before returning sensitive data
|
||||
- Use internal functions for sensitive operations
|
||||
- Validate all arguments with strict validators
|
||||
- Check ownership before update/delete operations
|
||||
- Store API keys in environment variables
|
||||
- Review all public functions for security implications
|
||||
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Missing authentication checks** - Always verify identity
|
||||
2. **Exposing internal operations** - Use `internalMutation`/`internalQuery` instead of public functions
|
||||
3. **Trusting client-provided IDs** - Verify ownership
|
||||
4. **Using v.any() for arguments** - Use specific validators
|
||||
5. **Hardcoding secrets** - Use environment variables
|
||||
|
||||
## References
|
||||
|
||||
- Convex Documentation: https://docs.convex.dev/
|
||||
- Convex LLMs.txt: https://docs.convex.dev/llms.txt
|
||||
- Authentication: https://docs.convex.dev/auth
|
||||
- Production Security: https://docs.convex.dev/production
|
||||
- Functions Auth: https://docs.convex.dev/auth/functions-auth
|
||||
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"name": "frontend-design",
|
||||
"description": "Frontend design skill for UI/UX implementation",
|
||||
"author": {
|
||||
"name": "Anthropic",
|
||||
"email": "support@anthropic.com"
|
||||
}
|
||||
}
|
||||
202
.claude/skills/frontend-design/LICENSE
Normal file
202
.claude/skills/frontend-design/LICENSE
Normal file
@@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
31
.claude/skills/frontend-design/README.md
Normal file
31
.claude/skills/frontend-design/README.md
Normal file
@@ -0,0 +1,31 @@
|
||||
# Frontend Design Plugin
|
||||
|
||||
Generates distinctive, production-grade frontend interfaces that avoid generic AI aesthetics.
|
||||
|
||||
## What It Does
|
||||
|
||||
Claude automatically uses this skill for frontend work. Creates production-ready code with:
|
||||
|
||||
- Bold aesthetic choices
|
||||
- Distinctive typography and color palettes
|
||||
- High-impact animations and visual details
|
||||
- Context-aware implementation
|
||||
|
||||
## Usage
|
||||
|
||||
```
|
||||
"Create a dashboard for a music streaming app"
|
||||
"Build a landing page for an AI security startup"
|
||||
"Design a settings panel with dark mode"
|
||||
```
|
||||
|
||||
Claude will choose a clear aesthetic direction and implement production code with meticulous attention to detail.
|
||||
|
||||
## Learn More
|
||||
|
||||
See the [Frontend Aesthetics Cookbook](https://github.com/anthropics/claude-cookbooks/blob/main/coding/prompting_for_frontend_aesthetics.ipynb) for detailed guidance on prompting for high-quality frontend design.
|
||||
|
||||
## Authors
|
||||
|
||||
Prithvi Rajasekaran (prithvi@anthropic.com)
|
||||
Alexander Bricken (alexander@anthropic.com)
|
||||
@@ -0,0 +1,42 @@
|
||||
---
|
||||
name: frontend-design
|
||||
description: Create distinctive, production-grade frontend interfaces with high design quality. Use this skill when the user asks to build web components, pages, or applications. Generates creative, polished code that avoids generic AI aesthetics.
|
||||
license: Complete terms in LICENSE
|
||||
---
|
||||
|
||||
This skill guides creation of distinctive, production-grade frontend interfaces that avoid generic "AI slop" aesthetics. Implement real working code with exceptional attention to aesthetic details and creative choices.
|
||||
|
||||
The user provides frontend requirements: a component, page, application, or interface to build. They may include context about the purpose, audience, or technical constraints.
|
||||
|
||||
## Design Thinking
|
||||
|
||||
Before coding, understand the context and commit to a BOLD aesthetic direction:
|
||||
- **Purpose**: What problem does this interface solve? Who uses it?
|
||||
- **Tone**: Pick an extreme: brutally minimal, maximalist chaos, retro-futuristic, organic/natural, luxury/refined, playful/toy-like, editorial/magazine, brutalist/raw, art deco/geometric, soft/pastel, industrial/utilitarian, etc. There are so many flavors to choose from. Use these for inspiration but design one that is true to the aesthetic direction.
|
||||
- **Constraints**: Technical requirements (framework, performance, accessibility).
|
||||
- **Differentiation**: What makes this UNFORGETTABLE? What's the one thing someone will remember?
|
||||
|
||||
**CRITICAL**: Choose a clear conceptual direction and execute it with precision. Bold maximalism and refined minimalism both work - the key is intentionality, not intensity.
|
||||
|
||||
Then implement working code (HTML/CSS/JS, React, Vue, etc.) that is:
|
||||
- Production-grade and functional
|
||||
- Visually striking and memorable
|
||||
- Cohesive with a clear aesthetic point-of-view
|
||||
- Meticulously refined in every detail
|
||||
|
||||
## Frontend Aesthetics Guidelines
|
||||
|
||||
Focus on:
|
||||
- **Typography**: Choose fonts that are beautiful, unique, and interesting. Avoid generic fonts like Arial and Inter; opt instead for distinctive choices that elevate the frontend's aesthetics; unexpected, characterful font choices. Pair a distinctive display font with a refined body font.
|
||||
- **Color & Theme**: Commit to a cohesive aesthetic. Use CSS variables for consistency. Dominant colors with sharp accents outperform timid, evenly-distributed palettes.
|
||||
- **Motion**: Use animations for effects and micro-interactions. Prioritize CSS-only solutions for HTML. Use Motion library for React when available. Focus on high-impact moments: one well-orchestrated page load with staggered reveals (animation-delay) creates more delight than scattered micro-interactions. Use scroll-triggering and hover states that surprise.
|
||||
- **Spatial Composition**: Unexpected layouts. Asymmetry. Overlap. Diagonal flow. Grid-breaking elements. Generous negative space OR controlled density.
|
||||
- **Backgrounds & Visual Details**: Create atmosphere and depth rather than defaulting to solid colors. Add contextual effects and textures that match the overall aesthetic. Apply creative forms like gradient meshes, noise textures, geometric patterns, layered transparencies, dramatic shadows, decorative borders, custom cursors, and grain overlays.
|
||||
|
||||
NEVER use generic AI-generated aesthetics like overused font families (Inter, Roboto, Arial, system fonts), cliched color schemes (particularly purple gradients on white backgrounds), predictable layouts and component patterns, and cookie-cutter design that lacks context-specific character.
|
||||
|
||||
Interpret creatively and make unexpected choices that feel genuinely designed for the context. No design should be the same. Vary between light and dark themes, different fonts, different aesthetics. NEVER converge on common choices (Space Grotesk, for example) across generations.
|
||||
|
||||
**IMPORTANT**: Match implementation complexity to the aesthetic vision. Maximalist designs need elaborate code with extensive animations and effects. Minimalist or refined designs need restraint, precision, and careful attention to spacing, typography, and subtle details. Elegance comes from executing the vision well.
|
||||
|
||||
Remember: Claude is capable of extraordinary creative work. Don't hold back, show what can truly be created when thinking outside the box and committing fully to a distinctive vision.
|
||||
393
.claude/skills/payload/SKILL.md
Normal file
393
.claude/skills/payload/SKILL.md
Normal file
@@ -0,0 +1,393 @@
|
||||
---
|
||||
name: payload
|
||||
description: Use when working with Payload CMS projects (payload.config.ts, collections, fields, hooks, access control, Payload API). Use when debugging validation errors, security issues, relationship queries, transactions, or hook behavior.
|
||||
---
|
||||
|
||||
# Payload CMS Application Development
|
||||
|
||||
Payload is a Next.js native CMS with TypeScript-first architecture, providing admin panel, database management, REST/GraphQL APIs, authentication, and file storage.
|
||||
|
||||
## Quick Reference
|
||||
|
||||
| Task | Solution | Details |
|
||||
| ------------------------ | ----------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| Auto-generate slugs | `slugField()` | [FIELDS.md#slug-field-helper](reference/FIELDS.md#slug-field-helper) |
|
||||
| Restrict content by user | Access control with query | [ACCESS-CONTROL.md#row-level-security-with-complex-queries](reference/ACCESS-CONTROL.md#row-level-security-with-complex-queries) |
|
||||
| Local API user ops | `user` + `overrideAccess: false` | [QUERIES.md#access-control-in-local-api](reference/QUERIES.md#access-control-in-local-api) |
|
||||
| Draft/publish workflow | `versions: { drafts: true }` | [COLLECTIONS.md#versioning--drafts](reference/COLLECTIONS.md#versioning--drafts) |
|
||||
| Computed fields | `virtual: true` with afterRead | [FIELDS.md#virtual-fields](reference/FIELDS.md#virtual-fields) |
|
||||
| Conditional fields | `admin.condition` | [FIELDS.md#conditional-fields](reference/FIELDS.md#conditional-fields) |
|
||||
| Custom field validation | `validate` function | [FIELDS.md#validation](reference/FIELDS.md#validation) |
|
||||
| Filter relationship list | `filterOptions` on field | [FIELDS.md#relationship](reference/FIELDS.md#relationship) |
|
||||
| Select specific fields | `select` parameter | [QUERIES.md#field-selection](reference/QUERIES.md#field-selection) |
|
||||
| Auto-set author/dates | beforeChange hook | [HOOKS.md#collection-hooks](reference/HOOKS.md#collection-hooks) |
|
||||
| Prevent hook loops | `req.context` check | [HOOKS.md#context](reference/HOOKS.md#context) |
|
||||
| Cascading deletes | beforeDelete hook | [HOOKS.md#collection-hooks](reference/HOOKS.md#collection-hooks) |
|
||||
| Geospatial queries | `point` field with `near`/`within` | [FIELDS.md#point-geolocation](reference/FIELDS.md#point-geolocation) |
|
||||
| Reverse relationships | `join` field type | [FIELDS.md#join-fields](reference/FIELDS.md#join-fields) |
|
||||
| Next.js revalidation | Context control in afterChange | [HOOKS.md#nextjs-revalidation-with-context-control](reference/HOOKS.md#nextjs-revalidation-with-context-control) |
|
||||
| Query by relationship | Nested property syntax | [QUERIES.md#nested-properties](reference/QUERIES.md#nested-properties) |
|
||||
| Complex queries | AND/OR logic | [QUERIES.md#andor-logic](reference/QUERIES.md#andor-logic) |
|
||||
| Transactions | Pass `req` to operations | [ADAPTERS.md#threading-req-through-operations](reference/ADAPTERS.md#threading-req-through-operations) |
|
||||
| Background jobs | Jobs queue with tasks | [ADVANCED.md#jobs-queue](reference/ADVANCED.md#jobs-queue) |
|
||||
| Custom API routes | Collection custom endpoints | [ADVANCED.md#custom-endpoints](reference/ADVANCED.md#custom-endpoints) |
|
||||
| Cloud storage | Storage adapter plugins | [ADAPTERS.md#storage-adapters](reference/ADAPTERS.md#storage-adapters) |
|
||||
| Multi-language | `localization` config + `localized: true` | [ADVANCED.md#localization](reference/ADVANCED.md#localization) |
|
||||
| Create plugin | `(options) => (config) => Config` | [PLUGIN-DEVELOPMENT.md#plugin-architecture](reference/PLUGIN-DEVELOPMENT.md#plugin-architecture) |
|
||||
| Plugin package setup | Package structure with SWC | [PLUGIN-DEVELOPMENT.md#plugin-package-structure](reference/PLUGIN-DEVELOPMENT.md#plugin-package-structure) |
|
||||
| Add fields to collection | Map collections, spread fields | [PLUGIN-DEVELOPMENT.md#adding-fields-to-collections](reference/PLUGIN-DEVELOPMENT.md#adding-fields-to-collections) |
|
||||
| Plugin hooks | Preserve existing hooks in array | [PLUGIN-DEVELOPMENT.md#adding-hooks](reference/PLUGIN-DEVELOPMENT.md#adding-hooks) |
|
||||
| Check field type | Type guard functions | [FIELD-TYPE-GUARDS.md](reference/FIELD-TYPE-GUARDS.md) |
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
npx create-payload-app@latest my-app
|
||||
cd my-app
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
### Minimal Config
|
||||
|
||||
```ts
|
||||
import { buildConfig } from 'payload'
|
||||
import { mongooseAdapter } from '@payloadcms/db-mongodb'
|
||||
import { lexicalEditor } from '@payloadcms/richtext-lexical'
|
||||
import path from 'path'
|
||||
import { fileURLToPath } from 'url'
|
||||
|
||||
const filename = fileURLToPath(import.meta.url)
|
||||
const dirname = path.dirname(filename)
|
||||
|
||||
export default buildConfig({
|
||||
admin: {
|
||||
user: 'users',
|
||||
importMap: {
|
||||
baseDir: path.resolve(dirname),
|
||||
},
|
||||
},
|
||||
collections: [Users, Media],
|
||||
editor: lexicalEditor(),
|
||||
secret: process.env.PAYLOAD_SECRET || '',
|
||||
typescript: {
|
||||
outputFile: path.resolve(dirname, 'payload-types.ts'),
|
||||
},
|
||||
db: mongooseAdapter({
|
||||
url: process.env.DATABASE_URL || '',
|
||||
}),
|
||||
})
|
||||
```
|
||||
|
||||
## Essential Patterns
|
||||
|
||||
### Basic Collection
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
admin: {
|
||||
useAsTitle: 'title',
|
||||
defaultColumns: ['title', 'author', 'status', 'createdAt'],
|
||||
},
|
||||
fields: [
|
||||
{ name: 'title', type: 'text', required: true },
|
||||
{ name: 'slug', type: 'text', unique: true, index: true },
|
||||
{ name: 'content', type: 'richText' },
|
||||
{ name: 'author', type: 'relationship', relationTo: 'users' },
|
||||
],
|
||||
timestamps: true,
|
||||
}
|
||||
```
|
||||
|
||||
For more collection patterns (auth, upload, drafts, live preview), see [COLLECTIONS.md](reference/COLLECTIONS.md).
|
||||
|
||||
### Common Fields
|
||||
|
||||
```ts
|
||||
// Text field
|
||||
{ name: 'title', type: 'text', required: true }
|
||||
|
||||
// Relationship
|
||||
{ name: 'author', type: 'relationship', relationTo: 'users', required: true }
|
||||
|
||||
// Rich text
|
||||
{ name: 'content', type: 'richText', required: true }
|
||||
|
||||
// Select
|
||||
{ name: 'status', type: 'select', options: ['draft', 'published'], defaultValue: 'draft' }
|
||||
|
||||
// Upload
|
||||
{ name: 'image', type: 'upload', relationTo: 'media' }
|
||||
```
|
||||
|
||||
For all field types (array, blocks, point, join, virtual, conditional, etc.), see [FIELDS.md](reference/FIELDS.md).
|
||||
|
||||
### Hook Example
|
||||
|
||||
```ts
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
hooks: {
|
||||
beforeChange: [
|
||||
async ({ data, operation }) => {
|
||||
if (operation === 'create') {
|
||||
data.slug = slugify(data.title)
|
||||
}
|
||||
return data
|
||||
},
|
||||
],
|
||||
},
|
||||
fields: [{ name: 'title', type: 'text' }],
|
||||
}
|
||||
```
|
||||
|
||||
For all hook patterns, see [HOOKS.md](reference/HOOKS.md). For access control, see [ACCESS-CONTROL.md](reference/ACCESS-CONTROL.md).
|
||||
|
||||
### Access Control with Type Safety
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
import type { User } from '@/payload-types'
|
||||
|
||||
// Type-safe access control
|
||||
export const adminOnly: Access = ({ req }) => {
|
||||
const user = req.user as User
|
||||
return user?.roles?.includes('admin') || false
|
||||
}
|
||||
|
||||
// Row-level access control
|
||||
export const ownPostsOnly: Access = ({ req }) => {
|
||||
const user = req.user as User
|
||||
if (!user) return false
|
||||
if (user.roles?.includes('admin')) return true
|
||||
|
||||
return {
|
||||
author: { equals: user.id },
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Query Example
|
||||
|
||||
```ts
|
||||
// Local API
|
||||
const posts = await payload.find({
|
||||
collection: 'posts',
|
||||
where: {
|
||||
status: { equals: 'published' },
|
||||
'author.name': { contains: 'john' },
|
||||
},
|
||||
depth: 2,
|
||||
limit: 10,
|
||||
sort: '-createdAt',
|
||||
})
|
||||
|
||||
// Query with populated relationships
|
||||
const post = await payload.findByID({
|
||||
collection: 'posts',
|
||||
id: '123',
|
||||
depth: 2, // Populates relationships (default is 2)
|
||||
})
|
||||
// Returns: { author: { id: "user123", name: "John" } }
|
||||
|
||||
// Without depth, relationships return IDs only
|
||||
const shallowPost = await payload.findByID({
|
||||
collection: 'posts',
|
||||
id: '123',
|
||||
depth: 0,
|
||||
})
|
||||
// Returns: { author: "user123" }
|
||||
```
|
||||
|
||||
For all query operators and REST/GraphQL examples, see [QUERIES.md](reference/QUERIES.md).
|
||||
|
||||
### Getting Payload Instance
|
||||
|
||||
```ts
|
||||
// In API routes (Next.js)
|
||||
import { getPayload } from 'payload'
|
||||
import config from '@payload-config'
|
||||
|
||||
export async function GET() {
|
||||
const payload = await getPayload({ config })
|
||||
|
||||
const posts = await payload.find({
|
||||
collection: 'posts',
|
||||
})
|
||||
|
||||
return Response.json(posts)
|
||||
}
|
||||
|
||||
// In Server Components
|
||||
import { getPayload } from 'payload'
|
||||
import config from '@payload-config'
|
||||
|
||||
export default async function Page() {
|
||||
const payload = await getPayload({ config })
|
||||
const { docs } = await payload.find({ collection: 'posts' })
|
||||
|
||||
return <div>{docs.map(post => <h1 key={post.id}>{post.title}</h1>)}</div>
|
||||
}
|
||||
```
|
||||
|
||||
## Security Pitfalls
|
||||
|
||||
### 1. Local API Access Control (CRITICAL)
|
||||
|
||||
**By default, Local API operations bypass ALL access control**, even when passing a user.
|
||||
|
||||
```ts
|
||||
// ❌ SECURITY BUG: Passes user but ignores their permissions
|
||||
await payload.find({
|
||||
collection: 'posts',
|
||||
user: someUser, // Access control is BYPASSED!
|
||||
})
|
||||
|
||||
// ✅ SECURE: Actually enforces the user's permissions
|
||||
await payload.find({
|
||||
collection: 'posts',
|
||||
user: someUser,
|
||||
overrideAccess: false, // REQUIRED for access control
|
||||
})
|
||||
```
|
||||
|
||||
**When to use each:**
|
||||
|
||||
- `overrideAccess: true` (default) - Server-side operations you trust (cron jobs, system tasks)
|
||||
- `overrideAccess: false` - When operating on behalf of a user (API routes, webhooks)
|
||||
|
||||
See [QUERIES.md#access-control-in-local-api](reference/QUERIES.md#access-control-in-local-api).
|
||||
|
||||
### 2. Transaction Failures in Hooks
|
||||
|
||||
**Nested operations in hooks without `req` break transaction atomicity.**
|
||||
|
||||
```ts
|
||||
// ❌ DATA CORRUPTION RISK: Separate transaction
|
||||
hooks: {
|
||||
afterChange: [
|
||||
async ({ doc, req }) => {
|
||||
await req.payload.create({
|
||||
collection: 'audit-log',
|
||||
data: { docId: doc.id },
|
||||
// Missing req - runs in separate transaction!
|
||||
})
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
// ✅ ATOMIC: Same transaction
|
||||
hooks: {
|
||||
afterChange: [
|
||||
async ({ doc, req }) => {
|
||||
await req.payload.create({
|
||||
collection: 'audit-log',
|
||||
data: { docId: doc.id },
|
||||
req, // Maintains atomicity
|
||||
})
|
||||
},
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
See [ADAPTERS.md#threading-req-through-operations](reference/ADAPTERS.md#threading-req-through-operations).
|
||||
|
||||
### 3. Infinite Hook Loops
|
||||
|
||||
**Hooks triggering operations that trigger the same hooks create infinite loops.**
|
||||
|
||||
```ts
|
||||
// ❌ INFINITE LOOP
|
||||
hooks: {
|
||||
afterChange: [
|
||||
async ({ doc, req }) => {
|
||||
await req.payload.update({
|
||||
collection: 'posts',
|
||||
id: doc.id,
|
||||
data: { views: doc.views + 1 },
|
||||
req,
|
||||
}) // Triggers afterChange again!
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
// ✅ SAFE: Use context flag
|
||||
hooks: {
|
||||
afterChange: [
|
||||
async ({ doc, req, context }) => {
|
||||
if (context.skipHooks) return
|
||||
|
||||
await req.payload.update({
|
||||
collection: 'posts',
|
||||
id: doc.id,
|
||||
data: { views: doc.views + 1 },
|
||||
context: { skipHooks: true },
|
||||
req,
|
||||
})
|
||||
},
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
See [HOOKS.md#context](reference/HOOKS.md#context).
|
||||
|
||||
## Project Structure
|
||||
|
||||
```txt
|
||||
src/
|
||||
├── app/
|
||||
│ ├── (frontend)/
|
||||
│ │ └── page.tsx
|
||||
│ └── (payload)/
|
||||
│ └── admin/[[...segments]]/page.tsx
|
||||
├── collections/
|
||||
│ ├── Posts.ts
|
||||
│ ├── Media.ts
|
||||
│ └── Users.ts
|
||||
├── globals/
|
||||
│ └── Header.ts
|
||||
├── components/
|
||||
│ └── CustomField.tsx
|
||||
├── hooks/
|
||||
│ └── slugify.ts
|
||||
└── payload.config.ts
|
||||
```
|
||||
|
||||
## Type Generation
|
||||
|
||||
```ts
|
||||
// payload.config.ts
|
||||
export default buildConfig({
|
||||
typescript: {
|
||||
outputFile: path.resolve(dirname, 'payload-types.ts'),
|
||||
},
|
||||
// ...
|
||||
})
|
||||
|
||||
// Usage
|
||||
import type { Post, User } from '@/payload-types'
|
||||
```
|
||||
|
||||
## Reference Documentation
|
||||
|
||||
- **[FIELDS.md](reference/FIELDS.md)** - All field types, validation, admin options
|
||||
- **[FIELD-TYPE-GUARDS.md](reference/FIELD-TYPE-GUARDS.md)** - Type guards for runtime field type checking and narrowing
|
||||
- **[COLLECTIONS.md](reference/COLLECTIONS.md)** - Collection configs, auth, upload, drafts, live preview
|
||||
- **[HOOKS.md](reference/HOOKS.md)** - Collection hooks, field hooks, context patterns
|
||||
- **[ACCESS-CONTROL.md](reference/ACCESS-CONTROL.md)** - Collection, field, global access control, RBAC, multi-tenant
|
||||
- **[ACCESS-CONTROL-ADVANCED.md](reference/ACCESS-CONTROL-ADVANCED.md)** - Context-aware, time-based, subscription-based access, factory functions, templates
|
||||
- **[QUERIES.md](reference/QUERIES.md)** - Query operators, Local/REST/GraphQL APIs
|
||||
- **[ENDPOINTS.md](reference/ENDPOINTS.md)** - Custom API endpoints: authentication, helpers, request/response patterns
|
||||
- **[ADAPTERS.md](reference/ADAPTERS.md)** - Database, storage, email adapters, transactions
|
||||
- **[ADVANCED.md](reference/ADVANCED.md)** - Authentication, jobs, endpoints, components, plugins, localization
|
||||
- **[PLUGIN-DEVELOPMENT.md](reference/PLUGIN-DEVELOPMENT.md)** - Plugin architecture, monorepo structure, patterns, best practices
|
||||
|
||||
## Resources
|
||||
|
||||
- llms-full.txt: <https://payloadcms.com/llms-full.txt>
|
||||
- Docs: <https://payloadcms.com/docs>
|
||||
- GitHub: <https://github.com/payloadcms/payload>
|
||||
- Examples: <https://github.com/payloadcms/payload/tree/main/examples>
|
||||
- Templates: <https://github.com/payloadcms/payload/tree/main/templates>
|
||||
704
.claude/skills/payload/reference/ACCESS-CONTROL-ADVANCED.md
Normal file
704
.claude/skills/payload/reference/ACCESS-CONTROL-ADVANCED.md
Normal file
@@ -0,0 +1,704 @@
|
||||
# Payload CMS Access Control - Advanced Patterns
|
||||
|
||||
Advanced access control patterns including context-aware access, time-based restrictions, factory functions, and production templates.
|
||||
|
||||
## Context-Aware Access Patterns
|
||||
|
||||
### Locale-Specific Access
|
||||
|
||||
Control access based on user locale for internationalized content.
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export const localeSpecificAccess: Access = ({ req: { user, locale } }) => {
|
||||
// Authenticated users can access all locales
|
||||
if (user) return true
|
||||
|
||||
// Public users can only access English content
|
||||
if (locale === 'en') return true
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// Usage in collection
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
access: {
|
||||
read: localeSpecificAccess,
|
||||
},
|
||||
fields: [{ name: 'title', type: 'text', localized: true }],
|
||||
}
|
||||
```
|
||||
|
||||
**Source**: `docs/access-control/overview.mdx` (req.locale argument)
|
||||
|
||||
### Device-Specific Access
|
||||
|
||||
Restrict access based on device type or user agent.
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export const mobileOnlyAccess: Access = ({ req: { headers } }) => {
|
||||
const userAgent = headers?.get('user-agent') || ''
|
||||
return /mobile|android|iphone/i.test(userAgent)
|
||||
}
|
||||
|
||||
export const desktopOnlyAccess: Access = ({ req: { headers } }) => {
|
||||
const userAgent = headers?.get('user-agent') || ''
|
||||
return !/mobile|android|iphone/i.test(userAgent)
|
||||
}
|
||||
|
||||
// Usage
|
||||
export const MobileContent: CollectionConfig = {
|
||||
slug: 'mobile-content',
|
||||
access: {
|
||||
read: mobileOnlyAccess,
|
||||
},
|
||||
fields: [{ name: 'title', type: 'text' }],
|
||||
}
|
||||
```
|
||||
|
||||
**Source**: Synthesized (headers pattern)
|
||||
|
||||
### IP-Based Access
|
||||
|
||||
Restrict access from specific IP addresses (requires middleware/proxy headers).
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export const restrictedIpAccess = (allowedIps: string[]): Access => {
|
||||
return ({ req: { headers } }) => {
|
||||
const forwarded = headers?.get('x-forwarded-for')
// x-forwarded-for can be a comma-separated chain; the client IP is the first entry
const ip = forwarded?.split(',')[0]?.trim() || headers?.get('x-real-ip')
return allowedIps.includes(ip || '')
|
||||
}
|
||||
}
|
||||
|
||||
// Usage
|
||||
const internalIps = ['192.168.1.50', '10.0.0.5'] // exact matches only — CIDR ranges require a dedicated library
|
||||
|
||||
export const InternalDocs: CollectionConfig = {
|
||||
slug: 'internal-docs',
|
||||
access: {
|
||||
read: restrictedIpAccess(internalIps),
|
||||
},
|
||||
fields: [{ name: 'content', type: 'richText' }],
|
||||
}
|
||||
```
|
||||
|
||||
**Note**: Requires your server to pass IP address via headers (common with proxies/load balancers).
|
||||
|
||||
**Source**: Synthesized (headers pattern)
|
||||
|
||||
## Time-Based Access Patterns
|
||||
|
||||
### Today's Records Only
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export const todayOnlyAccess: Access = ({ req: { user } }) => {
|
||||
if (!user) return false
|
||||
|
||||
const now = new Date()
|
||||
const startOfDay = new Date(now.getFullYear(), now.getMonth(), now.getDate())
|
||||
const endOfDay = new Date(startOfDay.getTime() + 24 * 60 * 60 * 1000)
|
||||
|
||||
return {
|
||||
createdAt: {
|
||||
greater_than_equal: startOfDay.toISOString(),
|
||||
less_than: endOfDay.toISOString(),
|
||||
},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Source**: `test/access-control/config.ts` (query constraint patterns)
|
||||
|
||||
### Recent Records (Last N Days)
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export const recentRecordsAccess = (days: number): Access => {
|
||||
return ({ req: { user } }) => {
|
||||
if (!user) return false
|
||||
if (user.roles?.includes('admin')) return true
|
||||
|
||||
const cutoff = new Date()
|
||||
cutoff.setDate(cutoff.getDate() - days)
|
||||
|
||||
return {
|
||||
createdAt: {
|
||||
greater_than_equal: cutoff.toISOString(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Usage: Users see only last 30 days, admins see all
|
||||
export const Logs: CollectionConfig = {
|
||||
slug: 'logs',
|
||||
access: {
|
||||
read: recentRecordsAccess(30),
|
||||
},
|
||||
fields: [{ name: 'message', type: 'text' }],
|
||||
}
|
||||
```
|
||||
|
||||
### Scheduled Content (Publish Date Range)
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export const scheduledContentAccess: Access = ({ req: { user } }) => {
|
||||
// Editors see all content
|
||||
if (user?.roles?.includes('admin') || user?.roles?.includes('editor')) {
|
||||
return true
|
||||
}
|
||||
|
||||
const now = new Date().toISOString()
|
||||
|
||||
// Public sees only content within publish window
|
||||
return {
|
||||
and: [
|
||||
{ publishDate: { less_than_equal: now } },
|
||||
{
|
||||
or: [{ unpublishDate: { exists: false } }, { unpublishDate: { greater_than: now } }],
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Source**: Synthesized (query constraint + date patterns)
|
||||
|
||||
## Subscription-Based Access
|
||||
|
||||
### Active Subscription Required
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export const activeSubscriptionAccess: Access = async ({ req, req: { user } }) => {
|
||||
if (!user) return false
|
||||
if (user.roles?.includes('admin')) return true
|
||||
|
||||
try {
|
||||
const subscription = await req.payload.findByID({
|
||||
collection: 'subscriptions',
|
||||
id: user.subscriptionId,
|
||||
})
|
||||
|
||||
return subscription?.status === 'active'
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Usage
|
||||
export const PremiumContent: CollectionConfig = {
|
||||
slug: 'premium-content',
|
||||
access: {
|
||||
read: activeSubscriptionAccess,
|
||||
},
|
||||
fields: [{ name: 'title', type: 'text' }],
|
||||
}
|
||||
```
|
||||
|
||||
### Subscription Tier-Based Access
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export const tierBasedAccess = (requiredTier: string): Access => {
|
||||
const tierHierarchy = ['free', 'basic', 'pro', 'enterprise']
|
||||
|
||||
return async ({ req, req: { user } }) => {
|
||||
if (!user) return false
|
||||
if (user.roles?.includes('admin')) return true
|
||||
|
||||
try {
|
||||
const subscription = await req.payload.findByID({
|
||||
collection: 'subscriptions',
|
||||
id: user.subscriptionId,
|
||||
})
|
||||
|
||||
if (subscription?.status !== 'active') return false
|
||||
|
||||
const userTierIndex = tierHierarchy.indexOf(subscription.tier)
|
||||
const requiredTierIndex = tierHierarchy.indexOf(requiredTier)
|
||||
|
||||
return userTierIndex >= requiredTierIndex
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Usage
|
||||
export const EnterpriseFeatures: CollectionConfig = {
|
||||
slug: 'enterprise-features',
|
||||
access: {
|
||||
read: tierBasedAccess('enterprise'),
|
||||
},
|
||||
fields: [{ name: 'feature', type: 'text' }],
|
||||
}
|
||||
```
|
||||
|
||||
**Source**: Synthesized (async + cross-collection pattern)
|
||||
|
||||
## Factory Functions
|
||||
|
||||
Reusable functions that generate access control configurations.
|
||||
|
||||
### createRoleBasedAccess
|
||||
|
||||
Generate access control for specific roles.
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export function createRoleBasedAccess(roles: string[]): Access {
|
||||
return ({ req: { user } }) => {
|
||||
if (!user) return false
|
||||
return roles.some((role) => user.roles?.includes(role))
|
||||
}
|
||||
}
|
||||
|
||||
// Usage
|
||||
const adminOrEditor = createRoleBasedAccess(['admin', 'editor'])
|
||||
const moderatorAccess = createRoleBasedAccess(['admin', 'moderator'])
|
||||
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
access: {
|
||||
create: adminOrEditor,
|
||||
update: adminOrEditor,
|
||||
delete: moderatorAccess,
|
||||
},
|
||||
fields: [{ name: 'title', type: 'text' }],
|
||||
}
|
||||
```
|
||||
|
||||
**Source**: `test/access-control/config.ts`
|
||||
|
||||
### createOrgScopedAccess
|
||||
|
||||
Generate organization-scoped access with optional admin bypass.
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export function createOrgScopedAccess(allowAdmin = true): Access {
|
||||
return ({ req: { user } }) => {
|
||||
if (!user) return false
|
||||
if (allowAdmin && user.roles?.includes('admin')) return true
|
||||
|
||||
return {
|
||||
organizationId: { in: user.organizationIds || [] },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Usage
|
||||
const orgScoped = createOrgScopedAccess() // Admins bypass
|
||||
const strictOrgScoped = createOrgScopedAccess(false) // Admins also scoped
|
||||
|
||||
export const Projects: CollectionConfig = {
|
||||
slug: 'projects',
|
||||
access: {
|
||||
read: orgScoped,
|
||||
update: orgScoped,
|
||||
delete: strictOrgScoped,
|
||||
},
|
||||
fields: [
|
||||
{ name: 'title', type: 'text' },
|
||||
{ name: 'organizationId', type: 'text', required: true },
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
**Source**: `test/access-control/config.ts`
|
||||
|
||||
### createTeamBasedAccess
|
||||
|
||||
Generate team-scoped access with configurable field name.
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export function createTeamBasedAccess(teamField = 'teamId'): Access {
|
||||
return ({ req: { user } }) => {
|
||||
if (!user) return false
|
||||
if (user.roles?.includes('admin')) return true
|
||||
|
||||
return {
|
||||
[teamField]: { in: user.teamIds || [] },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Usage with custom field name
|
||||
const projectTeamAccess = createTeamBasedAccess('projectTeam')
|
||||
|
||||
export const Tasks: CollectionConfig = {
|
||||
slug: 'tasks',
|
||||
access: {
|
||||
read: projectTeamAccess,
|
||||
update: projectTeamAccess,
|
||||
},
|
||||
fields: [
|
||||
{ name: 'title', type: 'text' },
|
||||
{ name: 'projectTeam', type: 'text', required: true },
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
**Source**: Synthesized (org pattern variation)
|
||||
|
||||
### createTimeLimitedAccess
|
||||
|
||||
Generate access limited to records within specified days.
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export function createTimeLimitedAccess(daysAccess: number): Access {
|
||||
return ({ req: { user } }) => {
|
||||
if (!user) return false
|
||||
if (user.roles?.includes('admin')) return true
|
||||
|
||||
const cutoff = new Date()
|
||||
cutoff.setDate(cutoff.getDate() - daysAccess)
|
||||
|
||||
return {
|
||||
createdAt: {
|
||||
greater_than_equal: cutoff.toISOString(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Usage: Users see 90 days, admins see all
|
||||
export const ActivityLogs: CollectionConfig = {
|
||||
slug: 'activity-logs',
|
||||
access: {
|
||||
read: createTimeLimitedAccess(90),
|
||||
},
|
||||
fields: [{ name: 'action', type: 'text' }],
|
||||
}
|
||||
```
|
||||
|
||||
**Source**: Synthesized (time + query pattern)
|
||||
|
||||
## Configuration Templates
|
||||
|
||||
Complete collection configurations for common scenarios.
|
||||
|
||||
### Basic Authenticated Collection
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const BasicCollection: CollectionConfig = {
|
||||
slug: 'basic-collection',
|
||||
access: {
|
||||
create: ({ req: { user } }) => Boolean(user),
|
||||
read: ({ req: { user } }) => Boolean(user),
|
||||
update: ({ req: { user } }) => Boolean(user),
|
||||
delete: ({ req: { user } }) => Boolean(user),
|
||||
},
|
||||
fields: [
|
||||
{ name: 'title', type: 'text', required: true },
|
||||
{ name: 'content', type: 'richText' },
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
**Source**: `docs/access-control/collections.mdx`
|
||||
|
||||
### Public + Authenticated Collection
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const PublicAuthCollection: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
access: {
|
||||
// Only admins/editors can create
|
||||
create: ({ req: { user } }) => {
|
||||
return user?.roles?.some((role) => ['admin', 'editor'].includes(role)) || false
|
||||
},
|
||||
|
||||
// Authenticated users see all, public sees only published
|
||||
read: ({ req: { user } }) => {
|
||||
if (user) return true
|
||||
return { _status: { equals: 'published' } }
|
||||
},
|
||||
|
||||
// Only admins/editors can update
|
||||
update: ({ req: { user } }) => {
|
||||
return user?.roles?.some((role) => ['admin', 'editor'].includes(role)) || false
|
||||
},
|
||||
|
||||
// Only admins can delete
|
||||
delete: ({ req: { user } }) => {
|
||||
return user?.roles?.includes('admin') || false
|
||||
},
|
||||
},
|
||||
versions: {
|
||||
drafts: true,
|
||||
},
|
||||
fields: [
|
||||
{ name: 'title', type: 'text', required: true },
|
||||
{ name: 'content', type: 'richText', required: true },
|
||||
{ name: 'author', type: 'relationship', relationTo: 'users' },
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
**Source**: `templates/website/src/collections/Posts/index.ts`
|
||||
|
||||
### Multi-User/Self-Service Collection
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const SelfServiceCollection: CollectionConfig = {
|
||||
slug: 'users',
|
||||
auth: true,
|
||||
access: {
|
||||
// Admins can create users
|
||||
create: ({ req: { user } }) => user?.roles?.includes('admin') || false,
|
||||
|
||||
// Anyone can read user profiles
|
||||
read: () => true,
|
||||
|
||||
// Users can update self, admins can update anyone
|
||||
update: ({ req: { user }, id }) => {
|
||||
if (!user) return false
|
||||
if (user.roles?.includes('admin')) return true
|
||||
return user.id === id
|
||||
},
|
||||
|
||||
// Only admins can delete
|
||||
delete: ({ req: { user } }) => user?.roles?.includes('admin') || false,
|
||||
},
|
||||
fields: [
|
||||
{ name: 'name', type: 'text', required: true },
|
||||
{ name: 'email', type: 'email', required: true },
|
||||
{
|
||||
name: 'roles',
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['admin', 'editor', 'user'],
|
||||
access: {
|
||||
// Only admins can read/update roles
|
||||
read: ({ req: { user } }) => user?.roles?.includes('admin') || false,
|
||||
update: ({ req: { user } }) => user?.roles?.includes('admin') || false,
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
**Source**: `templates/website/src/collections/Users/index.ts`
|
||||
|
||||
## Debugging Tips
|
||||
|
||||
### Log Access Check Execution
|
||||
|
||||
```ts
|
||||
export const debugAccess: Access = ({ req: { user }, id }) => {
|
||||
console.log('Access check:', {
|
||||
userId: user?.id,
|
||||
userRoles: user?.roles,
|
||||
docId: id,
|
||||
timestamp: new Date().toISOString(),
|
||||
})
|
||||
return true
|
||||
}
|
||||
```
|
||||
|
||||
### Verify Arguments Availability
|
||||
|
||||
```ts
|
||||
export const checkArgsAccess: Access = (args) => {
|
||||
console.log('Available arguments:', {
|
||||
hasReq: 'req' in args,
|
||||
hasUser: args.req?.user ? 'yes' : 'no',
|
||||
hasId: args.id ? 'provided' : 'undefined',
|
||||
hasData: args.data ? 'provided' : 'undefined',
|
||||
})
|
||||
return true
|
||||
}
|
||||
```
|
||||
|
||||
### Measure Async Operation Timing
|
||||
|
||||
```ts
|
||||
export const timedAsyncAccess: Access = async ({ req }) => {
|
||||
const start = Date.now()
|
||||
|
||||
const result = await fetch('https://auth-service.example.com/validate', {
|
||||
headers: { 'x-user-id': String(req.user?.id ?? '') }, // header values must be strings
|
||||
})
|
||||
|
||||
console.log(`Access check took ${Date.now() - start}ms`)
|
||||
|
||||
return result.ok
|
||||
}
|
||||
```
|
||||
|
||||
### Test Access Without User
|
||||
|
||||
```ts
|
||||
// In test/development
|
||||
const testAccess = await payload.find({
|
||||
collection: 'posts',
|
||||
overrideAccess: false, // Enforce access control
|
||||
user: undefined, // Simulate no user
|
||||
})
|
||||
|
||||
console.log('Public access result:', testAccess.docs.length)
|
||||
```
|
||||
|
||||
**Source**: Synthesized (debugging best practices)
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
### Async Operations Impact
|
||||
|
||||
```ts
|
||||
// ❌ Slow: Multiple sequential async calls
|
||||
export const slowAccess: Access = async ({ req, req: { user } }) => {
|
||||
const org = await req.payload.findByID({ collection: 'orgs', id: user.orgId })
|
||||
const team = await req.payload.findByID({ collection: 'teams', id: user.teamId })
|
||||
const subscription = await req.payload.findByID({ collection: 'subs', id: user.subId })
|
||||
|
||||
return org.active && team.active && subscription.active
|
||||
}
|
||||
|
||||
// ✅ Fast: Use query constraints or cache in context
|
||||
export const fastAccess: Access = ({ req: { user, context } }) => {
|
||||
// Cache expensive lookups
|
||||
if (context.orgStatus === undefined) {
|
||||
context.orgStatus = checkOrgStatus(user.orgId)
|
||||
}
|
||||
|
||||
return context.orgStatus
|
||||
}
|
||||
```
|
||||
|
||||
### Query Constraint Optimization
|
||||
|
||||
```ts
|
||||
// ❌ Avoid: Non-indexed fields in constraints
|
||||
export const slowQuery: Access = () => ({
|
||||
'metadata.internalCode': { equals: 'ABC123' }, // Slow if not indexed
|
||||
})
|
||||
|
||||
// ✅ Better: Use indexed fields
|
||||
export const fastQuery: Access = () => ({
|
||||
status: { equals: 'active' }, // Indexed field
|
||||
organizationId: { in: ['org1', 'org2'] }, // Indexed field
|
||||
})
|
||||
```
|
||||
|
||||
### Field Access on Large Arrays
|
||||
|
||||
```ts
|
||||
// ❌ Slow: Complex access on array fields
|
||||
const arrayField: ArrayField = {
|
||||
name: 'items',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'secretData',
|
||||
type: 'text',
|
||||
access: {
|
||||
read: async ({ req }) => {
|
||||
// Async call runs for EVERY array item
|
||||
const result = await expensiveCheck()
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
// ✅ Fast: Simple checks or cache result
|
||||
const optimizedArrayField: ArrayField = {
|
||||
name: 'items',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'secretData',
|
||||
type: 'text',
|
||||
access: {
|
||||
read: ({ req: { user }, context }) => {
|
||||
// Cache once, reuse for all items
|
||||
if (context.canReadSecret === undefined) {
|
||||
context.canReadSecret = user?.roles?.includes('admin')
|
||||
}
|
||||
return context.canReadSecret
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
### Avoid N+1 Queries
|
||||
|
||||
```ts
|
||||
// ❌ N+1 Problem: Query per access check
|
||||
export const n1Access: Access = async ({ req, id }) => {
|
||||
// Runs for EACH document in list
|
||||
const doc = await req.payload.findByID({ collection: 'docs', id })
|
||||
return doc.isPublic
|
||||
}
|
||||
|
||||
// ✅ Better: Use query constraint to filter at DB level
|
||||
export const efficientAccess: Access = () => {
|
||||
return { isPublic: { equals: true } }
|
||||
}
|
||||
```
|
||||
|
||||
**Performance Best Practices:**
|
||||
|
||||
1. **Minimize Async Operations**: Use query constraints over async lookups when possible
|
||||
2. **Cache Expensive Checks**: Store results in `req.context` for reuse
|
||||
3. **Index Query Fields**: Ensure fields in query constraints are indexed
|
||||
4. **Avoid Complex Logic in Array Fields**: Simple boolean checks preferred
|
||||
5. **Use Query Constraints**: Let database filter rather than loading all records
|
||||
|
||||
**Source**: Synthesized (operational best practices)
|
||||
|
||||
## Enhanced Best Practices
|
||||
|
||||
Comprehensive security and implementation guidelines:
|
||||
|
||||
1. **Default Deny**: Start with restrictive access, gradually add permissions
|
||||
2. **Type Guards**: Use TypeScript for user type safety and better IDE support
|
||||
3. **Validate Data**: Never trust frontend-provided IDs or data
|
||||
4. **Async for Critical Checks**: Use async operations for important security decisions
|
||||
5. **Consistent Logic**: Apply same rules at field and collection levels
|
||||
6. **Test Edge Cases**: Test with no user, wrong user, admin user scenarios
|
||||
7. **Monitor Access**: Log failed access attempts for security review
|
||||
8. **Regular Audit**: Review access rules quarterly or after major changes
|
||||
9. **Cache Wisely**: Use `req.context` for expensive operations
|
||||
10. **Document Intent**: Add comments explaining complex access rules
|
||||
11. **Avoid Secrets in Client**: Never expose sensitive logic to client-side
|
||||
12. **Rate Limit External Calls**: Protect against DoS on external validation services
|
||||
13. **Handle Errors Gracefully**: Access functions should return `false` on error, not throw
|
||||
14. **Use Environment Vars**: Store configuration (IPs, API keys) in env vars
|
||||
15. **Test Local API**: Remember to set `overrideAccess: false` when testing
|
||||
16. **Consider Performance**: Measure impact of async operations on login time
|
||||
17. **Version Control**: Track access control changes in git history
|
||||
18. **Principle of Least Privilege**: Grant minimum access required for functionality
|
||||
|
||||
**Sources**: `docs/access-control/*.mdx`, synthesized best practices
|
||||
697
.claude/skills/payload/reference/ACCESS-CONTROL.md
Normal file
697
.claude/skills/payload/reference/ACCESS-CONTROL.md
Normal file
@@ -0,0 +1,697 @@
|
||||
# Payload CMS Access Control Reference
|
||||
|
||||
Complete reference for access control patterns across collections, fields, and globals.
|
||||
|
||||
## At a Glance
|
||||
|
||||
| Feature | Scope | Returns | Use Case |
|
||||
| --------------------- | --------------------------------------------------------- | ---------------------- | ---------------------------------- |
|
||||
| **Collection Access** | create, read, update, delete, admin, unlock, readVersions | boolean \| Where query | Document-level permissions |
|
||||
| **Field Access** | create, read, update | boolean only | Field-level visibility/editability |
|
||||
| **Global Access** | read, update, readVersions | boolean \| Where query | Global document permissions |
|
||||
|
||||
## Three Layers of Access Control
|
||||
|
||||
Payload provides three distinct access control layers:
|
||||
|
||||
1. **Collection-Level**: Controls operations on entire documents (create, read, update, delete, admin, unlock, readVersions)
|
||||
2. **Field-Level**: Controls access to individual fields (create, read, update)
|
||||
3. **Global-Level**: Controls access to global documents (read, update, readVersions)
|
||||
|
||||
## Return Value Types
|
||||
|
||||
Access control functions can return:
|
||||
|
||||
- **Boolean**: `true` (allow) or `false` (deny)
|
||||
- **Query Constraint**: `Where` object for row-level security (collection-level only)
|
||||
|
||||
Field-level access does NOT support query constraints - only boolean returns.
|
||||
|
||||
## Operation Decision Tree
|
||||
|
||||
```txt
|
||||
User makes request
|
||||
│
|
||||
├─ Collection access check
|
||||
│ ├─ Returns false? → Deny entire operation
|
||||
│ ├─ Returns true? → Continue
|
||||
│ └─ Returns Where? → Apply query constraint
|
||||
│
|
||||
├─ Field access check (if applicable)
|
||||
│ ├─ Returns false? → Field omitted from result
|
||||
│ └─ Returns true? → Include field
|
||||
│
|
||||
└─ Operation completed
|
||||
```
|
||||
|
||||
## Collection Access Control
|
||||
|
||||
### Basic Patterns
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig, Access } from 'payload'
|
||||
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
access: {
|
||||
// Boolean: Only authenticated users can create
|
||||
create: ({ req: { user } }) => Boolean(user),
|
||||
|
||||
// Query constraint: Public sees published, users see all
|
||||
read: ({ req: { user } }) => {
|
||||
if (user) return true
|
||||
return { status: { equals: 'published' } }
|
||||
},
|
||||
|
||||
// User-specific: Admins or document owner
|
||||
update: ({ req: { user }, id }) => {
|
||||
if (user?.roles?.includes('admin')) return true
|
||||
return { author: { equals: user?.id } }
|
||||
},
|
||||
|
||||
// Async: Check related data
|
||||
delete: async ({ req, id }) => {
|
||||
const hasComments = await req.payload.count({
|
||||
collection: 'comments',
|
||||
where: { post: { equals: id } },
|
||||
})
|
||||
      return hasComments.totalDocs === 0
|
||||
},
|
||||
|
||||
// Admin panel visibility
|
||||
admin: ({ req: { user } }) => {
|
||||
return user?.roles?.includes('admin') || user?.roles?.includes('editor')
|
||||
},
|
||||
},
|
||||
fields: [
|
||||
{ name: 'title', type: 'text' },
|
||||
{ name: 'status', type: 'select', options: ['draft', 'published'] },
|
||||
{ name: 'author', type: 'relationship', relationTo: 'users' },
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
### Role-Based Access Control (RBAC) Pattern
|
||||
|
||||
Payload does NOT provide a roles system by default. The following is a commonly accepted pattern for implementing role-based access control in auth collections:
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const Users: CollectionConfig = {
|
||||
slug: 'users',
|
||||
auth: true,
|
||||
fields: [
|
||||
{ name: 'name', type: 'text', required: true },
|
||||
{ name: 'email', type: 'email', required: true },
|
||||
{
|
||||
name: 'roles',
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['admin', 'editor', 'user'],
|
||||
defaultValue: ['user'],
|
||||
required: true,
|
||||
// Save roles to JWT for access control without database lookups
|
||||
saveToJWT: true,
|
||||
access: {
|
||||
// Only admins can update roles
|
||||
update: ({ req: { user } }) => user?.roles?.includes('admin'),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
**Important Notes:**
|
||||
|
||||
1. **Not Built-In**: Payload does not provide a roles system out of the box. You must add a `roles` field to your auth collection.
|
||||
2. **Save to JWT**: Use `saveToJWT: true` to include roles in the JWT token, enabling role checks without database queries.
|
||||
3. **Default Value**: Set a `defaultValue` to automatically assign new users a default role.
|
||||
4. **Access Control**: Restrict who can modify roles (typically only admins).
|
||||
5. **Role Options**: Define your own role hierarchy based on your application needs.
|
||||
|
||||
**Using Roles in Access Control:**
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
// Check for specific role
|
||||
export const adminOnly: Access = ({ req: { user } }) => {
|
||||
return user?.roles?.includes('admin')
|
||||
}
|
||||
|
||||
// Check for multiple roles
|
||||
export const adminOrEditor: Access = ({ req: { user } }) => {
|
||||
return Boolean(user?.roles?.some((role) => ['admin', 'editor'].includes(role)))
|
||||
}
|
||||
|
||||
// Role hierarchy check
|
||||
export const hasMinimumRole = (minRole: string): Access => ({ req: { user } }) => {
|
||||
const roleHierarchy = ['user', 'editor', 'admin']
|
||||
const userHighestRole = Math.max(...(user?.roles?.map((r) => roleHierarchy.indexOf(r)) || [-1]))
|
||||
const requiredRoleIndex = roleHierarchy.indexOf(minRole)
|
||||
|
||||
return userHighestRole >= requiredRoleIndex
|
||||
}
|
||||
```
|
||||
|
||||
### Reusable Access Functions
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
// Anyone (public)
|
||||
export const anyone: Access = () => true
|
||||
|
||||
// Authenticated only
|
||||
export const authenticated: Access = ({ req: { user } }) => Boolean(user)
|
||||
|
||||
// Authenticated or published content
|
||||
export const authenticatedOrPublished: Access = ({ req: { user } }) => {
|
||||
if (user) return true
|
||||
return { _status: { equals: 'published' } }
|
||||
}
|
||||
|
||||
// Admin only
|
||||
export const admins: Access = ({ req: { user } }) => {
|
||||
return user?.roles?.includes('admin')
|
||||
}
|
||||
|
||||
// Admin or editor
|
||||
export const adminsOrEditors: Access = ({ req: { user } }) => {
|
||||
return Boolean(user?.roles?.some((role) => ['admin', 'editor'].includes(role)))
|
||||
}
|
||||
|
||||
// Self or admin
|
||||
export const adminsOrSelf: Access = ({ req: { user } }) => {
|
||||
if (user?.roles?.includes('admin')) return true
|
||||
return { id: { equals: user?.id } }
|
||||
}
|
||||
|
||||
// Usage
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
access: {
|
||||
create: authenticated,
|
||||
read: authenticatedOrPublished,
|
||||
update: adminsOrEditors,
|
||||
delete: admins,
|
||||
},
|
||||
fields: [{ name: 'title', type: 'text' }],
|
||||
}
|
||||
```
|
||||
|
||||
### Row-Level Security with Complex Queries
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
// Organization-scoped access
|
||||
export const organizationScoped: Access = ({ req: { user } }) => {
|
||||
if (user?.roles?.includes('admin')) return true
|
||||
|
||||
// Users see only their organization's data
|
||||
return {
|
||||
organization: {
|
||||
equals: user?.organization,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Multiple conditions with AND
|
||||
export const complexAccess: Access = ({ req: { user } }) => {
|
||||
return {
|
||||
and: [
|
||||
{ status: { equals: 'published' } },
|
||||
{ 'author.isActive': { equals: true } },
|
||||
{
|
||||
or: [{ visibility: { equals: 'public' } }, { author: { equals: user?.id } }],
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
// Team-based access
|
||||
export const teamMemberAccess: Access = ({ req: { user } }) => {
|
||||
if (!user) return false
|
||||
if (user.roles?.includes('admin')) return true
|
||||
|
||||
return {
|
||||
'team.members': {
|
||||
contains: user.id,
|
||||
},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Header-Based Access (API Keys)
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
export const apiKeyAccess: Access = ({ req }) => {
|
||||
const apiKey = req.headers.get('x-api-key')
|
||||
|
||||
if (!apiKey) return false
|
||||
|
||||
// Validate against stored keys
|
||||
return apiKey === process.env.VALID_API_KEY
|
||||
}
|
||||
|
||||
// Bearer token validation
|
||||
export const bearerTokenAccess: Access = async ({ req }) => {
|
||||
const auth = req.headers.get('authorization')
|
||||
|
||||
if (!auth?.startsWith('Bearer ')) return false
|
||||
|
||||
const token = auth.slice(7)
|
||||
const isValid = await validateToken(token)
|
||||
|
||||
return isValid
|
||||
}
|
||||
```
|
||||
|
||||
## Field Access Control
|
||||
|
||||
Field access does NOT support query constraints - only boolean returns.
|
||||
|
||||
### Basic Field Access
|
||||
|
||||
```ts
|
||||
import type { NumberField, FieldAccess } from 'payload'
|
||||
|
||||
const salaryReadAccess: FieldAccess = ({ req: { user }, doc }) => {
|
||||
// Self can read own salary
|
||||
if (user?.id === doc?.id) return true
|
||||
// Admin can read all salaries
|
||||
return user?.roles?.includes('admin')
|
||||
}
|
||||
|
||||
const salaryUpdateAccess: FieldAccess = ({ req: { user } }) => {
|
||||
// Only admins can update salary
|
||||
return user?.roles?.includes('admin')
|
||||
}
|
||||
|
||||
const salaryField: NumberField = {
|
||||
name: 'salary',
|
||||
type: 'number',
|
||||
access: {
|
||||
read: salaryReadAccess,
|
||||
update: salaryUpdateAccess,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Sibling Data Access
|
||||
|
||||
```ts
|
||||
import type { ArrayField, FieldAccess } from 'payload'
|
||||
|
||||
const contentReadAccess: FieldAccess = ({ req: { user }, siblingData }) => {
|
||||
// Authenticated users see all
|
||||
if (user) return true
|
||||
// Public sees only if marked public
|
||||
return siblingData?.isPublic === true
|
||||
}
|
||||
|
||||
const arrayField: ArrayField = {
|
||||
name: 'sections',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'isPublic',
|
||||
type: 'checkbox',
|
||||
defaultValue: false,
|
||||
},
|
||||
{
|
||||
name: 'content',
|
||||
type: 'text',
|
||||
access: {
|
||||
read: contentReadAccess,
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
### Nested Field Access
|
||||
|
||||
```ts
|
||||
import type { GroupField, FieldAccess } from 'payload'
|
||||
|
||||
const internalOnlyAccess: FieldAccess = ({ req: { user } }) => {
|
||||
return user?.roles?.includes('admin') || user?.roles?.includes('internal')
|
||||
}
|
||||
|
||||
const groupField: GroupField = {
|
||||
name: 'internalMetadata',
|
||||
type: 'group',
|
||||
access: {
|
||||
read: internalOnlyAccess,
|
||||
update: internalOnlyAccess,
|
||||
},
|
||||
fields: [
|
||||
{ name: 'internalNotes', type: 'textarea' },
|
||||
{ name: 'priority', type: 'select', options: ['low', 'medium', 'high'] },
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
### Hiding Admin Fields
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const Users: CollectionConfig = {
|
||||
slug: 'users',
|
||||
auth: true,
|
||||
fields: [
|
||||
{ name: 'name', type: 'text', required: true },
|
||||
{ name: 'email', type: 'email', required: true },
|
||||
{
|
||||
name: 'roles',
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['admin', 'editor', 'user'],
|
||||
access: {
|
||||
// Hide from UI, but still saved/queried
|
||||
read: ({ req: { user } }) => user?.roles?.includes('admin'),
|
||||
// Only admins can update roles
|
||||
update: ({ req: { user } }) => user?.roles?.includes('admin'),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Global Access Control
|
||||
|
||||
```ts
|
||||
import type { GlobalConfig, Access } from 'payload'
|
||||
|
||||
const adminOnly: Access = ({ req: { user } }) => {
|
||||
return user?.roles?.includes('admin')
|
||||
}
|
||||
|
||||
export const SiteSettings: GlobalConfig = {
|
||||
slug: 'site-settings',
|
||||
access: {
|
||||
read: () => true, // Anyone can read settings
|
||||
update: adminOnly, // Only admins can update
|
||||
readVersions: adminOnly, // Only admins can see version history
|
||||
},
|
||||
fields: [
|
||||
{ name: 'siteName', type: 'text' },
|
||||
{ name: 'maintenanceMode', type: 'checkbox' },
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Multi-Tenant Access Control
|
||||
|
||||
```ts
|
||||
import type { Access, CollectionConfig } from 'payload'
|
||||
|
||||
// Add tenant field to user type
|
||||
interface User {
|
||||
id: string
|
||||
tenantId: string
|
||||
roles?: string[]
|
||||
}
|
||||
|
||||
// Tenant-scoped access
|
||||
const tenantAccess: Access = ({ req: { user } }) => {
|
||||
// No user = no access
|
||||
if (!user) return false
|
||||
|
||||
// Super admin sees all
|
||||
if (user.roles?.includes('super-admin')) return true
|
||||
|
||||
// Users see only their tenant's data
|
||||
return {
|
||||
tenant: {
|
||||
equals: (user as User).tenantId,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
access: {
|
||||
create: tenantAccess,
|
||||
read: tenantAccess,
|
||||
update: tenantAccess,
|
||||
delete: tenantAccess,
|
||||
},
|
||||
fields: [
|
||||
{ name: 'title', type: 'text' },
|
||||
{
|
||||
name: 'tenant',
|
||||
type: 'text',
|
||||
required: true,
|
||||
access: {
|
||||
// Tenant field hidden from non-admins
|
||||
update: ({ req: { user } }) => user?.roles?.includes('super-admin'),
|
||||
},
|
||||
hooks: {
|
||||
// Auto-set tenant on create
|
||||
beforeChange: [
|
||||
({ req, operation, value }) => {
|
||||
if (operation === 'create' && !value) {
|
||||
return (req.user as User)?.tenantId
|
||||
}
|
||||
return value
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Auth Collection Patterns
|
||||
|
||||
### Self or Admin Pattern
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const Users: CollectionConfig = {
|
||||
slug: 'users',
|
||||
auth: true,
|
||||
access: {
|
||||
// Anyone can read user profiles
|
||||
read: () => true,
|
||||
|
||||
// Users can update themselves, admins can update anyone
|
||||
update: ({ req: { user }, id }) => {
|
||||
if (user?.roles?.includes('admin')) return true
|
||||
return user?.id === id
|
||||
},
|
||||
|
||||
// Only admins can delete
|
||||
delete: ({ req: { user } }) => user?.roles?.includes('admin'),
|
||||
},
|
||||
fields: [
|
||||
{ name: 'name', type: 'text' },
|
||||
{ name: 'email', type: 'email' },
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
### Restrict Self-Updates
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig, FieldAccess } from 'payload'
|
||||
|
||||
const preventSelfRoleChange: FieldAccess = ({ req: { user }, id }) => {
|
||||
// Admins can change anyone's roles
|
||||
if (user?.roles?.includes('admin')) return true
|
||||
// Users cannot change their own roles
|
||||
if (user?.id === id) return false
|
||||
return false
|
||||
}
|
||||
|
||||
export const Users: CollectionConfig = {
|
||||
slug: 'users',
|
||||
auth: true,
|
||||
fields: [
|
||||
{
|
||||
name: 'roles',
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['admin', 'editor', 'user'],
|
||||
access: {
|
||||
update: preventSelfRoleChange,
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Cross-Collection Validation
|
||||
|
||||
```ts
|
||||
import type { Access } from 'payload'
|
||||
|
||||
// Check if user is a project member before allowing access
|
||||
export const projectMemberAccess: Access = async ({ req, id }) => {
|
||||
const { user, payload } = req
|
||||
|
||||
if (!user) return false
|
||||
if (user.roles?.includes('admin')) return true
|
||||
|
||||
// Check if document exists and user is member
|
||||
const project = await payload.findByID({
|
||||
collection: 'projects',
|
||||
id: id as string,
|
||||
depth: 0,
|
||||
})
|
||||
|
||||
return project.members?.includes(user.id)
|
||||
}
|
||||
|
||||
// Prevent deletion if document has dependencies
|
||||
export const preventDeleteWithDependencies: Access = async ({ req, id }) => {
|
||||
const { payload } = req
|
||||
|
||||
const dependencyCount = await payload.count({
|
||||
collection: 'related-items',
|
||||
where: {
|
||||
parent: { equals: id },
|
||||
},
|
||||
})
|
||||
|
||||
  return dependencyCount.totalDocs === 0
|
||||
}
|
||||
```
|
||||
|
||||
## Access Control Function Arguments
|
||||
|
||||
### Collection Create
|
||||
|
||||
```ts
|
||||
create: ({ req, data }) => boolean
|
||||
|
||||
// req: PayloadRequest
|
||||
// - req.user: Authenticated user (if any)
|
||||
// - req.payload: Payload instance for queries
|
||||
// - req.headers: Request headers
|
||||
// - req.locale: Current locale
|
||||
// data: The data being created
|
||||
```
|
||||
|
||||
### Collection Read
|
||||
|
||||
```ts
|
||||
read: ({ req, id }) => boolean | Where
|
||||
|
||||
// req: PayloadRequest
|
||||
// id: Document ID being read
|
||||
// - undefined during Access Operation (login check)
|
||||
// - string when reading specific document
|
||||
```
|
||||
|
||||
### Collection Update
|
||||
|
||||
```ts
|
||||
update: ({ req, id, data }) => boolean | Where
|
||||
|
||||
// req: PayloadRequest
|
||||
// id: Document ID being updated
|
||||
// data: New values being applied
|
||||
```
|
||||
|
||||
### Collection Delete
|
||||
|
||||
```ts
|
||||
delete: ({ req, id }) => boolean | Where
|
||||
|
||||
// req: PayloadRequest
|
||||
// id: Document ID being deleted
|
||||
```
|
||||
|
||||
### Field Create
|
||||
|
||||
```ts
|
||||
access: {
|
||||
create: ({ req, data, siblingData }) => boolean
|
||||
}
|
||||
|
||||
// req: PayloadRequest
|
||||
// data: Full document data
|
||||
// siblingData: Adjacent field values at same level
|
||||
```
|
||||
|
||||
### Field Read
|
||||
|
||||
```ts
|
||||
access: {
|
||||
read: ({ req, id, doc, siblingData }) => boolean
|
||||
}
|
||||
|
||||
// req: PayloadRequest
|
||||
// id: Document ID
|
||||
// doc: Full document
|
||||
// siblingData: Adjacent field values
|
||||
```
|
||||
|
||||
### Field Update
|
||||
|
||||
```ts
|
||||
access: {
|
||||
update: ({ req, id, data, doc, siblingData }) => boolean
|
||||
}
|
||||
|
||||
// req: PayloadRequest
|
||||
// id: Document ID
|
||||
// data: New values
|
||||
// doc: Current document
|
||||
// siblingData: Adjacent field values
|
||||
```
|
||||
|
||||
## Important Notes
|
||||
|
||||
1. **Local API Default**: Access control is **skipped by default** in Local API (`overrideAccess: true`). When passing a `user` parameter, you almost always want to set `overrideAccess: false` to respect that user's permissions:
|
||||
|
||||
```ts
|
||||
// ❌ WRONG: Passes user but bypasses access control (default behavior)
|
||||
await payload.find({
|
||||
collection: 'posts',
|
||||
user: someUser, // User is ignored for access control!
|
||||
})
|
||||
|
||||
// ✅ CORRECT: Respects the user's permissions
|
||||
await payload.find({
|
||||
collection: 'posts',
|
||||
user: someUser,
|
||||
overrideAccess: false, // Required to enforce access control
|
||||
})
|
||||
```
|
||||
|
||||
**Why this matters**: If you pass `user` without `overrideAccess: false`, the operation runs with admin privileges regardless of the user's actual permissions. This is a common security mistake.
|
||||
|
||||
2. **Field Access Limitations**: Field-level access does NOT support query constraints - only boolean returns.
|
||||
|
||||
3. **Admin Panel Visibility**: The `admin` access control determines if a collection appears in the admin panel for a user.
|
||||
|
||||
4. **Access Before Hooks**: Access control executes BEFORE hooks run, so hooks cannot modify access behavior.
|
||||
|
||||
5. **Query Constraints**: Collection-level `read`, `update`, and `delete` access support query constraints. Collection-level `create` and all field-level access require boolean returns.
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Reusable Functions**: Create named access functions for common patterns
|
||||
2. **Fail Secure**: Default to `false` for sensitive operations
|
||||
3. **Cache Checks**: Use `req.context` to cache expensive validation
|
||||
4. **Type Safety**: Type your user object for better IDE support
|
||||
5. **Test Thoroughly**: Write tests for complex access control logic
|
||||
6. **Document Intent**: Add comments explaining access rules
|
||||
7. **Audit Logs**: Track access control decisions for security review
|
||||
8. **Performance**: Avoid N+1 queries in access functions
|
||||
9. **Error Handling**: Access functions should not throw - return `false` instead
|
||||
10. **Tenant Hooks**: Auto-set tenant fields in `beforeChange` hooks
|
||||
|
||||
## Advanced Patterns
|
||||
|
||||
For advanced access control patterns including context-aware access, time-based restrictions, subscription-based access, factory functions, configuration templates, debugging tips, and performance optimization, see [ACCESS-CONTROL-ADVANCED.md](ACCESS-CONTROL-ADVANCED.md).
|
||||
326
.claude/skills/payload/reference/ADAPTERS.md
Normal file
326
.claude/skills/payload/reference/ADAPTERS.md
Normal file
@@ -0,0 +1,326 @@
|
||||
# Payload CMS Adapters Reference
|
||||
|
||||
Complete reference for database, storage, and email adapters.
|
||||
|
||||
## Database Adapters
|
||||
|
||||
### MongoDB
|
||||
|
||||
```ts
|
||||
import { mongooseAdapter } from '@payloadcms/db-mongodb'
|
||||
|
||||
export default buildConfig({
|
||||
db: mongooseAdapter({
|
||||
url: process.env.DATABASE_URL,
|
||||
}),
|
||||
})
|
||||
```
|
||||
|
||||
### Postgres
|
||||
|
||||
```ts
|
||||
import { postgresAdapter } from '@payloadcms/db-postgres'
|
||||
|
||||
export default buildConfig({
|
||||
db: postgresAdapter({
|
||||
pool: {
|
||||
connectionString: process.env.DATABASE_URL,
|
||||
},
|
||||
push: false, // Don't auto-push schema changes
|
||||
migrationDir: './migrations',
|
||||
}),
|
||||
})
|
||||
```
|
||||
|
||||
### SQLite
|
||||
|
||||
```ts
|
||||
import { sqliteAdapter } from '@payloadcms/db-sqlite'
|
||||
|
||||
export default buildConfig({
|
||||
db: sqliteAdapter({
|
||||
client: {
|
||||
url: 'file:./payload.db',
|
||||
},
|
||||
transactionOptions: {}, // Enable transactions (disabled by default)
|
||||
}),
|
||||
})
|
||||
```
|
||||
|
||||
## Transactions
|
||||
|
||||
Payload automatically uses transactions for all-or-nothing database operations. Pass `req` to include operations in the same transaction.
|
||||
|
||||
```ts
|
||||
import type { CollectionAfterChangeHook } from 'payload'
|
||||
|
||||
const afterChange: CollectionAfterChangeHook = async ({ req, doc }) => {
|
||||
// This will be part of the same transaction
|
||||
await req.payload.create({
|
||||
req, // Pass req to use same transaction
|
||||
collection: 'audit-log',
|
||||
data: { action: 'created', docId: doc.id },
|
||||
})
|
||||
}
|
||||
|
||||
// Manual transaction control
|
||||
const transactionID = await payload.db.beginTransaction()
|
||||
try {
|
||||
await payload.create({
|
||||
collection: 'orders',
|
||||
data: orderData,
|
||||
req: { transactionID },
|
||||
})
|
||||
await payload.update({
|
||||
collection: 'inventory',
|
||||
id: itemId,
|
||||
data: { stock: newStock },
|
||||
req: { transactionID },
|
||||
})
|
||||
await payload.db.commitTransaction(transactionID)
|
||||
} catch (error) {
|
||||
await payload.db.rollbackTransaction(transactionID)
|
||||
throw error
|
||||
}
|
||||
```
|
||||
|
||||
**Note**: MongoDB requires replicaset for transactions. SQLite requires `transactionOptions: {}` to enable.
|
||||
|
||||
### Threading req Through Operations
|
||||
|
||||
**Critical**: When performing nested operations in hooks, always pass `req` to maintain transaction context. Failing to do so breaks atomicity and can cause partial updates.
|
||||
|
||||
```ts
|
||||
import type { CollectionAfterChangeHook } from 'payload'
|
||||
|
||||
// ✅ CORRECT: Thread req through nested operations
|
||||
const resaveChildren: CollectionAfterChangeHook = async ({ collection, doc, req }) => {
|
||||
// Find children - pass req
|
||||
const children = await req.payload.find({
|
||||
collection: 'children',
|
||||
where: { parent: { equals: doc.id } },
|
||||
req, // Maintains transaction context
|
||||
})
|
||||
|
||||
// Update each child - pass req
|
||||
for (const child of children.docs) {
|
||||
await req.payload.update({
|
||||
id: child.id,
|
||||
collection: 'children',
|
||||
data: { updatedField: 'value' },
|
||||
req, // Same transaction as parent operation
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// ❌ WRONG: Missing req breaks transaction
|
||||
const brokenHook: CollectionAfterChangeHook = async ({ collection, doc, req }) => {
|
||||
const children = await req.payload.find({
|
||||
collection: 'children',
|
||||
where: { parent: { equals: doc.id } },
|
||||
// Missing req - separate transaction or no transaction
|
||||
})
|
||||
|
||||
for (const child of children.docs) {
|
||||
await req.payload.update({
|
||||
id: child.id,
|
||||
collection: 'children',
|
||||
data: { updatedField: 'value' },
|
||||
// Missing req - if parent operation fails, these updates persist
|
||||
})
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Why This Matters:**
|
||||
|
||||
- **MongoDB (with replica sets)**: Creates atomic session across operations
|
||||
- **PostgreSQL**: All operations use same Drizzle transaction
|
||||
- **SQLite (with transactions enabled)**: Ensures rollback on errors
|
||||
- **Without req**: Each operation runs independently, breaking atomicity
|
||||
|
||||
**When req is Required:**
|
||||
|
||||
- All mutating operations in hooks (create, update, delete)
|
||||
- Operations that must succeed/fail together
|
||||
- When using MongoDB replica sets or Postgres
|
||||
- Any operation that relies on `req.context` or `req.user`
|
||||
|
||||
**When req is Optional:**
|
||||
|
||||
- Read-only lookups independent of current transaction
|
||||
- Operations with `disableTransaction: true`
|
||||
- Administrative operations with `overrideAccess: true`
|
||||
|
||||
## Storage Adapters
|
||||
|
||||
Available storage adapters:
|
||||
|
||||
- **@payloadcms/storage-s3** - AWS S3
|
||||
- **@payloadcms/storage-azure** - Azure Blob Storage
|
||||
- **@payloadcms/storage-gcs** - Google Cloud Storage
|
||||
- **@payloadcms/storage-r2** - Cloudflare R2
|
||||
- **@payloadcms/storage-vercel-blob** - Vercel Blob
|
||||
- **@payloadcms/storage-uploadthing** - Uploadthing
|
||||
|
||||
### AWS S3
|
||||
|
||||
```ts
|
||||
import { s3Storage } from '@payloadcms/storage-s3'
|
||||
|
||||
export default buildConfig({
|
||||
plugins: [
|
||||
s3Storage({
|
||||
collections: {
|
||||
media: true,
|
||||
},
|
||||
bucket: process.env.S3_BUCKET,
|
||||
config: {
|
||||
credentials: {
|
||||
accessKeyId: process.env.S3_ACCESS_KEY_ID,
|
||||
secretAccessKey: process.env.S3_SECRET_ACCESS_KEY,
|
||||
},
|
||||
region: process.env.S3_REGION,
|
||||
},
|
||||
}),
|
||||
],
|
||||
})
|
||||
```
|
||||
|
||||
### Azure Blob Storage
|
||||
|
||||
```ts
|
||||
import { azureStorage } from '@payloadcms/storage-azure'
|
||||
|
||||
export default buildConfig({
|
||||
plugins: [
|
||||
azureStorage({
|
||||
collections: {
|
||||
media: true,
|
||||
},
|
||||
connectionString: process.env.AZURE_STORAGE_CONNECTION_STRING,
|
||||
containerName: process.env.AZURE_STORAGE_CONTAINER_NAME,
|
||||
}),
|
||||
],
|
||||
})
|
||||
```
|
||||
|
||||
### Google Cloud Storage
|
||||
|
||||
```ts
|
||||
import { gcsStorage } from '@payloadcms/storage-gcs'
|
||||
|
||||
export default buildConfig({
|
||||
plugins: [
|
||||
gcsStorage({
|
||||
collections: {
|
||||
media: true,
|
||||
},
|
||||
bucket: process.env.GCS_BUCKET,
|
||||
options: {
|
||||
projectId: process.env.GCS_PROJECT_ID,
|
||||
credentials: JSON.parse(process.env.GCS_CREDENTIALS),
|
||||
},
|
||||
}),
|
||||
],
|
||||
})
|
||||
```
|
||||
|
||||
### Cloudflare R2
|
||||
|
||||
```ts
|
||||
import { r2Storage } from '@payloadcms/storage-r2'
|
||||
|
||||
export default buildConfig({
|
||||
plugins: [
|
||||
r2Storage({
|
||||
collections: {
|
||||
media: true,
|
||||
},
|
||||
bucket: process.env.R2_BUCKET,
|
||||
config: {
|
||||
credentials: {
|
||||
accessKeyId: process.env.R2_ACCESS_KEY_ID,
|
||||
secretAccessKey: process.env.R2_SECRET_ACCESS_KEY,
|
||||
},
|
||||
region: 'auto',
|
||||
endpoint: process.env.R2_ENDPOINT,
|
||||
},
|
||||
}),
|
||||
],
|
||||
})
|
||||
```
|
||||
|
||||
### Vercel Blob
|
||||
|
||||
```ts
|
||||
import { vercelBlobStorage } from '@payloadcms/storage-vercel-blob'
|
||||
|
||||
export default buildConfig({
|
||||
plugins: [
|
||||
vercelBlobStorage({
|
||||
collections: {
|
||||
media: true,
|
||||
},
|
||||
token: process.env.BLOB_READ_WRITE_TOKEN,
|
||||
}),
|
||||
],
|
||||
})
|
||||
```
|
||||
|
||||
### Uploadthing
|
||||
|
||||
```ts
|
||||
import { uploadthingStorage } from '@payloadcms/storage-uploadthing'
|
||||
|
||||
export default buildConfig({
|
||||
plugins: [
|
||||
uploadthingStorage({
|
||||
collections: {
|
||||
media: true,
|
||||
},
|
||||
options: {
|
||||
token: process.env.UPLOADTHING_TOKEN,
|
||||
acl: 'public-read',
|
||||
},
|
||||
}),
|
||||
],
|
||||
})
|
||||
```
|
||||
|
||||
## Email Adapters
|
||||
|
||||
### Nodemailer (SMTP)
|
||||
|
||||
```ts
|
||||
import { nodemailerAdapter } from '@payloadcms/email-nodemailer'
|
||||
|
||||
export default buildConfig({
|
||||
email: nodemailerAdapter({
|
||||
defaultFromAddress: 'noreply@example.com',
|
||||
defaultFromName: 'My App',
|
||||
transportOptions: {
|
||||
host: process.env.SMTP_HOST,
|
||||
port: 587,
|
||||
auth: {
|
||||
user: process.env.SMTP_USER,
|
||||
pass: process.env.SMTP_PASS,
|
||||
},
|
||||
},
|
||||
}),
|
||||
})
|
||||
```
|
||||
|
||||
### Resend
|
||||
|
||||
```ts
|
||||
import { resendAdapter } from '@payloadcms/email-resend'
|
||||
|
||||
export default buildConfig({
|
||||
email: resendAdapter({
|
||||
defaultFromAddress: 'noreply@example.com',
|
||||
defaultFromName: 'My App',
|
||||
apiKey: process.env.RESEND_API_KEY,
|
||||
}),
|
||||
})
|
||||
```
|
||||
386
.claude/skills/payload/reference/ADVANCED.md
Normal file
386
.claude/skills/payload/reference/ADVANCED.md
Normal file
@@ -0,0 +1,386 @@
|
||||
# Payload CMS Advanced Features
|
||||
|
||||
Complete reference for authentication, jobs, custom endpoints, components, plugins, and localization.
|
||||
|
||||
## Authentication
|
||||
|
||||
### Login
|
||||
|
||||
```ts
|
||||
// REST API
|
||||
const response = await fetch('/api/users/login', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
email: 'user@example.com',
|
||||
password: 'password',
|
||||
}),
|
||||
})
|
||||
|
||||
// Local API
|
||||
const result = await payload.login({
|
||||
collection: 'users',
|
||||
data: {
|
||||
email: 'user@example.com',
|
||||
password: 'password',
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
### Forgot Password
|
||||
|
||||
```ts
|
||||
await payload.forgotPassword({
|
||||
collection: 'users',
|
||||
data: {
|
||||
email: 'user@example.com',
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
### Custom Strategy
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig, Strategy } from 'payload'
|
||||
|
||||
const customStrategy: Strategy = {
|
||||
name: 'custom',
|
||||
authenticate: async ({ payload, headers }) => {
|
||||
const token = headers.get('authorization')?.split(' ')[1]
|
||||
if (!token) return { user: null }
|
||||
|
||||
const user = await verifyToken(token)
|
||||
return { user }
|
||||
},
|
||||
}
|
||||
|
||||
export const Users: CollectionConfig = {
|
||||
slug: 'users',
|
||||
auth: {
|
||||
strategies: [customStrategy],
|
||||
},
|
||||
fields: [],
|
||||
}
|
||||
```
|
||||
|
||||
### API Keys
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const APIKeys: CollectionConfig = {
|
||||
slug: 'api-keys',
|
||||
auth: {
|
||||
disableLocalStrategy: true,
|
||||
useAPIKey: true,
|
||||
},
|
||||
fields: [],
|
||||
}
|
||||
```
|
||||
|
||||
## Jobs Queue
|
||||
|
||||
Offload long-running or scheduled tasks to background workers.
|
||||
|
||||
### Tasks
|
||||
|
||||
```ts
|
||||
import { buildConfig } from 'payload'
|
||||
import type { TaskConfig } from 'payload'
|
||||
|
||||
export default buildConfig({
|
||||
jobs: {
|
||||
tasks: [
|
||||
{
|
||||
slug: 'sendWelcomeEmail',
|
||||
inputSchema: [
|
||||
{ name: 'userEmail', type: 'text', required: true },
|
||||
{ name: 'userName', type: 'text', required: true },
|
||||
],
|
||||
outputSchema: [{ name: 'emailSent', type: 'checkbox', required: true }],
|
||||
retries: 2, // Retry up to 2 times on failure
|
||||
handler: async ({ input, req }) => {
|
||||
await sendEmail({
|
||||
to: input.userEmail,
|
||||
subject: `Welcome ${input.userName}`,
|
||||
})
|
||||
return { output: { emailSent: true } }
|
||||
},
|
||||
} as TaskConfig<'sendWelcomeEmail'>,
|
||||
],
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
### Queueing Jobs
|
||||
|
||||
```ts
|
||||
// In a hook or endpoint
|
||||
await req.payload.jobs.queue({
|
||||
task: 'sendWelcomeEmail',
|
||||
input: {
|
||||
userEmail: 'user@example.com',
|
||||
userName: 'John',
|
||||
},
|
||||
waitUntil: new Date('2024-12-31'), // Optional: schedule for future
|
||||
})
|
||||
```
|
||||
|
||||
### Workflows
|
||||
|
||||
Multi-step jobs that run in sequence:
|
||||
|
||||
```ts
|
||||
{
|
||||
slug: 'onboardUser',
|
||||
inputSchema: [{ name: 'userId', type: 'text' }],
|
||||
handler: async ({ job, req }) => {
|
||||
const results = await job.runInlineTask({
|
||||
task: async ({ input }) => {
|
||||
// Step 1: Send welcome email
|
||||
await sendEmail(input.userId)
|
||||
return { output: { emailSent: true } }
|
||||
},
|
||||
})
|
||||
|
||||
await job.runInlineTask({
|
||||
task: async () => {
|
||||
// Step 2: Create onboarding tasks
|
||||
await createTasks()
|
||||
return { output: { tasksCreated: true } }
|
||||
},
|
||||
})
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Custom Endpoints
|
||||
|
||||
Add custom REST API routes to collections, globals, or root config. See [ENDPOINTS.md](ENDPOINTS.md) for detailed patterns, authentication, helpers, and real-world examples.
|
||||
|
||||
### Root Endpoints
|
||||
|
||||
```ts
|
||||
import { buildConfig } from 'payload'
|
||||
import type { Endpoint } from 'payload'
|
||||
|
||||
const helloEndpoint: Endpoint = {
|
||||
path: '/hello',
|
||||
method: 'get',
|
||||
handler: () => {
|
||||
return Response.json({ message: 'Hello!' })
|
||||
},
|
||||
}
|
||||
|
||||
const greetEndpoint: Endpoint = {
|
||||
path: '/greet/:name',
|
||||
method: 'get',
|
||||
handler: (req) => {
|
||||
return Response.json({
|
||||
message: `Hello ${req.routeParams.name}!`,
|
||||
})
|
||||
},
|
||||
}
|
||||
|
||||
export default buildConfig({
|
||||
endpoints: [helloEndpoint, greetEndpoint],
|
||||
collections: [],
|
||||
secret: process.env.PAYLOAD_SECRET || '',
|
||||
})
|
||||
```
|
||||
|
||||
### Collection Endpoints
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig, Endpoint } from 'payload'
|
||||
|
||||
const featuredEndpoint: Endpoint = {
|
||||
path: '/featured',
|
||||
method: 'get',
|
||||
handler: async (req) => {
|
||||
const posts = await req.payload.find({
|
||||
collection: 'posts',
|
||||
where: { featured: { equals: true } },
|
||||
})
|
||||
return Response.json(posts)
|
||||
},
|
||||
}
|
||||
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
endpoints: [featuredEndpoint],
|
||||
fields: [
|
||||
{ name: 'title', type: 'text' },
|
||||
{ name: 'featured', type: 'checkbox' },
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Custom Components
|
||||
|
||||
### Field Component (Client)
|
||||
|
||||
```tsx
|
||||
'use client'
|
||||
import { useField } from '@payloadcms/ui'
|
||||
import type { TextFieldClientComponent } from 'payload'
|
||||
|
||||
export const CustomField: TextFieldClientComponent = () => {
|
||||
const { value, setValue } = useField()
|
||||
|
||||
return <input value={value || ''} onChange={(e) => setValue(e.target.value)} />
|
||||
}
|
||||
```
|
||||
|
||||
### Custom View
|
||||
|
||||
```tsx
|
||||
'use client'
|
||||
import { DefaultTemplate } from '@payloadcms/next/templates'
|
||||
|
||||
export const CustomView = () => {
|
||||
return (
|
||||
<DefaultTemplate>
|
||||
<h1>Custom Dashboard</h1>
|
||||
{/* Your content */}
|
||||
</DefaultTemplate>
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
### Admin Config
|
||||
|
||||
```ts
|
||||
import { buildConfig } from 'payload'
|
||||
|
||||
export default buildConfig({
|
||||
admin: {
|
||||
components: {
|
||||
beforeDashboard: ['/components/BeforeDashboard'],
|
||||
beforeLogin: ['/components/BeforeLogin'],
|
||||
views: {
|
||||
custom: {
|
||||
Component: '/views/Custom',
|
||||
path: '/custom',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
collections: [],
|
||||
secret: process.env.PAYLOAD_SECRET || '',
|
||||
})
|
||||
```
|
||||
|
||||
## Plugins
|
||||
|
||||
### Available Plugins
|
||||
|
||||
- **@payloadcms/plugin-seo** - SEO fields with meta title/description, Open Graph, preview generation
|
||||
- **@payloadcms/plugin-redirects** - Manage URL redirects (301/302) for Next.js apps
|
||||
- **@payloadcms/plugin-nested-docs** - Hierarchical document structures with breadcrumbs
|
||||
- **@payloadcms/plugin-form-builder** - Dynamic form builder with submissions and validation
|
||||
- **@payloadcms/plugin-search** - Full-text search integration (Algolia support)
|
||||
- **@payloadcms/plugin-stripe** - Stripe payments, subscriptions, webhooks
|
||||
- **@payloadcms/plugin-ecommerce** - Complete ecommerce solution (products, variants, carts, orders)
|
||||
- **@payloadcms/plugin-import-export** - Import/export data via CSV
|
||||
- **@payloadcms/plugin-multi-tenant** - Multi-tenancy with tenant isolation
|
||||
- **@payloadcms/plugin-sentry** - Sentry error tracking integration
|
||||
- **@payloadcms/plugin-mcp** - Model Context Protocol for AI integrations
|
||||
|
||||
### Using Plugins
|
||||
|
||||
```ts
|
||||
import { buildConfig } from 'payload'
|
||||
import { seoPlugin } from '@payloadcms/plugin-seo'
|
||||
import { redirectsPlugin } from '@payloadcms/plugin-redirects'
|
||||
|
||||
export default buildConfig({
|
||||
plugins: [
|
||||
seoPlugin({
|
||||
collections: ['posts', 'pages'],
|
||||
}),
|
||||
redirectsPlugin({
|
||||
collections: ['pages'],
|
||||
}),
|
||||
],
|
||||
collections: [],
|
||||
secret: process.env.PAYLOAD_SECRET || '',
|
||||
})
|
||||
```
|
||||
|
||||
### Creating Plugins
|
||||
|
||||
```ts
|
||||
import type { Config } from 'payload'
|
||||
|
||||
interface PluginOptions {
|
||||
enabled?: boolean
|
||||
}
|
||||
|
||||
export const myPlugin =
|
||||
(options: PluginOptions) =>
|
||||
(config: Config): Config => ({
|
||||
...config,
|
||||
collections: [
|
||||
...(config.collections || []),
|
||||
{
|
||||
slug: 'plugin-collection',
|
||||
fields: [{ name: 'title', type: 'text' }],
|
||||
},
|
||||
],
|
||||
onInit: async (payload) => {
|
||||
if (config.onInit) await config.onInit(payload)
|
||||
// Plugin initialization
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
## Localization
|
||||
|
||||
```ts
|
||||
import { buildConfig } from 'payload'
|
||||
import type { TextField } from 'payload'
|
||||
|
||||
export default buildConfig({
|
||||
localization: {
|
||||
locales: ['en', 'es', 'de'],
|
||||
defaultLocale: 'en',
|
||||
fallback: true,
|
||||
},
|
||||
collections: [],
|
||||
secret: process.env.PAYLOAD_SECRET || '',
|
||||
})
|
||||
|
||||
// Localized field
|
||||
const localizedField: TextField = {
|
||||
name: 'title',
|
||||
type: 'text',
|
||||
localized: true,
|
||||
}
|
||||
|
||||
// Query with locale
|
||||
const posts = await payload.find({
|
||||
collection: 'posts',
|
||||
locale: 'es',
|
||||
})
|
||||
```
|
||||
|
||||
## TypeScript Type References
|
||||
|
||||
For complete TypeScript type definitions and signatures, reference these files from the Payload source:
|
||||
|
||||
### Core Configuration Types
|
||||
|
||||
- **[All Commonly-Used Types](https://github.com/payloadcms/payload/blob/main/packages/payload/src/index.ts)** - Check here first for commonly used types and interfaces. All core types are exported from this file.
|
||||
|
||||
### Database & Adapters
|
||||
|
||||
- **[Database Adapter Types](https://github.com/payloadcms/payload/blob/main/packages/payload/src/database/types.ts)** - Base adapter interface
|
||||
- **[MongoDB Adapter](https://github.com/payloadcms/payload/blob/main/packages/db-mongodb/src/index.ts)** - MongoDB-specific options
|
||||
- **[Postgres Adapter](https://github.com/payloadcms/payload/blob/main/packages/db-postgres/src/index.ts)** - Postgres-specific options
|
||||
|
||||
### Rich Text & Plugins
|
||||
|
||||
- **[Lexical Types](https://github.com/payloadcms/payload/blob/main/packages/richtext-lexical/src/exports/server/index.ts)** - Lexical editor configuration
|
||||
|
||||
When users need detailed type information, fetch these URLs to provide complete signatures and optional parameters.
|
||||
303
.claude/skills/payload/reference/COLLECTIONS.md
Normal file
303
.claude/skills/payload/reference/COLLECTIONS.md
Normal file
@@ -0,0 +1,303 @@
|
||||
# Payload CMS Collections Reference
|
||||
|
||||
Complete reference for collection configurations and patterns.
|
||||
|
||||
## Basic Collection
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
labels: {
|
||||
singular: 'Post',
|
||||
plural: 'Posts',
|
||||
},
|
||||
admin: {
|
||||
useAsTitle: 'title',
|
||||
defaultColumns: ['title', 'author', 'status', 'createdAt'],
|
||||
group: 'Content', // Organize in admin sidebar
|
||||
description: 'Blog posts and articles',
|
||||
listSearchableFields: ['title', 'slug'],
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'title',
|
||||
type: 'text',
|
||||
required: true,
|
||||
index: true,
|
||||
},
|
||||
{
|
||||
name: 'slug',
|
||||
type: 'text',
|
||||
unique: true,
|
||||
index: true,
|
||||
admin: { position: 'sidebar' },
|
||||
},
|
||||
{
|
||||
name: 'status',
|
||||
type: 'select',
|
||||
options: ['draft', 'published'],
|
||||
defaultValue: 'draft',
|
||||
},
|
||||
],
|
||||
defaultSort: '-createdAt',
|
||||
timestamps: true,
|
||||
}
|
||||
```
|
||||
|
||||
## Auth Collection
|
||||
|
||||
```ts
|
||||
export const Users: CollectionConfig = {
|
||||
slug: 'users',
|
||||
auth: {
|
||||
tokenExpiration: 7200, // 2 hours
|
||||
verify: true,
|
||||
maxLoginAttempts: 5,
|
||||
lockTime: 600000, // 10 minutes
|
||||
useAPIKey: true,
|
||||
},
|
||||
admin: {
|
||||
useAsTitle: 'email',
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'roles',
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['admin', 'editor', 'user'],
|
||||
required: true,
|
||||
defaultValue: ['user'],
|
||||
saveToJWT: true,
|
||||
},
|
||||
{
|
||||
name: 'name',
|
||||
type: 'text',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Upload Collection
|
||||
|
||||
```ts
|
||||
export const Media: CollectionConfig = {
|
||||
slug: 'media',
|
||||
upload: {
|
||||
staticDir: 'media',
|
||||
mimeTypes: ['image/*'],
|
||||
imageSizes: [
|
||||
{
|
||||
name: 'thumbnail',
|
||||
width: 400,
|
||||
height: 300,
|
||||
position: 'centre',
|
||||
},
|
||||
{
|
||||
name: 'card',
|
||||
width: 768,
|
||||
height: 1024,
|
||||
},
|
||||
],
|
||||
adminThumbnail: 'thumbnail',
|
||||
focalPoint: true,
|
||||
crop: true,
|
||||
},
|
||||
access: {
|
||||
read: () => true,
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'alt',
|
||||
type: 'text',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: 'caption',
|
||||
type: 'text',
|
||||
localized: true,
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Live Preview
|
||||
|
||||
Enable real-time content preview during editing.
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
const generatePreviewPath = ({
|
||||
slug,
|
||||
collection,
|
||||
req,
|
||||
}: {
|
||||
slug: string
|
||||
collection: string
|
||||
req: any
|
||||
}) => {
|
||||
const baseUrl = process.env.NEXT_PUBLIC_SERVER_URL
|
||||
return `${baseUrl}/api/preview?slug=${slug}&collection=${collection}`
|
||||
}
|
||||
|
||||
export const Pages: CollectionConfig = {
|
||||
slug: 'pages',
|
||||
admin: {
|
||||
useAsTitle: 'title',
|
||||
// Live preview during editing
|
||||
livePreview: {
|
||||
url: ({ data, req }) =>
|
||||
generatePreviewPath({
|
||||
slug: data?.slug as string,
|
||||
collection: 'pages',
|
||||
req,
|
||||
}),
|
||||
},
|
||||
// Static preview button
|
||||
preview: (data, { req }) =>
|
||||
generatePreviewPath({
|
||||
slug: data?.slug as string,
|
||||
collection: 'pages',
|
||||
req,
|
||||
}),
|
||||
},
|
||||
fields: [
|
||||
{ name: 'title', type: 'text' },
|
||||
{ name: 'slug', type: 'text' },
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Versioning & Drafts
|
||||
|
||||
Payload maintains version history and supports draft/publish workflows.
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
// Basic versioning (audit log only)
|
||||
export const Users: CollectionConfig = {
|
||||
slug: 'users',
|
||||
versions: true, // or { maxPerDoc: 100 }
|
||||
fields: [{ name: 'name', type: 'text' }],
|
||||
}
|
||||
|
||||
// Drafts enabled (draft/publish workflow)
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
versions: {
|
||||
drafts: true, // Enables _status field
|
||||
maxPerDoc: 50,
|
||||
},
|
||||
fields: [{ name: 'title', type: 'text' }],
|
||||
}
|
||||
|
||||
// Full configuration with autosave and scheduled publish
|
||||
export const Pages: CollectionConfig = {
|
||||
slug: 'pages',
|
||||
versions: {
|
||||
drafts: {
|
||||
autosave: true, // Auto-save while editing
|
||||
schedulePublish: true, // Schedule future publish/unpublish
|
||||
validate: false, // Don't validate drafts (default)
|
||||
},
|
||||
maxPerDoc: 100, // Keep last 100 versions (0 = unlimited)
|
||||
},
|
||||
fields: [{ name: 'title', type: 'text' }],
|
||||
}
|
||||
```
|
||||
|
||||
### Draft API Usage
|
||||
|
||||
```ts
|
||||
// Create draft
|
||||
await payload.create({
|
||||
collection: 'posts',
|
||||
data: { title: 'Draft Post' },
|
||||
draft: true, // Saves as draft, skips required field validation
|
||||
})
|
||||
|
||||
// Update as draft
|
||||
await payload.update({
|
||||
collection: 'posts',
|
||||
id: '123',
|
||||
data: { title: 'Updated Draft' },
|
||||
draft: true,
|
||||
})
|
||||
|
||||
// Read with drafts (returns newest draft if available)
|
||||
const post = await payload.findByID({
|
||||
collection: 'posts',
|
||||
id: '123',
|
||||
draft: true, // Returns draft version if exists
|
||||
})
|
||||
|
||||
// Query only published (REST API)
|
||||
// GET /api/posts (returns only _status: 'published')
|
||||
|
||||
// Access control for drafts
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
versions: { drafts: true },
|
||||
access: {
|
||||
read: ({ req: { user } }) => {
|
||||
// Public can only see published
|
||||
if (!user) return { _status: { equals: 'published' } }
|
||||
// Authenticated can see all
|
||||
return true
|
||||
},
|
||||
},
|
||||
fields: [{ name: 'title', type: 'text' }],
|
||||
}
|
||||
```
|
||||
|
||||
### Document Status
|
||||
|
||||
The `_status` field is auto-injected when drafts are enabled:
|
||||
|
||||
- `draft` - Never published
|
||||
- `published` - Published with no newer drafts
|
||||
- `changed` - Published but has newer unpublished drafts
|
||||
|
||||
## Globals
|
||||
|
||||
Globals are single-instance documents (not collections).
|
||||
|
||||
```ts
|
||||
import type { GlobalConfig } from 'payload'
|
||||
|
||||
export const Header: GlobalConfig = {
|
||||
slug: 'header',
|
||||
label: 'Header',
|
||||
admin: {
|
||||
group: 'Settings',
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'logo',
|
||||
type: 'upload',
|
||||
relationTo: 'media',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: 'nav',
|
||||
type: 'array',
|
||||
maxRows: 8,
|
||||
fields: [
|
||||
{
|
||||
name: 'link',
|
||||
type: 'relationship',
|
||||
relationTo: 'pages',
|
||||
},
|
||||
{
|
||||
name: 'label',
|
||||
type: 'text',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
634
.claude/skills/payload/reference/ENDPOINTS.md
Normal file
634
.claude/skills/payload/reference/ENDPOINTS.md
Normal file
@@ -0,0 +1,634 @@
|
||||
# Payload Custom API Endpoints Reference
|
||||
|
||||
Custom REST API endpoints extend Payload's auto-generated CRUD operations with custom logic, authentication flows, webhooks, and integrations.
|
||||
|
||||
## Quick Reference
|
||||
|
||||
### Endpoint Configuration
|
||||
|
||||
| Property | Type | Description |
|
||||
| --------- | ------------------------------------------------- | --------------------------------------------------------------- |
|
||||
| `path` | `string` | Route path after collection/global slug (e.g., `/:id/tracking`) |
|
||||
| `method` | `'get' \| 'post' \| 'put' \| 'patch' \| 'delete'` | HTTP method (lowercase) |
|
||||
| `handler` | `(req: PayloadRequest) => Promise<Response>` | Async function returning Web API Response |
|
||||
| `custom` | `Record<string, any>` | Extension point for plugins/metadata |
|
||||
|
||||
### Request Context
|
||||
|
||||
| Property | Type | Description |
|
||||
| ----------------- | ----------------------- | ------------------------------------------------------ |
|
||||
| `req.user` | `User \| null` | Authenticated user (null if not authenticated) |
|
||||
| `req.payload` | `Payload` | Payload instance for operations (find, create...) |
|
||||
| `req.routeParams` | `Record<string, any>` | Path parameters (e.g., `:id`) |
|
||||
| `req.url` | `string` | Full request URL |
|
||||
| `req.method` | `string` | HTTP method |
|
||||
| `req.headers` | `Headers` | Request headers |
|
||||
| `req.json()` | `() => Promise<any>` | Parse JSON body |
|
||||
| `req.text()` | `() => Promise<string>` | Read body as text |
|
||||
| `req.data` | `any` | Parsed body (after `addDataAndFileToRequest()`) |
|
||||
| `req.file` | `File` | Uploaded file (after `addDataAndFileToRequest()`) |
|
||||
| `req.locale` | `string` | Request locale (after `addLocalesToRequestFromData()`) |
|
||||
| `req.i18n` | `I18n` | i18n instance |
|
||||
| `req.t` | `TFunction` | Translation function |
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Authentication Check
|
||||
|
||||
Custom endpoints are **not authenticated by default**. Check `req.user` to enforce authentication.
|
||||
|
||||
```ts
|
||||
import { APIError } from 'payload'
|
||||
|
||||
export const authenticatedEndpoint = {
|
||||
path: '/protected',
|
||||
method: 'get',
|
||||
handler: async (req) => {
|
||||
if (!req.user) {
|
||||
throw new APIError('Unauthorized', 401)
|
||||
}
|
||||
|
||||
// User is authenticated
|
||||
return Response.json({ message: 'Access granted' })
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Using Payload Operations
|
||||
|
||||
Use `req.payload` for database operations with access control and hooks.
|
||||
|
||||
```ts
|
||||
export const getRelatedPosts = {
|
||||
path: '/:id/related',
|
||||
method: 'get',
|
||||
handler: async (req) => {
|
||||
const { id } = req.routeParams
|
||||
|
||||
// Find related posts
|
||||
const posts = await req.payload.find({
|
||||
collection: 'posts',
|
||||
where: {
|
||||
category: {
|
||||
equals: id,
|
||||
},
|
||||
},
|
||||
limit: 5,
|
||||
sort: '-createdAt',
|
||||
})
|
||||
|
||||
return Response.json(posts)
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Route Parameters
|
||||
|
||||
Access path parameters via `req.routeParams`.
|
||||
|
||||
```ts
|
||||
export const getTrackingEndpoint = {
|
||||
path: '/:id/tracking',
|
||||
method: 'get',
|
||||
handler: async (req) => {
|
||||
const orderId = req.routeParams.id
|
||||
|
||||
const tracking = await getTrackingInfo(orderId)
|
||||
|
||||
if (!tracking) {
|
||||
return Response.json({ error: 'not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
return Response.json(tracking)
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Request Body Handling
|
||||
|
||||
**Option 1: Manual JSON parsing**
|
||||
|
||||
```ts
|
||||
export const createEndpoint = {
|
||||
path: '/create',
|
||||
method: 'post',
|
||||
handler: async (req) => {
|
||||
const data = await req.json()
|
||||
|
||||
const result = await req.payload.create({
|
||||
collection: 'posts',
|
||||
data,
|
||||
})
|
||||
|
||||
return Response.json(result)
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
**Option 2: Using helper (handles JSON + files)**
|
||||
|
||||
```ts
|
||||
import { addDataAndFileToRequest } from 'payload'
|
||||
|
||||
export const uploadEndpoint = {
|
||||
path: '/upload',
|
||||
method: 'post',
|
||||
handler: async (req) => {
|
||||
await addDataAndFileToRequest(req)
|
||||
|
||||
// req.data now contains parsed body
|
||||
// req.file contains uploaded file (if multipart)
|
||||
|
||||
const result = await req.payload.create({
|
||||
collection: 'media',
|
||||
data: req.data,
|
||||
file: req.file,
|
||||
})
|
||||
|
||||
return Response.json(result)
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### CORS Headers
|
||||
|
||||
Use `headersWithCors` helper to apply config CORS settings.
|
||||
|
||||
```ts
|
||||
import { headersWithCors } from 'payload'
|
||||
|
||||
export const corsEndpoint = {
|
||||
path: '/public-data',
|
||||
method: 'get',
|
||||
handler: async (req) => {
|
||||
const data = await fetchPublicData()
|
||||
|
||||
return Response.json(data, {
|
||||
headers: headersWithCors({
|
||||
headers: new Headers(),
|
||||
req,
|
||||
}),
|
||||
})
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
Throw `APIError` with status codes for proper error responses.
|
||||
|
||||
```ts
|
||||
import { APIError } from 'payload'
|
||||
|
||||
export const validateEndpoint = {
|
||||
path: '/validate',
|
||||
method: 'post',
|
||||
handler: async (req) => {
|
||||
const data = await req.json()
|
||||
|
||||
if (!data.email) {
|
||||
throw new APIError('Email is required', 400)
|
||||
}
|
||||
|
||||
// Validation passed
|
||||
return Response.json({ valid: true })
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Query Parameters
|
||||
|
||||
Extract query params from URL.
|
||||
|
||||
```ts
|
||||
export const searchEndpoint = {
|
||||
path: '/search',
|
||||
method: 'get',
|
||||
handler: async (req) => {
|
||||
const url = new URL(req.url)
|
||||
const query = url.searchParams.get('q')
|
||||
const limit = parseInt(url.searchParams.get('limit') || '10')
|
||||
|
||||
const results = await req.payload.find({
|
||||
collection: 'posts',
|
||||
where: {
|
||||
title: {
|
||||
contains: query,
|
||||
},
|
||||
},
|
||||
limit,
|
||||
})
|
||||
|
||||
return Response.json(results)
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Helper Functions
|
||||
|
||||
### addDataAndFileToRequest
|
||||
|
||||
Parses request body and attaches to `req.data` and `req.file`.
|
||||
|
||||
```ts
|
||||
import { addDataAndFileToRequest } from 'payload'
|
||||
|
||||
export const endpoint = {
|
||||
path: '/process',
|
||||
method: 'post',
|
||||
handler: async (req) => {
|
||||
await addDataAndFileToRequest(req)
|
||||
|
||||
// req.data: parsed JSON or form data
|
||||
// req.file: uploaded file (if multipart)
|
||||
|
||||
console.log(req.data) // { title: 'My Post' }
|
||||
console.log(req.file) // File object or undefined
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
**Handles:**
|
||||
|
||||
- JSON bodies (`Content-Type: application/json`)
|
||||
- Form data (`Content-Type: multipart/form-data`)
|
||||
- File uploads
|
||||
|
||||
### addLocalesToRequestFromData
|
||||
|
||||
Extracts locale from request data and validates against config.
|
||||
|
||||
```ts
|
||||
import { addLocalesToRequestFromData } from 'payload'
|
||||
|
||||
export const endpoint = {
|
||||
path: '/translate',
|
||||
method: 'post',
|
||||
handler: async (req) => {
|
||||
await addLocalesToRequestFromData(req)
|
||||
|
||||
// req.locale: validated locale string
|
||||
// req.fallbackLocale: fallback locale string
|
||||
|
||||
const result = await req.payload.find({
|
||||
collection: 'posts',
|
||||
locale: req.locale,
|
||||
})
|
||||
|
||||
return Response.json(result)
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### headersWithCors
|
||||
|
||||
Applies CORS headers from Payload config.
|
||||
|
||||
```ts
|
||||
import { headersWithCors } from 'payload'
|
||||
|
||||
export const endpoint = {
|
||||
path: '/data',
|
||||
method: 'get',
|
||||
handler: async (req) => {
|
||||
const data = { message: 'Hello' }
|
||||
|
||||
return Response.json(data, {
|
||||
headers: headersWithCors({
|
||||
headers: new Headers({
|
||||
'Cache-Control': 'public, max-age=3600',
|
||||
}),
|
||||
req,
|
||||
}),
|
||||
})
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Real-World Examples
|
||||
|
||||
### Multi-Tenant Login Endpoint
|
||||
|
||||
From `examples/multi-tenant`:
|
||||
|
||||
```ts
|
||||
import { APIError, generatePayloadCookie, headersWithCors } from 'payload'
|
||||
|
||||
export const externalUsersLogin = {
|
||||
path: '/login-external',
|
||||
method: 'post',
|
||||
handler: async (req) => {
|
||||
const { email, password, tenant } = await req.json()
|
||||
|
||||
if (!email || !password || !tenant) {
|
||||
throw new APIError('Missing credentials', 400)
|
||||
}
|
||||
|
||||
// Find user with tenant constraint
|
||||
const userQuery = await req.payload.find({
|
||||
collection: 'users',
|
||||
where: {
|
||||
and: [
|
||||
{ email: { equals: email } },
|
||||
{
|
||||
or: [{ tenants: { equals: tenant } }, { 'tenants.tenant': { equals: tenant } }],
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
|
||||
if (!userQuery.docs.length) {
|
||||
throw new APIError('Invalid credentials', 401)
|
||||
}
|
||||
|
||||
// Authenticate user
|
||||
const result = await req.payload.login({
|
||||
collection: 'users',
|
||||
data: { email, password },
|
||||
})
|
||||
|
||||
return Response.json(result, {
|
||||
headers: headersWithCors({
|
||||
headers: new Headers({
|
||||
'Set-Cookie': generatePayloadCookie({
|
||||
collectionAuthConfig: req.payload.config.collections.find((c) => c.slug === 'users')
|
||||
.auth,
|
||||
cookiePrefix: req.payload.config.cookiePrefix,
|
||||
token: result.token,
|
||||
}),
|
||||
}),
|
||||
req,
|
||||
}),
|
||||
})
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Webhook Handler (Stripe)
|
||||
|
||||
From `packages/plugin-ecommerce`:
|
||||
|
||||
```ts
|
||||
export const webhookEndpoint = {
|
||||
path: '/webhooks',
|
||||
method: 'post',
|
||||
handler: async (req) => {
|
||||
const body = await req.text()
|
||||
const signature = req.headers.get('stripe-signature')
|
||||
|
||||
try {
|
||||
const event = stripe.webhooks.constructEvent(body, signature, webhookSecret)
|
||||
|
||||
// Process event
|
||||
switch (event.type) {
|
||||
case 'payment_intent.succeeded':
|
||||
await handlePaymentSuccess(req.payload, event.data.object)
|
||||
break
|
||||
      case 'payment_intent.payment_failed':
|
||||
await handlePaymentFailure(req.payload, event.data.object)
|
||||
break
|
||||
}
|
||||
|
||||
return Response.json({ received: true })
|
||||
} catch (err) {
|
||||
req.payload.logger.error(`Webhook error: ${err.message}`)
|
||||
return Response.json({ error: err.message }, { status: 400 })
|
||||
}
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Data Preview Endpoint
|
||||
|
||||
From `packages/plugin-import-export`:
|
||||
|
||||
```ts
|
||||
import { addDataAndFileToRequest } from 'payload'
|
||||
|
||||
export const previewEndpoint = {
|
||||
path: '/preview',
|
||||
method: 'post',
|
||||
handler: async (req) => {
|
||||
if (!req.user) {
|
||||
throw new APIError('Unauthorized', 401)
|
||||
}
|
||||
|
||||
await addDataAndFileToRequest(req)
|
||||
|
||||
const { collection, where, limit = 10 } = req.data
|
||||
|
||||
// Validate collection exists
|
||||
const collectionConfig = req.payload.config.collections.find((c) => c.slug === collection)
|
||||
if (!collectionConfig) {
|
||||
throw new APIError('Collection not found', 404)
|
||||
}
|
||||
|
||||
// Preview data
|
||||
const results = await req.payload.find({
|
||||
collection,
|
||||
where,
|
||||
limit,
|
||||
depth: 0,
|
||||
})
|
||||
|
||||
return Response.json({
|
||||
docs: results.docs,
|
||||
totalDocs: results.totalDocs,
|
||||
fields: collectionConfig.fields,
|
||||
})
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Reindex Action Endpoint
|
||||
|
||||
From `packages/plugin-search`:
|
||||
|
||||
```ts
|
||||
export const reindexEndpoint = (pluginConfig) => ({
|
||||
path: '/reindex',
|
||||
method: 'post',
|
||||
handler: async (req) => {
|
||||
if (!req.user) {
|
||||
throw new APIError('Unauthorized', 401)
|
||||
}
|
||||
|
||||
const { collection } = req.routeParams
|
||||
|
||||
// Reindex collection
|
||||
const result = await reindexCollection(req.payload, collection, pluginConfig)
|
||||
|
||||
return Response.json({
|
||||
message: `Reindexed ${result.count} documents`,
|
||||
count: result.count,
|
||||
})
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
## Endpoint Placement
|
||||
|
||||
### Collection Endpoints
|
||||
|
||||
Mounted at `/api/{collection-slug}/{path}`.
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const Orders: CollectionConfig = {
|
||||
slug: 'orders',
|
||||
fields: [
|
||||
/* ... */
|
||||
],
|
||||
endpoints: [
|
||||
{
|
||||
path: '/:id/tracking',
|
||||
method: 'get',
|
||||
handler: async (req) => {
|
||||
// Available at: /api/orders/:id/tracking
|
||||
const orderId = req.routeParams.id
|
||||
return Response.json({ orderId })
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
### Global Endpoints
|
||||
|
||||
Mounted at `/api/globals/{global-slug}/{path}`.
|
||||
|
||||
```ts
|
||||
import type { GlobalConfig } from 'payload'
|
||||
|
||||
export const Settings: GlobalConfig = {
|
||||
slug: 'settings',
|
||||
fields: [
|
||||
/* ... */
|
||||
],
|
||||
endpoints: [
|
||||
{
|
||||
path: '/clear-cache',
|
||||
method: 'post',
|
||||
handler: async (req) => {
|
||||
// Available at: /api/globals/settings/clear-cache
|
||||
await clearCache()
|
||||
return Response.json({ message: 'Cache cleared' })
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Advanced Patterns
|
||||
|
||||
### Factory Functions
|
||||
|
||||
Create reusable endpoint factories for plugins.
|
||||
|
||||
```ts
|
||||
export const createWebhookEndpoint = (config) => ({
|
||||
path: '/webhook',
|
||||
method: 'post',
|
||||
handler: async (req) => {
|
||||
const signature = req.headers.get('x-webhook-signature')
|
||||
|
||||
if (!verifySignature(signature, config.secret)) {
|
||||
throw new APIError('Invalid signature', 401)
|
||||
}
|
||||
|
||||
const data = await req.json()
|
||||
await processWebhook(req.payload, data, config)
|
||||
|
||||
return Response.json({ received: true })
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
### Conditional Endpoints
|
||||
|
||||
Add endpoints based on config options.
|
||||
|
||||
```ts
|
||||
export const MyCollection: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
fields: [
|
||||
/* ... */
|
||||
],
|
||||
endpoints: [
|
||||
// Always included
|
||||
{
|
||||
path: '/public',
|
||||
method: 'get',
|
||||
handler: async (req) => Response.json({ data: [] }),
|
||||
},
|
||||
// Conditionally included
|
||||
...(process.env.ENABLE_ANALYTICS
|
||||
? [
|
||||
{
|
||||
path: '/analytics',
|
||||
method: 'get',
|
||||
handler: async (req) => Response.json({ analytics: [] }),
|
||||
},
|
||||
]
|
||||
: []),
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
### OpenAPI Documentation
|
||||
|
||||
Use `custom` property for API documentation metadata.
|
||||
|
||||
```ts
|
||||
export const endpoint = {
|
||||
path: '/search',
|
||||
method: 'get',
|
||||
handler: async (req) => {
|
||||
// Handler implementation
|
||||
},
|
||||
custom: {
|
||||
openapi: {
|
||||
summary: 'Search posts',
|
||||
parameters: [
|
||||
{
|
||||
name: 'q',
|
||||
in: 'query',
|
||||
required: true,
|
||||
schema: { type: 'string' },
|
||||
},
|
||||
],
|
||||
responses: {
|
||||
200: {
|
||||
description: 'Search results',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: { type: 'array' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Always check authentication** - Custom endpoints are not authenticated by default
|
||||
2. **Use `req.payload` for operations** - Ensures access control and hooks execute
|
||||
3. **Use helpers for common tasks** - `addDataAndFileToRequest`, `headersWithCors`, etc.
|
||||
4. **Throw `APIError` for errors** - Provides consistent error responses
|
||||
5. **Return Web API `Response`** - Use `Response.json()` for consistent responses
|
||||
6. **Validate input** - Check required fields, validate types
|
||||
7. **Handle CORS** - Use `headersWithCors` for cross-origin requests
|
||||
8. **Log errors** - Use `req.payload.logger` for debugging
|
||||
9. **Document with `custom`** - Add OpenAPI metadata for API docs
|
||||
10. **Factory pattern for reuse** - Create endpoint factories for plugins
|
||||
|
||||
## Resources
|
||||
|
||||
- REST API Overview: <https://payloadcms.com/docs/rest-api/overview>
|
||||
- Custom Endpoints: <https://payloadcms.com/docs/rest-api/overview#custom-endpoints>
|
||||
- Access Control: <https://payloadcms.com/docs/access-control/overview>
|
||||
- Local API: <https://payloadcms.com/docs/local-api/overview>
|
||||
553
.claude/skills/payload/reference/FIELD-TYPE-GUARDS.md
Normal file
553
.claude/skills/payload/reference/FIELD-TYPE-GUARDS.md
Normal file
@@ -0,0 +1,553 @@
|
||||
# Payload Field Type Guards Reference
|
||||
|
||||
Complete reference with detailed examples and patterns. See [FIELDS.md](FIELDS.md#field-type-guards) for quick reference table of all guards.
|
||||
|
||||
## Structural Guards
|
||||
|
||||
### fieldHasSubFields
|
||||
|
||||
Checks if field contains nested fields (group, array, row, or collapsible).
|
||||
|
||||
```ts
|
||||
import type { Field } from 'payload'
|
||||
import { fieldHasSubFields } from 'payload'
|
||||
|
||||
function traverseFields(fields: Field[]): void {
|
||||
fields.forEach((field) => {
|
||||
if (fieldHasSubFields(field)) {
|
||||
// Safe to access field.fields
|
||||
traverseFields(field.fields)
|
||||
}
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldHasSubFields<TField extends ClientField | Field>(
|
||||
field: TField
|
||||
): field is TField & (FieldWithSubFieldsClient | FieldWithSubFields)
|
||||
```
|
||||
|
||||
**Common Pattern - Exclude Arrays:**
|
||||
|
||||
```ts
|
||||
if (fieldHasSubFields(field) && !fieldIsArrayType(field)) {
|
||||
// Groups, rows, collapsibles only (not arrays)
|
||||
}
|
||||
```
|
||||
|
||||
### fieldIsArrayType
|
||||
|
||||
Checks if field type is `'array'`.
|
||||
|
||||
```ts
|
||||
import { fieldIsArrayType } from 'payload'
|
||||
|
||||
if (fieldIsArrayType(field)) {
|
||||
// field.type === 'array'
|
||||
console.log(`Min rows: ${field.minRows}`)
|
||||
console.log(`Max rows: ${field.maxRows}`)
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldIsArrayType<TField extends ClientField | Field>(
|
||||
field: TField
|
||||
): field is TField & (ArrayFieldClient | ArrayField)
|
||||
```
|
||||
|
||||
### fieldIsBlockType
|
||||
|
||||
Checks if field type is `'blocks'`.
|
||||
|
||||
```ts
|
||||
import { fieldIsBlockType } from 'payload'
|
||||
|
||||
if (fieldIsBlockType(field)) {
|
||||
// field.type === 'blocks'
|
||||
field.blocks.forEach((block) => {
|
||||
console.log(`Block: ${block.slug}`)
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldIsBlockType<TField extends ClientField | Field>(
|
||||
field: TField
|
||||
): field is TField & (BlocksFieldClient | BlocksField)
|
||||
```
|
||||
|
||||
**Common Pattern - Distinguish Containers:**
|
||||
|
||||
```ts
|
||||
if (fieldIsArrayType(field)) {
|
||||
// Handle array rows
|
||||
} else if (fieldIsBlockType(field)) {
|
||||
// Handle block types
|
||||
}
|
||||
```
|
||||
|
||||
### fieldIsGroupType
|
||||
|
||||
Checks if field type is `'group'`.
|
||||
|
||||
```ts
|
||||
import { fieldIsGroupType } from 'payload'
|
||||
|
||||
if (fieldIsGroupType(field)) {
|
||||
// field.type === 'group'
|
||||
console.log(`Interface: ${field.interfaceName}`)
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldIsGroupType<TField extends ClientField | Field>(
|
||||
field: TField
|
||||
): field is TField & (GroupFieldClient | GroupField)
|
||||
```
|
||||
|
||||
## Capability Guards
|
||||
|
||||
### fieldSupportsMany
|
||||
|
||||
Checks if field can have multiple values (select, relationship, or upload with `hasMany`).
|
||||
|
||||
```ts
|
||||
import { fieldSupportsMany } from 'payload'
|
||||
|
||||
if (fieldSupportsMany(field)) {
|
||||
// field.type is 'select' | 'relationship' | 'upload'
|
||||
// Safe to check field.hasMany
|
||||
if (field.hasMany) {
|
||||
console.log('Field accepts multiple values')
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldSupportsMany<TField extends ClientField | Field>(
|
||||
field: TField
|
||||
): field is TField & (FieldWithManyClient | FieldWithMany)
|
||||
```
|
||||
|
||||
### fieldHasMaxDepth
|
||||
|
||||
Checks if field is relationship/upload/join with numeric `maxDepth` property.
|
||||
|
||||
```ts
|
||||
import { fieldHasMaxDepth } from 'payload'
|
||||
|
||||
if (fieldHasMaxDepth(field)) {
|
||||
// field.type is 'upload' | 'relationship' | 'join'
|
||||
// AND field.maxDepth is number
|
||||
const remainingDepth = field.maxDepth - currentDepth
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldHasMaxDepth<TField extends ClientField | Field>(
|
||||
field: TField
|
||||
): field is TField & (FieldWithMaxDepthClient | FieldWithMaxDepth)
|
||||
```
|
||||
|
||||
### fieldShouldBeLocalized
|
||||
|
||||
Checks if field needs localization handling (accounts for parent localization).
|
||||
|
||||
```ts
|
||||
import { fieldShouldBeLocalized } from 'payload'
|
||||
|
||||
function processField(field: Field, parentIsLocalized: boolean) {
|
||||
if (fieldShouldBeLocalized({ field, parentIsLocalized })) {
|
||||
// Create locale-specific table or index
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldShouldBeLocalized({
|
||||
field,
|
||||
parentIsLocalized,
|
||||
}: {
|
||||
field: ClientField | ClientTab | Field | Tab
|
||||
parentIsLocalized: boolean
|
||||
}): boolean
|
||||
```
|
||||
|
||||
```ts
|
||||
// Accounts for parent localization
|
||||
if (fieldShouldBeLocalized({ field, parentIsLocalized: false })) {
|
||||
/* ... */
|
||||
}
|
||||
```
|
||||
|
||||
### fieldIsVirtual
|
||||
|
||||
Checks if field is virtual (computed or virtual relationship).
|
||||
|
||||
```ts
|
||||
import { fieldIsVirtual } from 'payload'
|
||||
|
||||
if (fieldIsVirtual(field)) {
|
||||
// field.virtual is truthy
|
||||
if (typeof field.virtual === 'string') {
|
||||
// Virtual relationship path
|
||||
console.log(`Virtual path: ${field.virtual}`)
|
||||
} else {
|
||||
// Computed virtual field (uses hooks)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldIsVirtual(field: Field | Tab): boolean
|
||||
```
|
||||
|
||||
## Data Guards
|
||||
|
||||
### fieldAffectsData
|
||||
|
||||
**Most commonly used guard.** Checks if field stores data (has name and is not UI-only).
|
||||
|
||||
```ts
|
||||
import { fieldAffectsData } from 'payload'
|
||||
|
||||
function generateSchema(fields: Field[]) {
|
||||
fields.forEach((field) => {
|
||||
if (fieldAffectsData(field)) {
|
||||
// Safe to access field.name
|
||||
schema[field.name] = getFieldType(field)
|
||||
}
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldAffectsData<TField extends ClientField | Field | TabAsField | TabAsFieldClient>(
|
||||
field: TField
|
||||
): field is TField & (FieldAffectingDataClient | FieldAffectingData)
|
||||
```
|
||||
|
||||
**Pattern - Data Fields Only:**
|
||||
|
||||
```ts
|
||||
const dataFields = fields.filter(fieldAffectsData)
|
||||
```
|
||||
|
||||
### fieldIsPresentationalOnly
|
||||
|
||||
Checks if field is UI-only (type `'ui'`).
|
||||
|
||||
```ts
|
||||
import { fieldIsPresentationalOnly } from 'payload'
|
||||
|
||||
if (fieldIsPresentationalOnly(field)) {
|
||||
// field.type === 'ui'
|
||||
// Skip in data operations, GraphQL schema, etc.
|
||||
return
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldIsPresentationalOnly<TField extends ClientField | Field | TabAsField | TabAsFieldClient>(
|
||||
field: TField
|
||||
): field is TField & (UIFieldClient | UIField)
|
||||
```
|
||||
|
||||
### fieldIsID
|
||||
|
||||
Checks if field name is exactly `'id'`.
|
||||
|
||||
```ts
|
||||
import { fieldIsID } from 'payload'
|
||||
|
||||
if (fieldIsID(field)) {
|
||||
// field.name === 'id'
|
||||
// Special handling for ID field
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldIsID<TField extends ClientField | Field>(
|
||||
field: TField
|
||||
): field is { name: 'id' } & TField
|
||||
```
|
||||
|
||||
### fieldIsHiddenOrDisabled
|
||||
|
||||
Checks if field is hidden or admin-disabled.
|
||||
|
||||
```ts
|
||||
import { fieldIsHiddenOrDisabled } from 'payload'
|
||||
|
||||
const visibleFields = fields.filter((field) => !fieldIsHiddenOrDisabled(field))
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldIsHiddenOrDisabled<TField extends ClientField | Field | TabAsField | TabAsFieldClient>(
|
||||
field: TField
|
||||
): field is { admin: { hidden: true } } & TField
|
||||
```
|
||||
|
||||
## Layout Guards
|
||||
|
||||
### fieldIsSidebar
|
||||
|
||||
Checks if field is positioned in sidebar.
|
||||
|
||||
```ts
|
||||
import { fieldIsSidebar } from 'payload'
|
||||
|
||||
const [mainFields, sidebarFields] = fields.reduce(
|
||||
([main, sidebar], field) => {
|
||||
if (fieldIsSidebar(field)) {
|
||||
return [main, [...sidebar, field]]
|
||||
}
|
||||
return [[...main, field], sidebar]
|
||||
},
|
||||
[[], []],
|
||||
)
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
fieldIsSidebar<TField extends ClientField | Field | TabAsField | TabAsFieldClient>(
|
||||
field: TField
|
||||
): field is { admin: { position: 'sidebar' } } & TField
|
||||
```
|
||||
|
||||
## Tab & Group Guards
|
||||
|
||||
### tabHasName
|
||||
|
||||
Checks if tab is named (stores data under tab name).
|
||||
|
||||
```ts
|
||||
import { tabHasName } from 'payload'
|
||||
|
||||
tabs.forEach((tab) => {
|
||||
if (tabHasName(tab)) {
|
||||
// tab.name exists
|
||||
dataPath.push(tab.name)
|
||||
}
|
||||
// Process tab.fields
|
||||
})
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
tabHasName<TField extends ClientTab | Tab>(
|
||||
tab: TField
|
||||
): tab is NamedTab & TField
|
||||
```
|
||||
|
||||
### groupHasName
|
||||
|
||||
Checks if group is named (stores data under group name).
|
||||
|
||||
```ts
|
||||
import { groupHasName } from 'payload'
|
||||
|
||||
if (groupHasName(group)) {
|
||||
// group.name exists
|
||||
return data[group.name]
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
groupHasName(group: Partial<NamedGroupFieldClient>): group is NamedGroupFieldClient
|
||||
```
|
||||
|
||||
## Option & Value Guards
|
||||
|
||||
### optionIsObject
|
||||
|
||||
Checks if option is object format `{label, value}` vs string.
|
||||
|
||||
```ts
|
||||
import { optionIsObject } from 'payload'
|
||||
|
||||
field.options.forEach((option) => {
|
||||
if (optionIsObject(option)) {
|
||||
console.log(`${option.label}: ${option.value}`)
|
||||
} else {
|
||||
console.log(option) // string value
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
optionIsObject(option: Option): option is OptionObject
|
||||
```
|
||||
|
||||
### optionsAreObjects
|
||||
|
||||
Checks if entire options array contains objects.
|
||||
|
||||
```ts
|
||||
import { optionsAreObjects } from 'payload'
|
||||
|
||||
if (optionsAreObjects(field.options)) {
|
||||
// All options are OptionObject[]
|
||||
const labels = field.options.map((opt) => opt.label)
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
optionsAreObjects(options: Option[]): options is OptionObject[]
|
||||
```
|
||||
|
||||
### optionIsValue
|
||||
|
||||
Checks if option is string value (not object).
|
||||
|
||||
```ts
|
||||
import { optionIsValue } from 'payload'
|
||||
|
||||
if (optionIsValue(option)) {
|
||||
// option is string
|
||||
const value = option
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
optionIsValue(option: Option): option is string
|
||||
```
|
||||
|
||||
### valueIsValueWithRelation
|
||||
|
||||
Checks if relationship value is polymorphic format `{relationTo, value}`.
|
||||
|
||||
```ts
|
||||
import { valueIsValueWithRelation } from 'payload'
|
||||
|
||||
if (valueIsValueWithRelation(fieldValue)) {
|
||||
// fieldValue.relationTo exists
|
||||
// fieldValue.value exists
|
||||
console.log(`Related to ${fieldValue.relationTo}: ${fieldValue.value}`)
|
||||
}
|
||||
```
|
||||
|
||||
**Signature:**
|
||||
|
||||
```ts
|
||||
valueIsValueWithRelation(value: unknown): value is ValueWithRelation
|
||||
```
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Recursive Field Traversal
|
||||
|
||||
```ts
|
||||
import { fieldAffectsData, fieldHasSubFields } from 'payload'
|
||||
|
||||
function traverseFields(fields: Field[], callback: (field: Field) => void) {
|
||||
fields.forEach((field) => {
|
||||
if (fieldAffectsData(field)) {
|
||||
callback(field)
|
||||
}
|
||||
|
||||
if (fieldHasSubFields(field)) {
|
||||
traverseFields(field.fields, callback)
|
||||
}
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
### Filter Data-Bearing Fields
|
||||
|
||||
```ts
|
||||
import { fieldAffectsData, fieldIsPresentationalOnly, fieldIsHiddenOrDisabled } from 'payload'
|
||||
|
||||
const dataFields = fields.filter(
|
||||
(field) =>
|
||||
fieldAffectsData(field) && !fieldIsPresentationalOnly(field) && !fieldIsHiddenOrDisabled(field),
|
||||
)
|
||||
```
|
||||
|
||||
### Container Type Switching
|
||||
|
||||
```ts
|
||||
import { fieldIsArrayType, fieldIsBlockType, fieldHasSubFields } from 'payload'
|
||||
|
||||
if (fieldIsArrayType(field)) {
|
||||
// Handle array-specific logic
|
||||
} else if (fieldIsBlockType(field)) {
|
||||
// Handle blocks-specific logic
|
||||
} else if (fieldHasSubFields(field)) {
|
||||
// Handle group/row/collapsible
|
||||
}
|
||||
```
|
||||
|
||||
### Safe Property Access
|
||||
|
||||
```ts
|
||||
import { fieldSupportsMany, fieldHasMaxDepth } from 'payload'
|
||||
|
||||
// Without guard - TypeScript error
|
||||
// if (field.hasMany) { /* ... */ }
|
||||
|
||||
// With guard - safe access
|
||||
if (fieldSupportsMany(field) && field.hasMany) {
|
||||
console.log('Multiple values supported')
|
||||
}
|
||||
|
||||
if (fieldHasMaxDepth(field)) {
|
||||
const depth = field.maxDepth // TypeScript knows this is number
|
||||
}
|
||||
```
|
||||
|
||||
## Type Preservation
|
||||
|
||||
All guards preserve the original type constraint:
|
||||
|
||||
```ts
|
||||
import type { ClientField, Field } from 'payload'
|
||||
import { fieldHasSubFields } from 'payload'
|
||||
|
||||
function processServerField(field: Field) {
|
||||
if (fieldHasSubFields(field)) {
|
||||
// field is Field & FieldWithSubFields (not ClientField)
|
||||
}
|
||||
}
|
||||
|
||||
function processClientField(field: ClientField) {
|
||||
if (fieldHasSubFields(field)) {
|
||||
// field is ClientField & FieldWithSubFieldsClient
|
||||
}
|
||||
}
|
||||
```
|
||||
744
.claude/skills/payload/reference/FIELDS.md
Normal file
744
.claude/skills/payload/reference/FIELDS.md
Normal file
@@ -0,0 +1,744 @@
|
||||
# Payload CMS Field Types Reference
|
||||
|
||||
Complete reference for all Payload field types with examples.
|
||||
|
||||
## Text Field
|
||||
|
||||
```ts
|
||||
import type { TextField } from 'payload'
|
||||
|
||||
const textField: TextField = {
|
||||
name: 'title',
|
||||
type: 'text',
|
||||
required: true,
|
||||
unique: true,
|
||||
minLength: 5,
|
||||
maxLength: 100,
|
||||
index: true,
|
||||
localized: true,
|
||||
defaultValue: 'Default Title',
|
||||
validate: (value) => Boolean(value) || 'Required',
|
||||
admin: {
|
||||
placeholder: 'Enter title...',
|
||||
position: 'sidebar',
|
||||
condition: (data) => data.showTitle === true,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### Slug Field Helper
|
||||
|
||||
Built-in helper for auto-generating slugs:
|
||||
|
||||
```ts
|
||||
import { slugField } from 'payload'
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const Pages: CollectionConfig = {
|
||||
slug: 'pages',
|
||||
fields: [
|
||||
{ name: 'title', type: 'text', required: true },
|
||||
slugField({
|
||||
name: 'slug', // defaults to 'slug'
|
||||
useAsSlug: 'title', // defaults to 'title'
|
||||
checkboxName: 'generateSlug', // defaults to 'generateSlug'
|
||||
localized: true,
|
||||
required: true,
|
||||
overrides: (defaultField) => {
|
||||
// Customize the generated fields if needed
|
||||
return defaultField
|
||||
},
|
||||
}),
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Rich Text (Lexical)
|
||||
|
||||
```ts
|
||||
import type { RichTextField } from 'payload'
|
||||
import { lexicalEditor } from '@payloadcms/richtext-lexical'
|
||||
import { HeadingFeature, LinkFeature } from '@payloadcms/richtext-lexical'
|
||||
|
||||
const richTextField: RichTextField = {
|
||||
name: 'content',
|
||||
type: 'richText',
|
||||
required: true,
|
||||
localized: true,
|
||||
editor: lexicalEditor({
|
||||
features: ({ defaultFeatures }) => [
|
||||
...defaultFeatures,
|
||||
HeadingFeature({
|
||||
enabledHeadingSizes: ['h1', 'h2', 'h3'],
|
||||
}),
|
||||
LinkFeature({
|
||||
enabledCollections: ['posts', 'pages'],
|
||||
}),
|
||||
],
|
||||
}),
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced Lexical Configuration
|
||||
|
||||
```ts
|
||||
import {
|
||||
BoldFeature,
|
||||
EXPERIMENTAL_TableFeature,
|
||||
FixedToolbarFeature,
|
||||
HeadingFeature,
|
||||
IndentFeature,
|
||||
InlineToolbarFeature,
|
||||
ItalicFeature,
|
||||
LinkFeature,
|
||||
OrderedListFeature,
|
||||
UnderlineFeature,
|
||||
UnorderedListFeature,
|
||||
lexicalEditor,
|
||||
} from '@payloadcms/richtext-lexical'
|
||||
|
||||
// Global editor config with full features
|
||||
export default buildConfig({
|
||||
editor: lexicalEditor({
|
||||
features: () => {
|
||||
return [
|
||||
UnderlineFeature(),
|
||||
BoldFeature(),
|
||||
ItalicFeature(),
|
||||
OrderedListFeature(),
|
||||
UnorderedListFeature(),
|
||||
LinkFeature({
|
||||
enabledCollections: ['pages'],
|
||||
fields: ({ defaultFields }) => {
|
||||
const defaultFieldsWithoutUrl = defaultFields.filter((field) => {
|
||||
if ('name' in field && field.name === 'url') return false
|
||||
return true
|
||||
})
|
||||
|
||||
return [
|
||||
...defaultFieldsWithoutUrl,
|
||||
{
|
||||
name: 'url',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: ({ linkType }) => linkType !== 'internal',
|
||||
},
|
||||
label: ({ t }) => t('fields:enterURL'),
|
||||
required: true,
|
||||
},
|
||||
]
|
||||
},
|
||||
}),
|
||||
IndentFeature(),
|
||||
EXPERIMENTAL_TableFeature(),
|
||||
]
|
||||
},
|
||||
}),
|
||||
})
|
||||
|
||||
// Field-specific editor with custom toolbar
|
||||
const richTextWithToolbars: RichTextField = {
|
||||
name: 'richText',
|
||||
type: 'richText',
|
||||
editor: lexicalEditor({
|
||||
features: ({ rootFeatures }) => {
|
||||
return [
|
||||
...rootFeatures,
|
||||
HeadingFeature({ enabledHeadingSizes: ['h2', 'h3', 'h4'] }),
|
||||
FixedToolbarFeature(),
|
||||
InlineToolbarFeature(),
|
||||
]
|
||||
},
|
||||
}),
|
||||
label: false,
|
||||
}
|
||||
```
|
||||
|
||||
## Relationship
|
||||
|
||||
```ts
|
||||
import type { PolymorphicRelationshipField, RelationshipField } from 'payload'
|
||||
|
||||
// Single relationship
|
||||
const singleRelationship: RelationshipField = {
|
||||
name: 'author',
|
||||
type: 'relationship',
|
||||
relationTo: 'users',
|
||||
required: true,
|
||||
maxDepth: 2,
|
||||
}
|
||||
|
||||
// Multiple relationships (hasMany)
|
||||
const multipleRelationship: RelationshipField = {
|
||||
name: 'categories',
|
||||
type: 'relationship',
|
||||
relationTo: 'categories',
|
||||
hasMany: true,
|
||||
filterOptions: {
|
||||
active: { equals: true },
|
||||
},
|
||||
}
|
||||
|
||||
// Polymorphic relationship
|
||||
const polymorphicRelationship: PolymorphicRelationshipField = {
|
||||
name: 'relatedContent',
|
||||
type: 'relationship',
|
||||
relationTo: ['posts', 'pages'],
|
||||
hasMany: true,
|
||||
}
|
||||
```
|
||||
|
||||
## Array
|
||||
|
||||
```ts
|
||||
import type { ArrayField } from 'payload'
|
||||
|
||||
const arrayField: ArrayField = {
|
||||
name: 'slides',
|
||||
type: 'array',
|
||||
minRows: 2,
|
||||
maxRows: 10,
|
||||
labels: {
|
||||
singular: 'Slide',
|
||||
plural: 'Slides',
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'title',
|
||||
type: 'text',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: 'image',
|
||||
type: 'upload',
|
||||
relationTo: 'media',
|
||||
},
|
||||
],
|
||||
admin: {
|
||||
initCollapsed: true,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Blocks
|
||||
|
||||
```ts
|
||||
import type { BlocksField, Block } from 'payload'
|
||||
|
||||
const HeroBlock: Block = {
|
||||
slug: 'hero',
|
||||
interfaceName: 'HeroBlock',
|
||||
fields: [
|
||||
{
|
||||
name: 'heading',
|
||||
type: 'text',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: 'background',
|
||||
type: 'upload',
|
||||
relationTo: 'media',
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
const ContentBlock: Block = {
|
||||
slug: 'content',
|
||||
fields: [
|
||||
{
|
||||
name: 'text',
|
||||
type: 'richText',
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
const blocksField: BlocksField = {
|
||||
name: 'layout',
|
||||
type: 'blocks',
|
||||
blocks: [HeroBlock, ContentBlock],
|
||||
}
|
||||
```
|
||||
|
||||
## Select
|
||||
|
||||
```ts
|
||||
import type { SelectField } from 'payload'
|
||||
|
||||
const selectField: SelectField = {
|
||||
name: 'status',
|
||||
type: 'select',
|
||||
options: [
|
||||
{ label: 'Draft', value: 'draft' },
|
||||
{ label: 'Published', value: 'published' },
|
||||
],
|
||||
defaultValue: 'draft',
|
||||
required: true,
|
||||
}
|
||||
|
||||
// Multiple select
|
||||
const multiSelectField: SelectField = {
|
||||
name: 'tags',
|
||||
type: 'select',
|
||||
hasMany: true,
|
||||
options: ['tech', 'news', 'sports'],
|
||||
}
|
||||
```
|
||||
|
||||
## Upload
|
||||
|
||||
```ts
|
||||
import type { UploadField } from 'payload'
|
||||
|
||||
const uploadField: UploadField = {
|
||||
name: 'featuredImage',
|
||||
type: 'upload',
|
||||
relationTo: 'media',
|
||||
required: true,
|
||||
filterOptions: {
|
||||
mimeType: { contains: 'image' },
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Point (Geolocation)
|
||||
|
||||
Point fields store geographic coordinates with automatic 2dsphere indexing for geospatial queries.
|
||||
|
||||
```ts
|
||||
import type { PointField } from 'payload'
|
||||
|
||||
const locationField: PointField = {
|
||||
name: 'location',
|
||||
type: 'point',
|
||||
label: 'Location',
|
||||
required: true,
|
||||
}
|
||||
|
||||
// Returns [longitude, latitude]
|
||||
// Example: [-122.4194, 37.7749] for San Francisco
|
||||
```
|
||||
|
||||
### Geospatial Queries
|
||||
|
||||
```ts
|
||||
// Query by distance (sorted by nearest first)
|
||||
const nearbyLocations = await payload.find({
|
||||
collection: 'stores',
|
||||
where: {
|
||||
location: {
|
||||
near: [10, 20], // [longitude, latitude]
|
||||
maxDistance: 5000, // in meters
|
||||
minDistance: 1000,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Query within polygon area
|
||||
const polygon: Point[] = [
|
||||
[9.0, 19.0], // bottom-left
|
||||
[9.0, 21.0], // top-left
|
||||
[11.0, 21.0], // top-right
|
||||
[11.0, 19.0], // bottom-right
|
||||
[9.0, 19.0], // closing point
|
||||
]
|
||||
|
||||
const withinArea = await payload.find({
|
||||
collection: 'stores',
|
||||
where: {
|
||||
location: {
|
||||
within: {
|
||||
type: 'Polygon',
|
||||
coordinates: [polygon],
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
// Query intersecting area
|
||||
const intersecting = await payload.find({
|
||||
collection: 'stores',
|
||||
where: {
|
||||
location: {
|
||||
intersects: {
|
||||
type: 'Polygon',
|
||||
coordinates: [polygon],
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
**Note**: Point fields are not supported in SQLite.
|
||||
|
||||
## Join Fields
|
||||
|
||||
Join fields create reverse relationships, allowing you to access related documents from the "other side" of a relationship.
|
||||
|
||||
```ts
|
||||
import type { JoinField } from 'payload'
|
||||
|
||||
// From Users collection - show user's orders
|
||||
const ordersJoinField: JoinField = {
|
||||
name: 'orders',
|
||||
type: 'join',
|
||||
collection: 'orders',
|
||||
on: 'customer', // The field in 'orders' that references this user
|
||||
admin: {
|
||||
allowCreate: false,
|
||||
defaultColumns: ['id', 'createdAt', 'total', 'currency', 'items'],
|
||||
},
|
||||
}
|
||||
|
||||
// From Users collection - show user's cart
|
||||
const cartJoinField: JoinField = {
|
||||
name: 'cart',
|
||||
type: 'join',
|
||||
collection: 'carts',
|
||||
on: 'customer',
|
||||
admin: {
|
||||
allowCreate: false,
|
||||
defaultColumns: ['id', 'createdAt', 'total', 'currency'],
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Virtual Fields
|
||||
|
||||
```ts
|
||||
import type { TextField } from 'payload'
|
||||
|
||||
// Computed from siblings
|
||||
const computedVirtualField: TextField = {
|
||||
name: 'fullName',
|
||||
type: 'text',
|
||||
virtual: true,
|
||||
hooks: {
|
||||
afterRead: [({ siblingData }) => `${siblingData.firstName} ${siblingData.lastName}`],
|
||||
},
|
||||
}
|
||||
|
||||
// From relationship path
|
||||
const pathVirtualField: TextField = {
|
||||
name: 'authorName',
|
||||
type: 'text',
|
||||
virtual: 'author.name',
|
||||
}
|
||||
```
|
||||
|
||||
## Conditional Fields
|
||||
|
||||
```ts
|
||||
import type { CheckboxField, SelectField, TextField, UploadField } from 'payload'
|
||||
|
||||
// Simple boolean condition
|
||||
const enableFeatureField: CheckboxField = {
|
||||
name: 'enableFeature',
|
||||
type: 'checkbox',
|
||||
}
|
||||
|
||||
const conditionalField: TextField = {
|
||||
name: 'featureText',
|
||||
type: 'text',
|
||||
admin: {
|
||||
condition: (data) => data.enableFeature === true,
|
||||
},
|
||||
}
|
||||
|
||||
// Sibling data condition (from hero field pattern)
|
||||
const typeField: SelectField = {
|
||||
name: 'type',
|
||||
type: 'select',
|
||||
options: ['none', 'highImpact', 'mediumImpact', 'lowImpact'],
|
||||
defaultValue: 'lowImpact',
|
||||
}
|
||||
|
||||
const mediaField: UploadField = {
|
||||
name: 'media',
|
||||
type: 'upload',
|
||||
relationTo: 'media',
|
||||
admin: {
|
||||
condition: (_, { type } = {}) => ['highImpact', 'mediumImpact'].includes(type),
|
||||
},
|
||||
required: true,
|
||||
}
|
||||
```
|
||||
|
||||
## Radio
|
||||
|
||||
Radio fields present options as radio buttons for single selection.
|
||||
|
||||
```ts
|
||||
import type { RadioField } from 'payload'
|
||||
|
||||
const radioField: RadioField = {
|
||||
name: 'priority',
|
||||
type: 'radio',
|
||||
options: [
|
||||
{ label: 'Low', value: 'low' },
|
||||
{ label: 'Medium', value: 'medium' },
|
||||
{ label: 'High', value: 'high' },
|
||||
],
|
||||
defaultValue: 'medium',
|
||||
admin: {
|
||||
layout: 'horizontal', // or 'vertical'
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Row (Layout)
|
||||
|
||||
Row fields arrange fields horizontally in the admin panel (presentational only).
|
||||
|
||||
```ts
|
||||
import type { RowField } from 'payload'
|
||||
|
||||
const rowField: RowField = {
|
||||
type: 'row',
|
||||
fields: [
|
||||
{
|
||||
name: 'firstName',
|
||||
type: 'text',
|
||||
admin: { width: '50%' },
|
||||
},
|
||||
{
|
||||
name: 'lastName',
|
||||
type: 'text',
|
||||
admin: { width: '50%' },
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Collapsible (Layout)
|
||||
|
||||
Collapsible fields group fields in an expandable/collapsible section.
|
||||
|
||||
```ts
|
||||
import type { CollapsibleField } from 'payload'
|
||||
|
||||
const collapsibleField: CollapsibleField = {
|
||||
label: ({ data }) => data?.title || 'Advanced Options',
|
||||
type: 'collapsible',
|
||||
admin: {
|
||||
initCollapsed: true,
|
||||
},
|
||||
fields: [
|
||||
{ name: 'customCSS', type: 'textarea' },
|
||||
{ name: 'customJS', type: 'code' },
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## UI (Custom Components)
|
||||
|
||||
UI fields allow fully custom React components in the admin (no data stored).
|
||||
|
||||
```ts
|
||||
import type { UIField } from 'payload'
|
||||
|
||||
const uiField: UIField = {
|
||||
name: 'customMessage',
|
||||
type: 'ui',
|
||||
admin: {
|
||||
components: {
|
||||
Field: '/path/to/CustomFieldComponent',
|
||||
Cell: '/path/to/CustomCellComponent', // For list view
|
||||
},
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Tabs & Groups
|
||||
|
||||
```ts
|
||||
import type { TabsField, GroupField } from 'payload'
|
||||
|
||||
// Tabs
|
||||
const tabsField: TabsField = {
|
||||
type: 'tabs',
|
||||
tabs: [
|
||||
{
|
||||
label: 'Content',
|
||||
fields: [
|
||||
{ name: 'title', type: 'text' },
|
||||
{ name: 'body', type: 'richText' },
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'SEO',
|
||||
fields: [
|
||||
{ name: 'metaTitle', type: 'text' },
|
||||
{ name: 'metaDescription', type: 'textarea' },
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
// Group (named)
|
||||
const groupField: GroupField = {
|
||||
name: 'meta',
|
||||
type: 'group',
|
||||
fields: [
|
||||
{ name: 'title', type: 'text' },
|
||||
{ name: 'description', type: 'textarea' },
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Reusable Field Factories
|
||||
|
||||
Create composable field patterns that can be customized with overrides.
|
||||
|
||||
```ts
|
||||
import type { Field, GroupField } from 'payload'
|
||||
|
||||
// Utility for deep merging
|
||||
const deepMerge = <T>(target: T, source: Partial<T>): T => {
|
||||
// Implementation would deeply merge objects
|
||||
return { ...target, ...source }
|
||||
}
|
||||
|
||||
// Reusable link field factory
|
||||
type LinkType = (options?: {
|
||||
appearances?: ('default' | 'outline')[] | false
|
||||
disableLabel?: boolean
|
||||
overrides?: Record<string, unknown>
|
||||
}) => GroupField
|
||||
|
||||
export const link: LinkType = ({ appearances, disableLabel = false, overrides = {} } = {}) => {
|
||||
const linkField: GroupField = {
|
||||
name: 'link',
|
||||
type: 'group',
|
||||
admin: {
|
||||
hideGutter: true,
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
type: 'row',
|
||||
fields: [
|
||||
{
|
||||
name: 'type',
|
||||
type: 'radio',
|
||||
options: [
|
||||
{ label: 'Internal link', value: 'reference' },
|
||||
{ label: 'Custom URL', value: 'custom' },
|
||||
],
|
||||
defaultValue: 'reference',
|
||||
admin: {
|
||||
layout: 'horizontal',
|
||||
width: '50%',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'newTab',
|
||||
type: 'checkbox',
|
||||
label: 'Open in new tab',
|
||||
admin: {
|
||||
width: '50%',
|
||||
style: {
|
||||
alignSelf: 'flex-end',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'reference',
|
||||
type: 'relationship',
|
||||
relationTo: ['pages'],
|
||||
required: true,
|
||||
maxDepth: 1,
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'reference',
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'url',
|
||||
type: 'text',
|
||||
label: 'Custom URL',
|
||||
required: true,
|
||||
admin: {
|
||||
condition: (_, siblingData) => siblingData?.type === 'custom',
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
if (!disableLabel) {
|
||||
linkField.fields.push({
|
||||
name: 'label',
|
||||
type: 'text',
|
||||
required: true,
|
||||
})
|
||||
}
|
||||
|
||||
if (appearances !== false) {
|
||||
linkField.fields.push({
|
||||
name: 'appearance',
|
||||
type: 'select',
|
||||
defaultValue: 'default',
|
||||
options: [
|
||||
{ label: 'Default', value: 'default' },
|
||||
{ label: 'Outline', value: 'outline' },
|
||||
],
|
||||
})
|
||||
}
|
||||
|
||||
return deepMerge(linkField, overrides) as GroupField
|
||||
}
|
||||
|
||||
// Usage
|
||||
const navItem = link({ appearances: false })
|
||||
const ctaButton = link({
|
||||
overrides: {
|
||||
name: 'cta',
|
||||
admin: {
|
||||
description: 'Call to action button',
|
||||
},
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
## Field Type Guards
|
||||
|
||||
Type guards for runtime field type checking and safe type narrowing.
|
||||
|
||||
| Type Guard | Checks For | Use When |
|
||||
| --------------------------- | ----------------------------------------------------------- | ---------------------------------------- |
|
||||
| `fieldAffectsData` | Field stores data (has name, not UI-only) | Need to access field data or name |
|
||||
| `fieldHasSubFields` | Field contains nested fields (group/array/row/collapsible) | Need to recursively traverse fields |
|
||||
| `fieldIsArrayType` | Field is array type | Distinguish arrays from other containers |
|
||||
| `fieldIsBlockType` | Field is blocks type | Handle blocks-specific logic |
|
||||
| `fieldIsGroupType` | Field is group type | Handle group-specific logic |
|
||||
| `fieldSupportsMany` | Field can have multiple values (select/relationship/upload) | Check for `hasMany` support |
|
||||
| `fieldHasMaxDepth` | Field supports population depth control | Control relationship/upload/join depth |
|
||||
| `fieldIsPresentationalOnly` | Field is UI-only (no data storage) | Exclude from data operations |
|
||||
| `fieldIsSidebar` | Field positioned in sidebar | Separate sidebar rendering |
|
||||
| `fieldIsID` | Field name is 'id' | Special ID field handling |
|
||||
| `fieldIsHiddenOrDisabled` | Field is hidden or disabled | Filter from UI operations |
|
||||
| `fieldShouldBeLocalized` | Field needs localization handling | Proper locale table checks |
|
||||
| `fieldIsVirtual` | Field is virtual (computed/no DB column) | Skip in database transforms |
|
||||
| `tabHasName` | Tab is named (stores data) | Distinguish named vs unnamed tabs |
|
||||
| `groupHasName` | Group is named (stores data) | Distinguish named vs unnamed groups |
|
||||
| `optionIsObject` | Option is `{label, value}` format | Access option properties safely |
|
||||
| `optionsAreObjects` | All options are objects | Batch option processing |
|
||||
| `optionIsValue` | Option is string value | Handle string options |
|
||||
| `valueIsValueWithRelation` | Value is polymorphic relationship | Handle polymorphic relationships |
|
||||
|
||||
```ts
|
||||
import { fieldAffectsData, fieldHasSubFields, fieldIsArrayType } from 'payload'
|
||||
|
||||
function processField(field: Field) {
|
||||
if (fieldAffectsData(field)) {
|
||||
// Safe to access field.name
|
||||
console.log(field.name)
|
||||
}
|
||||
|
||||
if (fieldHasSubFields(field)) {
|
||||
// Safe to access field.fields
|
||||
field.fields.forEach(processField)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
See [FIELD-TYPE-GUARDS.md](FIELD-TYPE-GUARDS.md) for detailed usage patterns.
|
||||
186
.claude/skills/payload/reference/HOOKS.md
Normal file
186
.claude/skills/payload/reference/HOOKS.md
Normal file
@@ -0,0 +1,186 @@
|
||||
# Payload CMS Hooks Reference
|
||||
|
||||
Complete reference for collection hooks, field hooks, and hook context patterns.
|
||||
|
||||
## Collection Hooks
|
||||
|
||||
```ts
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
hooks: {
|
||||
// Before validation
|
||||
beforeValidate: [
|
||||
async ({ data, operation }) => {
|
||||
if (operation === 'create') {
|
||||
data.slug = slugify(data.title)
|
||||
}
|
||||
return data
|
||||
},
|
||||
],
|
||||
|
||||
// Before save
|
||||
beforeChange: [
|
||||
async ({ data, req, operation, originalDoc }) => {
|
||||
if (operation === 'update' && data.status === 'published') {
|
||||
data.publishedAt = new Date()
|
||||
}
|
||||
return data
|
||||
},
|
||||
],
|
||||
|
||||
// After save
|
||||
afterChange: [
|
||||
async ({ doc, req, operation, previousDoc }) => {
|
||||
if (operation === 'create') {
|
||||
await sendNotification(doc)
|
||||
}
|
||||
return doc
|
||||
},
|
||||
],
|
||||
|
||||
// After read
|
||||
afterRead: [
|
||||
async ({ doc, req }) => {
|
||||
doc.viewCount = await getViewCount(doc.id)
|
||||
return doc
|
||||
},
|
||||
],
|
||||
|
||||
// Before delete
|
||||
beforeDelete: [
|
||||
async ({ req, id }) => {
|
||||
await cleanupRelatedData(id)
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Field Hooks
|
||||
|
||||
```ts
|
||||
import type { EmailField, FieldHook } from 'payload'
|
||||
|
||||
const beforeValidateHook: FieldHook = ({ value }) => {
|
||||
return value.trim().toLowerCase()
|
||||
}
|
||||
|
||||
const afterReadHook: FieldHook = ({ value, req }) => {
|
||||
// Hide email from non-admins
|
||||
if (!req.user?.roles?.includes('admin')) {
|
||||
return value.replace(/(.{2})(.*)(@.*)/, '$1***$3')
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
const emailField: EmailField = {
|
||||
name: 'email',
|
||||
type: 'email',
|
||||
hooks: {
|
||||
beforeValidate: [beforeValidateHook],
|
||||
afterRead: [afterReadHook],
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Hook Context
|
||||
|
||||
Share data between hooks or control hook behavior using request context:
|
||||
|
||||
```ts
|
||||
import type { CollectionConfig } from 'payload'
|
||||
|
||||
export const Posts: CollectionConfig = {
|
||||
slug: 'posts',
|
||||
hooks: {
|
||||
beforeChange: [
|
||||
async ({ context }) => {
|
||||
context.expensiveData = await fetchExpensiveData()
|
||||
},
|
||||
],
|
||||
afterChange: [
|
||||
async ({ context, doc }) => {
|
||||
// Reuse from previous hook
|
||||
await processData(doc, context.expensiveData)
|
||||
},
|
||||
],
|
||||
},
|
||||
fields: [{ name: 'title', type: 'text' }],
|
||||
}
|
||||
```
|
||||
|
||||
## Next.js Revalidation with Context Control
|
||||
|
||||
```ts
|
||||
import type { CollectionAfterChangeHook, CollectionAfterDeleteHook } from 'payload'
|
||||
import { revalidatePath } from 'next/cache'
|
||||
import type { Page } from '../payload-types'
|
||||
|
||||
export const revalidatePage: CollectionAfterChangeHook<Page> = ({
|
||||
doc,
|
||||
previousDoc,
|
||||
req: { payload, context },
|
||||
}) => {
|
||||
if (!context.disableRevalidate) {
|
||||
if (doc._status === 'published') {
|
||||
const path = doc.slug === 'home' ? '/' : `/${doc.slug}`
|
||||
payload.logger.info(`Revalidating page at path: ${path}`)
|
||||
revalidatePath(path)
|
||||
}
|
||||
|
||||
// Revalidate old path if unpublished
|
||||
if (previousDoc?._status === 'published' && doc._status !== 'published') {
|
||||
const oldPath = previousDoc.slug === 'home' ? '/' : `/${previousDoc.slug}`
|
||||
payload.logger.info(`Revalidating old page at path: ${oldPath}`)
|
||||
revalidatePath(oldPath)
|
||||
}
|
||||
}
|
||||
return doc
|
||||
}
|
||||
|
||||
export const revalidateDelete: CollectionAfterDeleteHook<Page> = ({ doc, req: { context } }) => {
|
||||
if (!context.disableRevalidate) {
|
||||
const path = doc?.slug === 'home' ? '/' : `/${doc?.slug}`
|
||||
revalidatePath(path)
|
||||
}
|
||||
return doc
|
||||
}
|
||||
```
|
||||
|
||||
## Date Field Auto-Set
|
||||
|
||||
Automatically set date when document is published:
|
||||
|
||||
```ts
|
||||
import type { DateField } from 'payload'
|
||||
|
||||
const publishedOnField: DateField = {
|
||||
name: 'publishedOn',
|
||||
type: 'date',
|
||||
admin: {
|
||||
date: {
|
||||
pickerAppearance: 'dayAndTime',
|
||||
},
|
||||
position: 'sidebar',
|
||||
},
|
||||
hooks: {
|
||||
beforeChange: [
|
||||
({ siblingData, value }) => {
|
||||
if (siblingData._status === 'published' && !value) {
|
||||
return new Date()
|
||||
}
|
||||
return value
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
## Hook Patterns Best Practices
|
||||
|
||||
- Use `beforeValidate` for data formatting
|
||||
- Use `beforeChange` for business logic
|
||||
- Use `afterChange` for side effects
|
||||
- Use `afterRead` for computed fields
|
||||
- Store expensive operations in `context`
|
||||
- Pass `req` to nested operations for transaction safety (see [ADAPTERS.md#threading-req-through-operations](ADAPTERS.md#threading-req-through-operations))
|
||||
1436
.claude/skills/payload/reference/PLUGIN-DEVELOPMENT.md
Normal file
1436
.claude/skills/payload/reference/PLUGIN-DEVELOPMENT.md
Normal file
File diff suppressed because it is too large
Load Diff
274
.claude/skills/payload/reference/QUERIES.md
Normal file
274
.claude/skills/payload/reference/QUERIES.md
Normal file
@@ -0,0 +1,274 @@
|
||||
# Payload CMS Querying Reference
|
||||
|
||||
Complete reference for querying data across Local API, REST, and GraphQL.
|
||||
|
||||
## Query Operators
|
||||
|
||||
```ts
|
||||
import type { Where } from 'payload'
|
||||
|
||||
// Equals
|
||||
const equalsQuery: Where = { color: { equals: 'blue' } }
|
||||
|
||||
// Not equals
|
||||
const notEqualsQuery: Where = { status: { not_equals: 'draft' } }
|
||||
|
||||
// Greater/less than
|
||||
const greaterThanQuery: Where = { price: { greater_than: 100 } }
|
||||
const lessThanEqualQuery: Where = { age: { less_than_equal: 65 } }
|
||||
|
||||
// Contains (case-insensitive)
|
||||
const containsQuery: Where = { title: { contains: 'payload' } }
|
||||
|
||||
// Like (all words present)
|
||||
const likeQuery: Where = { description: { like: 'cms headless' } }
|
||||
|
||||
// In/not in
|
||||
const inQuery: Where = { category: { in: ['tech', 'news'] } }
|
||||
|
||||
// Exists
|
||||
const existsQuery: Where = { image: { exists: true } }
|
||||
|
||||
// Near (point fields)
|
||||
const nearQuery: Where = { location: { near: '-122.4194,37.7749,10000' } }
|
||||
```
|
||||
|
||||
## AND/OR Logic
|
||||
|
||||
```ts
|
||||
import type { Where } from 'payload'
|
||||
|
||||
const complexQuery: Where = {
|
||||
or: [
|
||||
{ color: { equals: 'mint' } },
|
||||
{
|
||||
and: [{ color: { equals: 'white' } }, { featured: { equals: false } }],
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
## Nested Properties
|
||||
|
||||
```ts
|
||||
import type { Where } from 'payload'
|
||||
|
||||
const nestedQuery: Where = {
|
||||
'author.role': { equals: 'editor' },
|
||||
'meta.featured': { exists: true },
|
||||
}
|
||||
```
|
||||
|
||||
## Local API
|
||||
|
||||
```ts
|
||||
// Find documents
|
||||
const posts = await payload.find({
|
||||
collection: 'posts',
|
||||
where: {
|
||||
status: { equals: 'published' },
|
||||
'author.name': { contains: 'john' },
|
||||
},
|
||||
depth: 2,
|
||||
limit: 10,
|
||||
page: 1,
|
||||
sort: '-createdAt',
|
||||
locale: 'en',
|
||||
select: {
|
||||
title: true,
|
||||
author: true,
|
||||
},
|
||||
})
|
||||
|
||||
// Find by ID
|
||||
const post = await payload.findByID({
|
||||
collection: 'posts',
|
||||
id: '123',
|
||||
depth: 2,
|
||||
})
|
||||
|
||||
// Create
|
||||
const post = await payload.create({
|
||||
collection: 'posts',
|
||||
data: {
|
||||
title: 'New Post',
|
||||
status: 'draft',
|
||||
},
|
||||
})
|
||||
|
||||
// Update
|
||||
await payload.update({
|
||||
collection: 'posts',
|
||||
id: '123',
|
||||
data: {
|
||||
status: 'published',
|
||||
},
|
||||
})
|
||||
|
||||
// Delete
|
||||
await payload.delete({
|
||||
collection: 'posts',
|
||||
id: '123',
|
||||
})
|
||||
|
||||
// Count
|
||||
const count = await payload.count({
|
||||
collection: 'posts',
|
||||
where: {
|
||||
status: { equals: 'published' },
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
### Threading req Parameter
|
||||
|
||||
When performing operations in hooks or nested operations, pass the `req` parameter to maintain transaction context:
|
||||
|
||||
```ts
|
||||
// ✅ CORRECT: Pass req for transaction safety
|
||||
const afterChange: CollectionAfterChangeHook = async ({ doc, req }) => {
|
||||
await req.payload.create({
|
||||
collection: 'audit-log',
|
||||
data: { action: 'created', docId: doc.id },
|
||||
req, // Maintains transaction atomicity
|
||||
})
|
||||
}
|
||||
|
||||
// ❌ WRONG: Missing req breaks transaction
|
||||
const afterChange: CollectionAfterChangeHook = async ({ doc, req }) => {
|
||||
await req.payload.create({
|
||||
collection: 'audit-log',
|
||||
data: { action: 'created', docId: doc.id },
|
||||
// Missing req - runs in separate transaction
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
This is critical for MongoDB replica sets and Postgres. See [ADAPTERS.md#threading-req-through-operations](ADAPTERS.md#threading-req-through-operations) for details.
|
||||
|
||||
### Access Control in Local API
|
||||
|
||||
**Important**: Local API bypasses access control by default (`overrideAccess: true`). When passing a `user` parameter, you must explicitly set `overrideAccess: false` to respect that user's permissions.
|
||||
|
||||
```ts
|
||||
// ❌ WRONG: User is passed but access control is bypassed
|
||||
const posts = await payload.find({
|
||||
collection: 'posts',
|
||||
user: currentUser,
|
||||
// Missing: overrideAccess: false
|
||||
// Result: Operation runs with ADMIN privileges, ignoring user's permissions
|
||||
})
|
||||
|
||||
// ✅ CORRECT: Respects user's access control permissions
|
||||
const posts = await payload.find({
|
||||
collection: 'posts',
|
||||
user: currentUser,
|
||||
overrideAccess: false, // Required to enforce access control
|
||||
// Result: User only sees posts they have permission to read
|
||||
})
|
||||
|
||||
// Administrative operation (intentionally bypass access control)
|
||||
const allPosts = await payload.find({
|
||||
collection: 'posts',
|
||||
// No user parameter
|
||||
// overrideAccess defaults to true
|
||||
// Result: Returns all posts regardless of access control
|
||||
})
|
||||
```
|
||||
|
||||
**When to use `overrideAccess: false`:**
|
||||
|
||||
- Performing operations on behalf of a user
|
||||
- Testing access control logic
|
||||
- API routes that should respect user permissions
|
||||
- Any operation where `user` parameter is provided
|
||||
|
||||
**When `overrideAccess: true` is appropriate:**
|
||||
|
||||
- Administrative operations (migrations, seeds, cron jobs)
|
||||
- Internal system operations
|
||||
- Operations explicitly intended to bypass access control
|
||||
|
||||
See [ACCESS-CONTROL.md#important-notes](ACCESS-CONTROL.md#important-notes) for more details.
|
||||
|
||||
## REST API
|
||||
|
||||
```ts
|
||||
import { stringify } from 'qs-esm'
|
||||
|
||||
const query = {
|
||||
status: { equals: 'published' },
|
||||
}
|
||||
|
||||
const queryString = stringify(
|
||||
{
|
||||
where: query,
|
||||
depth: 2,
|
||||
limit: 10,
|
||||
},
|
||||
{ addQueryPrefix: true },
|
||||
)
|
||||
|
||||
const response = await fetch(`https://api.example.com/api/posts${queryString}`)
|
||||
const data = await response.json()
|
||||
```
|
||||
|
||||
### REST Endpoints
|
||||
|
||||
```txt
|
||||
GET /api/{collection} - Find documents
|
||||
GET /api/{collection}/{id} - Find by ID
|
||||
POST /api/{collection} - Create
|
||||
PATCH /api/{collection}/{id} - Update
|
||||
DELETE /api/{collection}/{id} - Delete
|
||||
GET /api/{collection}/count - Count documents
|
||||
|
||||
GET /api/globals/{slug} - Get global
|
||||
POST /api/globals/{slug} - Update global
|
||||
```
|
||||
|
||||
## GraphQL
|
||||
|
||||
```graphql
|
||||
query {
|
||||
Posts(where: { status: { equals: published } }, limit: 10, sort: "-createdAt") {
|
||||
docs {
|
||||
id
|
||||
title
|
||||
author {
|
||||
name
|
||||
}
|
||||
}
|
||||
totalDocs
|
||||
hasNextPage
|
||||
}
|
||||
}
|
||||
|
||||
mutation {
|
||||
createPost(data: { title: "New Post", status: draft }) {
|
||||
id
|
||||
title
|
||||
}
|
||||
}
|
||||
|
||||
mutation {
|
||||
updatePost(id: "123", data: { status: published }) {
|
||||
id
|
||||
status
|
||||
}
|
||||
}
|
||||
|
||||
mutation {
|
||||
deletePost(id: "123") {
|
||||
id
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Performance Best Practices
|
||||
|
||||
- Set `maxDepth` on relationships to prevent over-fetching
|
||||
- Use `select` to limit returned fields
|
||||
- Index frequently queried fields
|
||||
- Use `virtual` fields for computed data
|
||||
- Cache expensive operations in hook `context`
|
||||
@@ -47,3 +47,12 @@ Thumbs.db
|
||||
docker
|
||||
Dockerfile
|
||||
.dockerignore
|
||||
|
||||
# Docs
|
||||
docs
|
||||
README.md
|
||||
|
||||
# AI Stuff
|
||||
.claude
|
||||
AGENTS.md
|
||||
CLAUDE.md
|
||||
|
||||
36
.env.example
36
.env.example
@@ -6,6 +6,8 @@
|
||||
## Next.js ##
|
||||
NODE_ENV=
|
||||
SENTRY_AUTH_TOKEN=
|
||||
PAYLOAD_SECRET= # openssl rand -hex 32 | wl-copy
|
||||
PAYLOAD_DB_URL= # postgresql://user:password@host:5432/db_name
|
||||
NEXT_PUBLIC_SITE_URL=https://example.com
|
||||
NEXT_PUBLIC_CONVEX_URL=https://api.convex.example.com # convex-backend:3210
|
||||
NEXT_PUBLIC_PLAUSIBLE_URL=https://plausible.example.com
|
||||
@@ -13,9 +15,6 @@ NEXT_PUBLIC_SENTRY_DSN=
|
||||
NEXT_PUBLIC_SENTRY_URL=https://sentry.example.com
|
||||
NEXT_PUBLIC_SENTRY_ORG=sentry
|
||||
NEXT_PUBLIC_SENTRY_PROJECT_NAME=example
|
||||
## Payload CMS ##
|
||||
PAYLOAD_SECRET=
|
||||
PAYLOAD_DB_URL=
|
||||
|
||||
## Convex ##
|
||||
CONVEX_SELF_HOSTED_URL=https://api.convex.example.com # convex-backend:3210
|
||||
@@ -24,7 +23,36 @@ CONVEX_SELF_HOSTED_ADMIN_KEY= # Generate after hosted on docker
|
||||
CONVEX_SITE_URL=http://localhost:3000 # Always localhost:3000 for local dev; update in Convex Dashboard for production
|
||||
USESEND_API_KEY=
|
||||
USESEND_URL=https://usesend.example.com
|
||||
USESEND_FROM_EMAIL=My App <noreply@example.com>
|
||||
USESEND_FROM_EMAIL=Convex Admin <admin@convexmonorepo.gbrown.org>
|
||||
AUTH_AUTHENTIK_ID=
|
||||
AUTH_AUTHENTIK_SECRET=
|
||||
AUTH_AUTHENTIK_ISSUER=
|
||||
|
||||
## Docker Compose Variables for Next App ##
|
||||
NETWORK=nginx-bridge
|
||||
NEXT_CONTAINER_NAME=convexmonorepo
|
||||
NEXT_DOMAIN=convexmonorepo.gbrown.org
|
||||
#NEXT_PORT=
|
||||
|
||||
## Docker Compose Variables for Self hosted Convex ##
|
||||
BACKEND_TAG=latest
|
||||
DASHBOARD_TAG=latest
|
||||
BACKEND_CONTAINER_NAME=convex-backend
|
||||
DASHBOARD_CONTAINER_NAME=convex-dashboard
|
||||
BACKEND_DOMAIN=convex.convexmonorepo.gbrown.org
|
||||
DASHBOARD_DOMAIN=dashboard.convexmonorepo.gbrown.org
|
||||
INSTANCE_NAME=convex
|
||||
#INSTANCE_SECRET=
|
||||
CONVEX_CLOUD_ORIGIN=https://api.convexmonorepo.gbrown.org
|
||||
CONVEX_SITE_ORIGIN=https://convex.convexmonorepo.gbrown.org
|
||||
NEXT_PUBLIC_DEPLOYMENT_URL=https://api.convexmonorepo.gbrown.org
|
||||
DISABLE_BEACON=true
|
||||
REDACT_LOGS_TO_CLIENT=true
|
||||
DO_NOT_REQUIRE_SSL=true
|
||||
POSTGRES_URL= #postgresql://user:password@host:5432/db_name
|
||||
#BACKEND_PORT=
|
||||
#DASHBOARD_PORT=
|
||||
#SITE_PROXY_PORT=
|
||||
#ACTIONS_USER_TIMEOUT_SECS=
|
||||
#RUST_LOG=
|
||||
#RUST_BACKTRACE=
|
||||
|
||||
@@ -1,41 +0,0 @@
|
||||
# Next Environment Variables
|
||||
NETWORK=nginx-bridge
|
||||
NEXT_CONTAINER_NAME=next-app
|
||||
NEXT_DOMAIN_NAME=gbrown.org
|
||||
NEXT_PORT=3000
|
||||
NODE_ENV=production
|
||||
SENTRY_AUTH_TOKEN=
|
||||
NEXT_PUBLIC_SITE_URL=https://gbrown.org
|
||||
NEXT_PUBLIC_CONVEX_URL=https://api.convex.gbrown.org
|
||||
NEXT_PUBLIC_PLAUSIBLE_URL=https://plausible.gbrown.org
|
||||
NEXT_PUBLIC_SENTRY_DSN=
|
||||
NEXT_PUBLIC_SENTRY_ORG=sentry
|
||||
NEXT_PUBLIC_SENTRY_PROJECT_NAME=
|
||||
PAYLOAD_SECRET=
|
||||
PAYLOAD_DB_URL=
|
||||
|
||||
# Convex Environment Variables
|
||||
BACKEND_TAG=latest
|
||||
BACKEND_CONTAINER_NAME=convex-backend
|
||||
BACKEND_DOMAIN_NAME=convex.gbrown.org
|
||||
#BACKEND_PORT=
|
||||
#SITE_PROXY_PORT=
|
||||
DASHBOARD_TAG=latest
|
||||
DASHBOARD_CONTAINER_NAME=convex-dashboard
|
||||
DASHBOARD_DOMAIN=dashboard.convex.gbrown.org
|
||||
#DASHBOARD_PORT=
|
||||
INSTANCE_NAME=convex
|
||||
#INSTANCE_SECRET=
|
||||
CONVEX_CLOUD_ORIGIN=https://api.convex.gbrown.org
|
||||
CONVEX_SITE_ORIGIN=https://convex.gbrown.org
|
||||
DISABLE_BEACON=true
|
||||
REDACT_LOGS_TO_CLIENT=true
|
||||
DO_NOT_REQUIRE_SSL=true
|
||||
NEXT_PUBLIC_DEPLOYMENT_URL=https://api.convex.gbrown.org
|
||||
POSTGRES_URL=
|
||||
#DATABASE_URL=
|
||||
#CONVEX_RELEASE_VERSION_DEV=
|
||||
#ACTIONS_USER_TIMEOUT_SECS=
|
||||
#MYSQL_URL=
|
||||
#RUST_LOG=
|
||||
#RUST_BACKTRACE=
|
||||
@@ -22,9 +22,9 @@ services:
|
||||
- PAYLOAD_SECRET=${PAYLOAD_SECRET}
|
||||
- PAYLOAD_DB_URL=${PAYLOAD_DB_URL}
|
||||
hostname: ${NEXT_CONTAINER_NAME}
|
||||
domainname: ${NEXT_DOMAIN_NAME}
|
||||
domainname: ${NEXT_DOMAIN}
|
||||
networks: ['${NETWORK:-nginx-bridge}']
|
||||
#ports: ['${NEXT_PORT}:3000']
|
||||
#ports: ['${NEXT_PORT}:${NEXT_PORT}']
|
||||
depends_on: ['convex-backend']
|
||||
tty: true
|
||||
stdin_open: true
|
||||
@@ -34,7 +34,7 @@ services:
|
||||
image: ghcr.io/get-convex/convex-backend:${BACKEND_TAG:-latest}
|
||||
container_name: ${BACKEND_CONTAINER_NAME:-convex-backend}
|
||||
hostname: ${BACKEND_CONTAINER_NAME:-convex-backend}
|
||||
domainname: ${BACKEND_DOMAIN_NAME:-convex.gbrown.org}
|
||||
domainname: ${BACKEND_DOMAIN:-convex.gbrown.org}
|
||||
networks: ['${NETWORK:-nginx-bridge}']
|
||||
#user: '1000:1000'
|
||||
#ports: ['${BACKEND_PORT:-3210}:3210','${SITE_PROXY_PORT:-3211}:3211']
|
||||
@@ -63,7 +63,7 @@ services:
|
||||
image: ghcr.io/get-convex/convex-dashboard:${DASHBOARD_TAG:-latest}
|
||||
container_name: ${DASHBOARD_CONTAINER_NAME:-convex-dashboard}
|
||||
hostname: ${DASHBOARD_CONTAINER_NAME:-convex-dashboard}
|
||||
domainname: ${DASHBOARD_DOMAIN_NAME:-dashboard.${BACKEND_DOMAIN_NAME:-convex.gbrown.org}}
|
||||
domainname: ${DASHBOARD_DOMAIN:-dashboard.${BACKEND_DOMAIN:-convex.gbrown.org}}
|
||||
networks: ['${NETWORK:-nginx-bridge}']
|
||||
#user: 1000:1000
|
||||
#ports: ['${DASHBOARD_PORT:-6791}:6791']
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
source ./.env
|
||||
sudo docker compose exec ${BACKEND_CONTAINER_NAME} ./generate_admin_key.sh
|
||||
653
docs/payload-cms.md
Normal file
653
docs/payload-cms.md
Normal file
@@ -0,0 +1,653 @@
|
||||
# Payload CMS In St Pete IT
|
||||
|
||||
This document explains how Payload CMS is integrated into the repo, how the landing and
|
||||
service pages work, how to add new Payload-managed pages, and how to migrate existing
|
||||
hardcoded pages into the current block-based setup.
|
||||
|
||||
## What Payload Is Responsible For
|
||||
|
||||
Payload currently manages the editable marketing content layer inside the Next.js app.
|
||||
That means:
|
||||
|
||||
- the landing page at `/`
|
||||
- the contact page at `/contact`
|
||||
- the service pages at `/services/[slug]`
|
||||
- shared marketing settings in the `site-settings` global
|
||||
- the admin UI at `/admin`
|
||||
- the REST API under `/api`
|
||||
- live preview and route refresh when editors save or publish changes
|
||||
|
||||
Payload is not replacing Convex. Convex still handles the product backend: auth,
|
||||
tickets, invoices, appointments, portal data, and admin operations. Payload only owns
|
||||
the CMS side for page content.
|
||||
|
||||
## High-Level Architecture
|
||||
|
||||
### Core config
|
||||
|
||||
Payload is configured in `apps/next/src/payload.config.ts`.
|
||||
|
||||
Important pieces there:
|
||||
|
||||
- `postgresAdapter(...)` points Payload at Postgres through `PAYLOAD_DB_URL`
|
||||
- `secret: env.PAYLOAD_SECRET` enables Payload auth/session security
|
||||
- `collections: [Media, Pages]` registers the current CMS collections
|
||||
- `globals: [SiteSettings]` registers shared settings
|
||||
- `admin.livePreview` enables live preview for the `pages` collection
|
||||
- `typescript.outputFile` writes generated types to `apps/next/payload-types.ts`
|
||||
|
||||
### Collections and globals
|
||||
|
||||
Current CMS entities:
|
||||
|
||||
- `pages` in `apps/next/src/payload/collections/pages.ts`
|
||||
- stores both the landing page and service pages
|
||||
- uses `pageType` to distinguish `landing` vs `service`
|
||||
- stores block layout in `layout`
|
||||
- stores SEO fields in `seo`
|
||||
- stores service-specific structured data in `structuredData`
|
||||
- `media` in `apps/next/src/payload/collections/media.ts`
|
||||
- image uploads used by blocks and SEO
|
||||
- `site-settings` in `apps/next/src/payload/globals/site-settings.ts`
|
||||
- shared business info and service-page CTA settings
|
||||
|
||||
### Block system
|
||||
|
||||
Payload page content is built from reusable blocks.
|
||||
|
||||
Schema side:
|
||||
|
||||
- `apps/next/src/payload/blocks/*.ts`
|
||||
- exported via `apps/next/src/payload/blocks/index.ts`
|
||||
|
||||
Render side:
|
||||
|
||||
- `apps/next/src/components/payload/blocks/*.tsx`
|
||||
- selected by `apps/next/src/components/payload/blocks/render-blocks.tsx`
|
||||
|
||||
The rule is simple: every Payload block needs both parts.
|
||||
|
||||
- schema block: defines the fields editors can fill in
|
||||
- renderer block: turns that block data into frontend UI
|
||||
|
||||
If one side is missing, the admin or the frontend will be incomplete.
|
||||
|
||||
### Frontend route flow
|
||||
|
||||
Landing page route:
|
||||
|
||||
- `apps/next/src/app/(frontend)/page.tsx`
|
||||
|
||||
Contact page route:
|
||||
|
||||
- `apps/next/src/app/(frontend)/contact/page.tsx`
|
||||
|
||||
Service page route:
|
||||
|
||||
- `apps/next/src/app/(frontend)/services/[slug]/page.tsx`
|
||||
|
||||
Shared server fetch helpers:
|
||||
|
||||
- `apps/next/src/lib/payload-helpers.tsx`
|
||||
|
||||
Behavior:
|
||||
|
||||
1. the route calls `getPageBySlug(...)`
|
||||
2. Payload fetches the matching `pages` document
|
||||
3. the page metadata is generated from `seo` / fallback values
|
||||
4. the page content is rendered through `LivePreviewPage`
|
||||
5. `LivePreviewPage` uses Payload live preview to update content in the editor iframe
|
||||
6. `RefreshRouteOnSave` refreshes the route after save/publish so server-rendered data
|
||||
stays in sync
|
||||
|
||||
### Live preview and publish behavior
|
||||
|
||||
There are two cooperating pieces:
|
||||
|
||||
- `apps/next/src/components/payload/live-preview-page.tsx`
|
||||
- subscribes to Payload live preview messages with `useLivePreview`
|
||||
- `apps/next/src/components/payload/refresh-route-on-save.tsx`
|
||||
- refreshes the current route after document saves/publishes
|
||||
|
||||
Important requirement:
|
||||
|
||||
- `src/proxy.ts` and `src/lib/proxy/ban-sus-ips.ts` must not block valid Payload REST API
|
||||
requests under `/api`
|
||||
|
||||
That was a real bug during setup: `PATCH` requests to publish pages were being blocked by
|
||||
the suspicious-method middleware until `/api` writes were explicitly allowed.
|
||||
|
||||
## Seeded Content
|
||||
|
||||
Payload content is seeded from:
|
||||
|
||||
- `apps/next/src/payload/seed/landing-page.ts`
|
||||
- `apps/next/src/payload/seed/service-pages.ts`
|
||||
- `apps/next/src/payload/seed/index.ts`
|
||||
|
||||
Run the seed with:
|
||||
|
||||
```bash
|
||||
cd apps/next
|
||||
bun run seed
|
||||
```
|
||||
|
||||
What it does:
|
||||
|
||||
- updates `site-settings`
|
||||
- creates or updates the `home` landing page
|
||||
- creates or updates the `contact` page
|
||||
- creates or updates the default service pages
|
||||
|
||||
This matters because a fresh Payload database will otherwise return no page documents and
|
||||
the frontend route will 404.
|
||||
|
||||
## Environment Variables
|
||||
|
||||
Payload depends on these env vars:
|
||||
|
||||
- `PAYLOAD_SECRET`
|
||||
- `PAYLOAD_DB_URL`
|
||||
- `NEXT_PUBLIC_SITE_URL`
|
||||
|
||||
Why they matter:
|
||||
|
||||
- `PAYLOAD_SECRET` secures Payload sessions and server behavior
|
||||
- `PAYLOAD_DB_URL` connects Payload to Postgres
|
||||
- `NEXT_PUBLIC_SITE_URL` is used by live preview to target the frontend correctly
|
||||
|
||||
If live preview points to the wrong place, or publish/save requests appear to work but the
|
||||
preview never updates, this is one of the first things to check.
|
||||
|
||||
## How To Create A New Page Like The Current Ones
|
||||
|
||||
This section assumes you want another page managed by the existing `pages` collection.
|
||||
|
||||
### Option A: Create a new service page using the existing system
|
||||
|
||||
This is the simplest case.
|
||||
|
||||
1. Open Payload admin at `/admin`
|
||||
2. Go to the `Pages` collection
|
||||
3. Create a new document
|
||||
4. Set:
|
||||
- `title`
|
||||
- `slug`
|
||||
- `pageType = service`
|
||||
5. Build the `layout` using the existing blocks
|
||||
6. Fill in `seo`
|
||||
7. Fill in `structuredData.serviceName` and `structuredData.serviceDescription`
|
||||
8. Save draft or publish
|
||||
9. Visit `/services/<slug>`
|
||||
|
||||
Why this works without adding a new route:
|
||||
|
||||
- the app already has a dynamic route at `apps/next/src/app/(frontend)/services/[slug]/page.tsx`
|
||||
- any `pages` doc with `pageType: 'service'` and a matching slug can render there
|
||||
|
||||
If the page should exist by default in new environments, also add it to
|
||||
`apps/next/src/payload/seed/service-pages.ts`.
|
||||
|
||||
### Option B: Create another landing-style page with the same block approach
|
||||
|
||||
If the page is not a service page, decide whether it belongs:
|
||||
|
||||
- in the existing `pages` collection with a new route, or
|
||||
- in a new Payload collection if the content model is materially different
|
||||
|
||||
If it fits `pages`:
|
||||
|
||||
1. add or reuse blocks in the `layout`
|
||||
2. create the frontend route that fetches the document by slug
|
||||
3. generate metadata from the document
|
||||
4. render the layout with `LivePreviewPage`
|
||||
5. include `RefreshRouteOnSave`
|
||||
|
||||
Example pattern:
|
||||
|
||||
```tsx
|
||||
const page = await getPageBySlug('some-slug');
|
||||
|
||||
if (!page) return notFound();
|
||||
|
||||
return (
|
||||
<main>
|
||||
<RefreshRouteOnSave serverURL={env.NEXT_PUBLIC_SITE_URL} />
|
||||
<LivePreviewPage page={page} serverURL={env.NEXT_PUBLIC_SITE_URL} />
|
||||
</main>
|
||||
);
|
||||
```
|
||||
|
||||
## Copy-Paste Route Template
|
||||
|
||||
Use this when creating a new non-service Payload-backed page route.
|
||||
|
||||
Adjust these parts:
|
||||
|
||||
- the slug passed to `getPageBySlug(...)`
|
||||
- the metadata fallback values
|
||||
- any JSON-LD you want to inject
|
||||
|
||||
```tsx
|
||||
import type { Metadata } from 'next';
|
||||
import { notFound } from 'next/navigation';
|
||||
import Script from 'next/script';
|
||||
import { LivePreviewPage } from '@/components/payload/live-preview-page';
|
||||
import { RefreshRouteOnSave } from '@/components/payload/refresh-route-on-save';
|
||||
import { env } from '@/env';
|
||||
import { generatePageMetadata } from '@/lib/metadata';
|
||||
import { getPageBySlug } from '@/lib/payload-helpers';
|
||||
import { jsonLd } from '@/lib/structured-data';
|
||||
|
||||
export const generateMetadata = async (): Promise<Metadata> => {
|
||||
const page = await getPageBySlug('some-slug');
|
||||
|
||||
if (!page) {
|
||||
return generatePageMetadata({
|
||||
title: 'Fallback Title',
|
||||
description: 'Fallback description.',
|
||||
path: '/some-path',
|
||||
});
|
||||
}
|
||||
|
||||
return generatePageMetadata({
|
||||
title: page.seo?.metaTitle ?? page.title,
|
||||
description: page.seo?.metaDescription ?? 'Fallback description.',
|
||||
path: '/some-path',
|
||||
keywords: page.seo?.keywords?.filter(Boolean) as string[] | undefined,
|
||||
noIndex: page.seo?.noIndex ?? false,
|
||||
});
|
||||
};
|
||||
|
||||
const SomePage = async () => {
|
||||
const page = await getPageBySlug('some-slug');
|
||||
|
||||
if (!page) return notFound();
|
||||
|
||||
return (
|
||||
<main>
|
||||
<RefreshRouteOnSave serverURL={env.NEXT_PUBLIC_SITE_URL} />
|
||||
<Script
|
||||
id='ld-json-some-page'
|
||||
type='application/ld+json'
|
||||
dangerouslySetInnerHTML={{
|
||||
__html: jsonLd({ '@context': 'https://schema.org' }),
|
||||
}}
|
||||
/>
|
||||
<LivePreviewPage page={page} serverURL={env.NEXT_PUBLIC_SITE_URL} />
|
||||
</main>
|
||||
);
|
||||
};
|
||||
|
||||
export default SomePage;
|
||||
```
|
||||
|
||||
If the page does not need JSON-LD, remove the `Script` import and block.
|
||||
|
||||
For service pages, do not create a separate static route unless there is a strong reason.
|
||||
Prefer the existing dynamic route at `apps/next/src/app/(frontend)/services/[slug]/page.tsx`.
|
||||
|
||||
## Copy-Paste Block Template
|
||||
|
||||
Use this when you need a new reusable Payload block for a page migration.
|
||||
|
||||
Schema file example:
|
||||
|
||||
```ts
|
||||
import type { Block } from 'payload';
|
||||
|
||||
export const ExampleBlock: Block = {
|
||||
slug: 'exampleBlock',
|
||||
labels: {
|
||||
singular: 'Example Block',
|
||||
plural: 'Example Blocks',
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'title',
|
||||
type: 'text',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: 'description',
|
||||
type: 'textarea',
|
||||
},
|
||||
{
|
||||
name: 'items',
|
||||
type: 'array',
|
||||
fields: [
|
||||
{
|
||||
name: 'label',
|
||||
type: 'text',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
```
|
||||
|
||||
Renderer example:
|
||||
|
||||
```tsx
|
||||
import type { Page } from '../../../../payload-types';
|
||||
|
||||
type ExampleBlockData = Extract<
|
||||
NonNullable<Page['layout']>[number],
|
||||
{ blockType: 'exampleBlock' }
|
||||
>;
|
||||
|
||||
export const ExampleBlockRenderer = ({
|
||||
block,
|
||||
}: {
|
||||
block: ExampleBlockData;
|
||||
}) => {
|
||||
return (
|
||||
<section>
|
||||
<h2>{block.title}</h2>
|
||||
{block.description && <p>{block.description}</p>}
|
||||
<ul>
|
||||
{block.items?.map((item) => (
|
||||
<li key={item.id}>{item.label}</li>
|
||||
))}
|
||||
</ul>
|
||||
</section>
|
||||
);
|
||||
};
|
||||
```
|
||||
|
||||
Registration checklist:
|
||||
|
||||
1. export the schema block from `apps/next/src/payload/blocks/index.ts`
|
||||
2. add the block to `apps/next/src/payload/collections/pages.ts`
|
||||
3. add the renderer to `apps/next/src/components/payload/blocks/render-blocks.tsx`
|
||||
4. regenerate `apps/next/payload-types.ts`
|
||||
|
||||
Type generation command:
|
||||
|
||||
```bash
|
||||
cd apps/next
|
||||
bun with-env bunx payload generate:types --config src/payload.config.ts
|
||||
```
|
||||
|
||||
## Copy-Paste Seed Template
|
||||
|
||||
Use this when a new Payload-backed page should exist automatically in local/dev or fresh
|
||||
environments.
|
||||
|
||||
Seed document example:
|
||||
|
||||
```ts
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
export const examplePageSeed: Record<string, any> = {
|
||||
title: 'Example Page',
|
||||
slug: 'example',
|
||||
pageType: 'standard',
|
||||
layout: [
|
||||
{
|
||||
blockType: 'exampleBlock',
|
||||
title: 'Hello from Payload',
|
||||
},
|
||||
],
|
||||
seo: {
|
||||
metaTitle: 'Example Page',
|
||||
metaDescription: 'Example description.',
|
||||
noIndex: false,
|
||||
},
|
||||
_status: 'published',
|
||||
};
|
||||
```
|
||||
|
||||
Seed upsert example:
|
||||
|
||||
```ts
|
||||
const existing = await payload.find({
|
||||
collection: 'pages',
|
||||
where: { slug: { equals: 'example' } },
|
||||
limit: 1,
|
||||
});
|
||||
|
||||
if (existing.docs.length > 0) {
|
||||
await payload.update({
|
||||
collection: 'pages',
|
||||
id: existing.docs[0]!.id,
|
||||
draft: false,
|
||||
data: examplePageSeed as never,
|
||||
});
|
||||
} else {
|
||||
await payload.create({
|
||||
collection: 'pages',
|
||||
draft: false,
|
||||
data: examplePageSeed as never,
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
The important part is not the exact code shape. The important part is that a new
|
||||
Payload-backed route should not depend on manually created admin content if the page is
|
||||
expected to exist in every environment.
|
||||
|
||||
## How To Add A New Block
|
||||
|
||||
If editors need a new section type, add a new block.
|
||||
|
||||
### 1. Create the Payload schema block
|
||||
|
||||
Add a file under:
|
||||
|
||||
- `apps/next/src/payload/blocks/<block-name>.ts`
|
||||
|
||||
This file defines the editor-facing fields.
|
||||
|
||||
### 2. Export it from the block barrel
|
||||
|
||||
Update:
|
||||
|
||||
- `apps/next/src/payload/blocks/index.ts`
|
||||
|
||||
### 3. Register it in the `pages` collection
|
||||
|
||||
Update:
|
||||
|
||||
- `apps/next/src/payload/collections/pages.ts`
|
||||
|
||||
Add the block to the `layout.blocks` array.
|
||||
|
||||
### 4. Build the frontend renderer
|
||||
|
||||
Add:
|
||||
|
||||
- `apps/next/src/components/payload/blocks/<block-name>.tsx`
|
||||
|
||||
### 5. Register the renderer
|
||||
|
||||
Update:
|
||||
|
||||
- `apps/next/src/components/payload/blocks/render-blocks.tsx`
|
||||
|
||||
### 6. Seed it if needed
|
||||
|
||||
If the new block is used in default pages, update the seed files too.
|
||||
|
||||
## How To Migrate An Existing Hardcoded Page To Payload CMS
|
||||
|
||||
This is the full workflow for converting an old React page into the current CMS model.
|
||||
|
||||
### Step 1: Audit the existing page
|
||||
|
||||
Identify all of these before touching code:
|
||||
|
||||
- route path
|
||||
- metadata and title
|
||||
- structured data / JSON-LD
|
||||
- hero section
|
||||
- repeated content sections
|
||||
- CTA areas
|
||||
- any shared content that should become global settings
|
||||
|
||||
Do not start by blindly copying JSX into one giant rich text field. The current system is
block-based so that each section stays individually editable and reusable by editors.
|
||||
|
||||
### Step 2: Break the page into reusable sections
|
||||
|
||||
Ask which parts are:
|
||||
|
||||
- already represented by existing blocks
|
||||
- better suited as a new reusable block
|
||||
- global content instead of page-local content
|
||||
|
||||
Examples:
|
||||
|
||||
- hero section -> `landingHero` or `serviceHero`
|
||||
- card rows -> `cardGrid`
|
||||
- comparisons -> `tabularComparison` or `twoColumnComparison`
|
||||
- FAQ -> `faq`
|
||||
- pricing -> `pricingCards`
|
||||
|
||||
### Step 3: Create new blocks if the page needs them
|
||||
|
||||
If the existing block library is not enough:
|
||||
|
||||
1. add a schema block in `apps/next/src/payload/blocks/`
|
||||
2. add a renderer in `apps/next/src/components/payload/blocks/`
|
||||
3. register both sides
|
||||
|
||||
Keep blocks composable and editor-friendly. Prefer a few clearly named fields over one big
|
||||
opaque content blob.
|
||||
|
||||
### Step 4: Move SEO into document fields
|
||||
|
||||
Map metadata into the Payload document:
|
||||
|
||||
- title -> `seo.metaTitle` or route fallback
|
||||
- meta description -> `seo.metaDescription`
|
||||
- keywords -> `seo.keywords`
|
||||
- noindex -> `seo.noIndex`
|
||||
- service-specific schema -> `structuredData`
|
||||
|
||||
If the page uses JSON-LD, keep the generation logic in the route and read values from the
|
||||
Payload document.
|
||||
|
||||
### Step 5: Replace the hardcoded route with a Payload-backed route
|
||||
|
||||
The route should:
|
||||
|
||||
1. fetch the page with `getPageBySlug`
|
||||
2. return `notFound()` if it does not exist
|
||||
3. generate metadata from the doc
|
||||
4. render `RefreshRouteOnSave`
|
||||
5. render JSON-LD with `next/script` if needed
|
||||
6. render `LivePreviewPage`
|
||||
|
||||
For service pages, prefer reusing the dynamic service route instead of creating many
|
||||
one-off route files.
|
||||
|
||||
### Step 6: Seed the migrated page
|
||||
|
||||
If the page should exist locally or in fresh environments, add it to the seed system.
|
||||
|
||||
This prevents the common failure mode where the route now expects Payload content but the
|
||||
database has no corresponding document yet.
|
||||
|
||||
### Step 7: Verify the full editor flow
|
||||
|
||||
After migration, verify all of these:
|
||||
|
||||
- the route loads without 404
|
||||
- the document appears in `/admin`
|
||||
- draft save works
|
||||
- publish works
|
||||
- live preview updates while editing
|
||||
- route refreshes after save/publish
|
||||
- dark/light preview styling still looks correct
|
||||
- seeded content loads on a fresh database
|
||||
|
||||
### Step 8: Delete the old hardcoded page only after verification
|
||||
|
||||
Do not remove the old page implementation until the Payload-backed route is proven working.
|
||||
|
||||
For the service-page migration in this repo, the safe order was:
|
||||
|
||||
1. create Payload collection and blocks
|
||||
2. add frontend readers and renderers
|
||||
3. seed the docs
|
||||
4. verify save/publish/live preview
|
||||
5. remove the old hardcoded service page files
|
||||
|
||||
That order avoids breaking the site in the middle of the migration.
|
||||
|
||||
## Recommended Checklist For Future Migrations
|
||||
|
||||
Use this exact order:
|
||||
|
||||
1. model the content shape
|
||||
2. add or reuse blocks
|
||||
3. add renderers
|
||||
4. register blocks in the collection
|
||||
5. wire the route to Payload
|
||||
6. move metadata and structured data
|
||||
7. seed the content
|
||||
8. verify preview and publish
|
||||
9. remove old hardcoded components/routes
|
||||
|
||||
## Common Failure Modes
|
||||
|
||||
### 1. Page 404s after migration
|
||||
|
||||
Usually means the Payload document does not exist yet.
|
||||
|
||||
Check:
|
||||
|
||||
- the route slug
|
||||
- the document slug
|
||||
- whether the seed ran
|
||||
|
||||
### 2. Publish/save shows `Not Found`
|
||||
|
||||
Usually means middleware or proxy rules are intercepting Payload API writes before they
|
||||
reach Payload.
|
||||
|
||||
Check:
|
||||
|
||||
- `apps/next/src/proxy.ts`
|
||||
- `apps/next/src/lib/proxy/ban-sus-ips.ts`
|
||||
|
||||
### 3. Live preview frame loads but does not update
|
||||
|
||||
Usually means one of these is wrong:
|
||||
|
||||
- `NEXT_PUBLIC_SITE_URL`
|
||||
- Payload live preview config URL
|
||||
- missing `useLivePreview` on the frontend page
|
||||
- missing `RefreshRouteOnSave`
|
||||
|
||||
### 4. Dark mode preview looks wrong
|
||||
|
||||
Usually means the theme classes or body-level theme tokens are not being applied inside the
|
||||
preview iframe.
|
||||
|
||||
Check the frontend layout and ensure `bg-background` / `text-foreground` are applied at the
|
||||
body level.
|
||||
|
||||
## Important Files At A Glance
|
||||
|
||||
- config: `apps/next/src/payload.config.ts`
|
||||
- page collection: `apps/next/src/payload/collections/pages.ts`
|
||||
- media collection: `apps/next/src/payload/collections/media.ts`
|
||||
- global settings: `apps/next/src/payload/globals/site-settings.ts`
|
||||
- block schemas: `apps/next/src/payload/blocks/*`
|
||||
- block renderers: `apps/next/src/components/payload/blocks/*`
|
||||
- route helpers: `apps/next/src/lib/payload-helpers.tsx`
|
||||
- landing route: `apps/next/src/app/(frontend)/page.tsx`
|
||||
- contact route: `apps/next/src/app/(frontend)/contact/page.tsx`
|
||||
- service route: `apps/next/src/app/(frontend)/services/[slug]/page.tsx`
|
||||
- live preview client bridge: `apps/next/src/components/payload/live-preview-page.tsx`
|
||||
- save/publish refresh bridge: `apps/next/src/components/payload/refresh-route-on-save.tsx`
|
||||
- seed entrypoint: `apps/next/src/payload/seed/index.ts`
|
||||
|
||||
## Practical Rule Of Thumb
|
||||
|
||||
If the change is about editable marketing content, reach for Payload first.
|
||||
|
||||
If the change is about business logic, authenticated workflows, tickets, invoices,
|
||||
appointments, or portal/admin operations, it probably belongs in Convex instead.
|
||||
20
scripts/build-next-app
Executable file
20
scripts/build-next-app
Executable file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash
# Build and (re)start the Next.js app container, driven by the single
# repo-level .env file.
set -euo pipefail

SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd -- "$SCRIPT_DIR/.." && pwd)"

ENV_FILE="$ROOT_DIR/.env"
COMPOSE_FILE="$ROOT_DIR/docker/compose.yml"

if [ ! -f "$ENV_FILE" ]; then
  echo "Error: env file not found at $ENV_FILE" >&2
  exit 1
fi

if [ ! -f "$COMPOSE_FILE" ]; then
  echo "Error: compose file not found at $COMPOSE_FILE" >&2
  exit 1
fi

# Export every variable from .env into this shell so the service name below
# resolves.
set -a
source "$ENV_FILE"
set +a

# Fail early with a clear message instead of a bare `set -u` unbound-variable
# error if the service name is missing from .env.
: "${NEXT_CONTAINER_NAME:?NEXT_CONTAINER_NAME is not set in .env}"

sudo docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" build "$NEXT_CONTAINER_NAME"
sudo docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" up -d "$NEXT_CONTAINER_NAME"
|
||||
49
scripts/docker-compose
Executable file
49
scripts/docker-compose
Executable file
@@ -0,0 +1,49 @@
|
||||
#!/usr/bin/env bash
# Wrapper around `docker compose` that loads the repo-level .env and translates
# the friendly service aliases (backend/dashboard/next) into the real container
# names configured there. All other arguments pass through untouched.
set -euo pipefail

SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd -- "$SCRIPT_DIR/.." && pwd)"

ENV_FILE="$ROOT_DIR/.env"
COMPOSE_FILE="$ROOT_DIR/docker/compose.yml"

if [ ! -f "$ENV_FILE" ]; then
  echo "Error: env file not found at $ENV_FILE" >&2
  exit 1
fi

if [ ! -f "$COMPOSE_FILE" ]; then
  echo "Error: compose file not found at $COMPOSE_FILE" >&2
  exit 1
fi

# Export everything from .env so the *_CONTAINER_NAME variables resolve.
set -a
source "$ENV_FILE"
set +a

translated_args=()

for arg in "$@"; do
  case "$arg" in
    backend)
      : "${BACKEND_CONTAINER_NAME:?BACKEND_CONTAINER_NAME is not set in .env}"
      translated_args+=("$BACKEND_CONTAINER_NAME")
      ;;
    dashboard)
      : "${DASHBOARD_CONTAINER_NAME:?DASHBOARD_CONTAINER_NAME is not set in .env}"
      translated_args+=("$DASHBOARD_CONTAINER_NAME")
      ;;
    next)
      : "${NEXT_CONTAINER_NAME:?NEXT_CONTAINER_NAME is not set in .env}"
      translated_args+=("$NEXT_CONTAINER_NAME")
      ;;
    *)
      translated_args+=("$arg")
      ;;
  esac
done

# Expanding an empty array with "${translated_args[@]}" trips `set -u` on
# bash < 4.4, so use the ${arr[@]+...} form which expands to nothing when the
# array is empty.
exec sudo docker compose \
  --env-file "$ENV_FILE" \
  -f "$COMPOSE_FILE" \
  ${translated_args[@]+"${translated_args[@]}"}
|
||||
20
scripts/generate-convex-admin-key
Executable file
20
scripts/generate-convex-admin-key
Executable file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash
# Run the Convex backend container's admin-key generator and print the result,
# using the single repo-level .env for configuration.
set -euo pipefail

SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd -- "$SCRIPT_DIR/.." && pwd)"

ENV_FILE="$ROOT_DIR/.env"
COMPOSE_FILE="$ROOT_DIR/docker/compose.yml"

if [ ! -f "$ENV_FILE" ]; then
  echo "Error: env file not found at $ENV_FILE" >&2
  exit 1
fi

if [ ! -f "$COMPOSE_FILE" ]; then
  echo "Error: compose file not found at $COMPOSE_FILE" >&2
  exit 1
fi

# Export everything from .env so the container name resolves.
set -a
source "$ENV_FILE"
set +a

# Fail early with a clear message if the service name is missing from .env.
: "${BACKEND_CONTAINER_NAME:?BACKEND_CONTAINER_NAME is not set in .env}"

# The backend container ships generate_admin_key.sh; exec it in the running
# service.
sudo docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" exec \
  "$BACKEND_CONTAINER_NAME" ./generate_admin_key.sh
|
||||
16
scripts/generate-convex-auth-keys.mjs
Normal file
16
scripts/generate-convex-auth-keys.mjs
Normal file
@@ -0,0 +1,16 @@
|
||||
#!/usr/bin/env node
/**
 * Generates an RS256 key pair for Convex auth and prints the two env entries
 * (JWT_PRIVATE_KEY and JWKS) ready to paste into the single .env file.
 */
import { exportJWK, exportPKCS8, generateKeyPair } from 'jose';

const { privateKey, publicKey } = await generateKeyPair('RS256', {
  extractable: true,
});

const pkcs8 = await exportPKCS8(privateKey);
const jwk = await exportJWK(publicKey);

// .env values must fit on one line, so the PEM newlines are folded to spaces.
const privateKeyLine = `JWT_PRIVATE_KEY="${pkcs8.trimEnd().replace(/\n/g, ' ')}"`;
const jwksLine = `JWKS=${JSON.stringify({ keys: [{ use: 'sig', ...jwk }] })}`;

process.stdout.write(`${privateKeyLine}\n`);
process.stdout.write(`${jwksLine}\n`);
|
||||
22
scripts/update-convex
Executable file
22
scripts/update-convex
Executable file
@@ -0,0 +1,22 @@
|
||||
#!/usr/bin/env bash
# Pull the latest Convex backend + dashboard images and restart both services,
# driven by the single repo-level .env file.
set -euo pipefail

SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd -- "$SCRIPT_DIR/.." && pwd)"

ENV_FILE="$ROOT_DIR/.env"
COMPOSE_FILE="$ROOT_DIR/docker/compose.yml"

if [ ! -f "$ENV_FILE" ]; then
  echo "Error: env file not found at $ENV_FILE" >&2
  exit 1
fi

if [ ! -f "$COMPOSE_FILE" ]; then
  echo "Error: compose file not found at $COMPOSE_FILE" >&2
  exit 1
fi

# Export everything from .env so both service names resolve.
set -a
source "$ENV_FILE"
set +a

# Fail early with clear messages if either service name is missing from .env.
: "${BACKEND_CONTAINER_NAME:?BACKEND_CONTAINER_NAME is not set in .env}"
: "${DASHBOARD_CONTAINER_NAME:?DASHBOARD_CONTAINER_NAME is not set in .env}"

sudo docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" pull \
  "$BACKEND_CONTAINER_NAME" "$DASHBOARD_CONTAINER_NAME"
sudo docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" up -d \
  "$BACKEND_CONTAINER_NAME" "$DASHBOARD_CONTAINER_NAME"
|
||||
21
scripts/update-next-app
Executable file
21
scripts/update-next-app
Executable file
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env bash
# Pull the latest source, then rebuild and restart the Next.js app container,
# driven by the single repo-level .env file.
set -euo pipefail

SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
ROOT_DIR="$(cd -- "$SCRIPT_DIR/.." && pwd)"

ENV_FILE="$ROOT_DIR/.env"
COMPOSE_FILE="$ROOT_DIR/docker/compose.yml"

if [ ! -f "$ENV_FILE" ]; then
  echo "Error: env file not found at $ENV_FILE" >&2
  exit 1
fi

if [ ! -f "$COMPOSE_FILE" ]; then
  echo "Error: compose file not found at $COMPOSE_FILE" >&2
  exit 1
fi

# Export everything from .env so the service name resolves.
set -a
source "$ENV_FILE"
set +a

# Fail early with a clear message if the service name is missing from .env.
: "${NEXT_CONTAINER_NAME:?NEXT_CONTAINER_NAME is not set in .env}"

# Pull in the repo root explicitly: a bare `git pull` would act on whatever
# directory the caller happens to be in, not the repo this script belongs to.
git -C "$ROOT_DIR" pull

sudo docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" build "$NEXT_CONTAINER_NAME"
sudo docker compose --env-file "$ENV_FILE" -f "$COMPOSE_FILE" up -d "$NEXT_CONTAINER_NAME"
|
||||
@@ -4,6 +4,8 @@
|
||||
"globalEnv": [
|
||||
"NODE_ENV",
|
||||
"SENTRY_AUTH_TOKEN",
|
||||
"PAYLOAD_SECRET",
|
||||
"PAYLOAD_DB_URL",
|
||||
"NEXT_PUBLIC_SITE_URL",
|
||||
"NEXT_PUBLIC_CONVEX_URL",
|
||||
"NEXT_PUBLIC_PLAUSIBLE_URL",
|
||||
@@ -11,8 +13,6 @@
|
||||
"NEXT_PUBLIC_SENTRY_URL",
|
||||
"NEXT_PUBLIC_SENTRY_ORG",
|
||||
"NEXT_PUBLIC_SENTRY_PROJECT_NAME",
|
||||
"PAYLOAD_SECRET",
|
||||
"PAYLOAD_DB_URL",
|
||||
"CONVEX_SELF_HOSTED_URL",
|
||||
"CONVEX_SELF_HOSTED_ADMIN_KEY",
|
||||
"CONVEX_SITE_URL",
|
||||
|
||||
Reference in New Issue
Block a user