->(({ className, ...props }, ref) => (
-  <td
-    ref={ref}
-    className={cn(
-      'p-4 align-middle [&:has([role=checkbox])]:translate-y-[2px]',
-      className,
-    )}
-    {...props}
-  />
-));
-TableCell.displayName = 'TableCell';
-
-const TableCaption = React.forwardRef<
-  HTMLTableCaptionElement,
-  React.HTMLAttributes<HTMLTableCaptionElement>
->(({ className, ...props }, ref) => (
-  <caption
-    ref={ref}
-    className={cn('mt-4 text-sm text-muted-foreground', className)}
-    {...props}
-  />
-));
-TableCaption.displayName = 'TableCaption';
-
-export {
- Table,
- TableHeader,
- TableBody,
- TableFooter,
- TableHead,
- TableRow,
- TableCell,
- TableCaption,
-};
diff --git a/src/env.js b/src/env.js
index 8c66c42..202e134 100644
--- a/src/env.js
+++ b/src/env.js
@@ -3,20 +3,31 @@ import { z } from 'zod';
export const env = createEnv({
/**
- * Specify your server-side environment variables schema here. This way you can ensure the app
- * isn't built with invalid env vars.
+ * Specify your server-side environment variables schema here.
+ * This way you can ensure the app isn't built with invalid env vars.
*/
server: {
- NODE_ENV: z.enum(['development', 'test', 'production']),
+ NODE_ENV: z
+ .enum(['development', 'test', 'production'])
+ .default('development'),
+ SENTRY_AUTH_TOKEN: z.string().min(1),
+ CI: z.enum(['true', 'false']).default('false'),
},
/**
- * Specify your client-side environment variables schema here. This way you can ensure the app
- * isn't built with invalid env vars. To expose them to the client, prefix them with
- * `NEXT_PUBLIC_`.
+ * Specify your client-side environment variables schema here.
+ * This way you can ensure the app isn't built with invalid env vars.
+ * To expose them to the client, prefix them with `NEXT_PUBLIC_`.
*/
client: {
- // NEXT_PUBLIC_CLIENTVAR: z.string(),
+ NEXT_PUBLIC_SUPABASE_URL: z.string().url(),
+ NEXT_PUBLIC_SUPABASE_ANON_KEY: z.string().min(1),
+ NEXT_PUBLIC_SITE_URL: z.string().url().default('http://localhost:3000'),
+ NEXT_PUBLIC_SENTRY_DSN: z.string().min(1),
+ NEXT_PUBLIC_SENTRY_URL: z
+ .string()
+ .url()
+ .default('https://sentry.gbrown.org'),
},
/**
@@ -25,7 +36,14 @@ export const env = createEnv({
*/
runtimeEnv: {
NODE_ENV: process.env.NODE_ENV,
- // NEXT_PUBLIC_CLIENTVAR: process.env.NEXT_PUBLIC_CLIENTVAR,
+ SENTRY_AUTH_TOKEN: process.env.SENTRY_AUTH_TOKEN,
+ CI: process.env.CI,
+
+ NEXT_PUBLIC_SUPABASE_URL: process.env.NEXT_PUBLIC_SUPABASE_URL,
+ NEXT_PUBLIC_SUPABASE_ANON_KEY: process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY,
+ NEXT_PUBLIC_SITE_URL: process.env.NEXT_PUBLIC_SITE_URL,
+ NEXT_PUBLIC_SENTRY_DSN: process.env.NEXT_PUBLIC_SENTRY_DSN,
+ NEXT_PUBLIC_SENTRY_URL: process.env.NEXT_PUBLIC_SENTRY_URL,
},
/**
* Run `build` or `dev` with `SKIP_ENV_VALIDATION` to skip env validation. This is especially
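Downstream code should import the validated `env` object instead of reading `process.env` directly, so a missing or malformed variable fails the build rather than surfacing at runtime. A minimal sketch of a consumer, assuming the `@/env` alias resolves to the file above (the Supabase client construction is illustrative, not part of this diff):

```ts
// Hypothetical consumer: a browser Supabase client built from the validated env.
import { createBrowserClient } from '@supabase/ssr';
import { env } from '@/env'; // assumed alias for src/env.js

export const createClient = () =>
  createBrowserClient(
    env.NEXT_PUBLIC_SUPABASE_URL, // validated as a URL at build time
    env.NEXT_PUBLIC_SUPABASE_ANON_KEY, // validated as a non-empty string
  );
```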
diff --git a/src/instrumentation-client.ts b/src/instrumentation-client.ts
new file mode 100644
index 0000000..7f44f8d
--- /dev/null
+++ b/src/instrumentation-client.ts
@@ -0,0 +1,36 @@
+// This file configures the initialization of Sentry on the client.
+// The added config here will be used whenever a user loads a page in their browser.
+// https://docs.sentry.io/platforms/javascript/guides/nextjs/
+import * as Sentry from '@sentry/nextjs';
+
+Sentry.init({
+ dsn: process.env.NEXT_PUBLIC_SENTRY_DSN!,
+
+ // Adds request headers and IP for users, for more info visit:
+ // https://docs.sentry.io/platforms/javascript/guides/nextjs/configuration/options/#sendDefaultPii
+ sendDefaultPii: true,
+
+ // Set tracesSampleRate to 1.0 to capture 100%
+ // of transactions for tracing.
+ // We recommend adjusting this value in production
+ // Learn more at
+ // https://docs.sentry.io/platforms/javascript/configuration/options/#traces-sample-rate
+ tracesSampleRate: 1.0,
+ // Replay may only be enabled for the client-side
+ integrations: [Sentry.replayIntegration()],
+
+ // Capture Replay for 10% of all sessions,
+ // plus for 100% of sessions with an error
+ // Learn more at
+ // https://docs.sentry.io/platforms/javascript/session-replay/configuration/#general-integration-configuration
+ replaysSessionSampleRate: 0.1,
+ replaysOnErrorSampleRate: 1.0,
+
+ // Note: if you want to override the automatic release value, do not set a
+ // `release` value here - use the environment variable `SENTRY_RELEASE`, so
+ // that it will also get attached to your source maps
+});
+
+// This export will instrument router navigations, and is only relevant if you enable tracing.
+// `captureRouterTransitionStart` is available from SDK version 9.12.0 onwards
+export const onRouterTransitionStart = Sentry.captureRouterTransitionStart;
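With this init in place, any client code can report handled errors against the same DSN, sampling, and replay settings. A small hedged sketch (the fetch wrapper and its names are illustrative, not part of this diff):

```ts
// Hypothetical helper: report a handled failure with extra context.
import * as Sentry from '@sentry/nextjs';

export const saveDraft = async (draftId: string, body: string) => {
  try {
    await fetch(`/api/drafts/${draftId}`, { method: 'PUT', body });
  } catch (error) {
    // Goes through the Sentry.init() configuration above.
    Sentry.captureException(error, { extra: { draftId } });
    throw error;
  }
};
```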
diff --git a/src/instrumentation.ts b/src/instrumentation.ts
index 8aff09f..b77a16f 100644
--- a/src/instrumentation.ts
+++ b/src/instrumentation.ts
@@ -1,13 +1,10 @@
import * as Sentry from '@sentry/nextjs';
+import type { Instrumentation } from 'next';
-export async function register() {
- if (process.env.NEXT_RUNTIME === 'nodejs') {
- await import('../sentry.server.config');
- }
+export const register = async () => {
+ await import('../sentry.server.config');
+};
- if (process.env.NEXT_RUNTIME === 'edge') {
- await import('../sentry.edge.config');
- }
-}
-
-export const onRequestError = Sentry.captureRequestError;
+export const onRequestError: Instrumentation.onRequestError = (...args) => {
+ Sentry.captureRequestError(...args);
+};
diff --git a/src/lib/actions/auth.ts b/src/lib/actions/auth.ts
new file mode 100644
index 0000000..816ef76
--- /dev/null
+++ b/src/lib/actions/auth.ts
@@ -0,0 +1,155 @@
+'use server';
+
+import 'server-only';
+import { createServerClient } from '@/utils/supabase';
+import { headers } from 'next/headers';
+import type { User } from '@/utils/supabase';
+import type { Result } from '.';
+
+export const signUp = async (
+ formData: FormData,
+): Promise<Result<string>> => {
+ const name = formData.get('name') as string;
+ const email = formData.get('email') as string;
+ const password = formData.get('password') as string;
+ const supabase = await createServerClient();
+ const origin = (await headers()).get('origin');
+
+ if (!email || !password) {
+ return { success: false, error: 'Email and password are required' };
+ }
+
+ const { error } = await supabase.auth.signUp({
+ email,
+ password,
+ options: {
+ emailRedirectTo: `${origin}/auth/callback`,
+ data: {
+ full_name: name,
+ email,
+ provider: 'email',
+ },
+ },
+ });
+
+ if (error) {
+ return { success: false, error: error.message };
+ } else {
+ return {
+ success: true,
+ data: 'Thanks for signing up! Please check your email for a verification link.',
+ };
+ }
+};
+
+export const signIn = async (formData: FormData): Promise<Result<null>> => {
+ const email = formData.get('email') as string;
+ const password = formData.get('password') as string;
+ const supabase = await createServerClient();
+
+ const { error } = await supabase.auth.signInWithPassword({
+ email,
+ password,
+ });
+ if (error) {
+ return { success: false, error: error.message };
+ } else {
+ return { success: true, data: null };
+ }
+};
+
+export const signInWithMicrosoft = async (): Promise<Result<string>> => {
+ const supabase = await createServerClient();
+ const origin = (await headers()).get('origin');
+ const { data, error } = await supabase.auth.signInWithOAuth({
+ provider: 'azure',
+ options: {
+      scopes: 'openid profile email offline_access',
+ redirectTo: `${origin}/auth/callback?redirect_to=/auth/success`,
+ },
+ });
+ if (error) return { success: false, error: error.message };
+ return { success: true, data: data.url };
+};
+
+export const signInWithApple = async (): Promise<Result<string>> => {
+ const supabase = await createServerClient();
+ const origin = process.env.BASE_URL!;
+ const { data, error } = await supabase.auth.signInWithOAuth({
+ provider: 'apple',
+ options: {
+ redirectTo: `${origin}/auth/callback?redirect_to=/auth/success`,
+ },
+ });
+ if (error) return { success: false, error: error.message };
+ return { success: true, data: data.url };
+};
+
+export const forgotPassword = async (
+ formData: FormData,
+): Promise<Result<string>> => {
+ const email = formData.get('email') as string;
+ const supabase = await createServerClient();
+ const origin = (await headers()).get('origin');
+
+ if (!email) {
+ return { success: false, error: 'Email is required' };
+ }
+
+ const { error } = await supabase.auth.resetPasswordForEmail(email, {
+ redirectTo: `${origin}/auth/callback?redirect_to=/reset-password`,
+ });
+
+ if (error) {
+ return { success: false, error: 'Could not reset password' };
+ }
+ return {
+ success: true,
+ data: 'Check your email for a link to reset your password.',
+ };
+};
+
+export const resetPassword = async (
+ formData: FormData,
+): Promise<Result<null>> => {
+ const password = formData.get('password') as string;
+ const confirmPassword = formData.get('confirmPassword') as string;
+ if (!password || !confirmPassword) {
+ return {
+ success: false,
+ error: 'Password and confirm password are required!',
+ };
+ }
+ const supabase = await createServerClient();
+ if (password !== confirmPassword) {
+ return { success: false, error: 'Passwords do not match!' };
+ }
+ const { error } = await supabase.auth.updateUser({
+ password,
+ });
+ if (error) {
+ return {
+ success: false,
+ error: `Password update failed: ${error.message}`,
+ };
+ }
+ return { success: true, data: null };
+};
+
+export const signOut = async (): Promise<Result<null>> => {
+ const supabase = await createServerClient();
+ const { error } = await supabase.auth.signOut();
+ if (error) return { success: false, error: error.message };
+ return { success: true, data: null };
+};
+
+export const getUser = async (): Promise<Result<User>> => {
+ try {
+ const supabase = await createServerClient();
+ const { data, error } = await supabase.auth.getUser();
+ if (error) throw error;
+ return { success: true, data: data.user };
+ } catch (error) {
+ return { success: false, error: 'Could not get user!' };
+ }
+};
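Each action returns a `Result` instead of redirecting, leaving error presentation to the caller. A hedged sketch of wiring `signIn` into a client component (the form markup and toast handling are illustrative):

```tsx
'use client';
// Hypothetical login form calling the signIn server action above.
import { signIn } from '@/lib/actions';
import { toast } from 'sonner';

export const LoginForm = () => {
  const handleSubmit = async (formData: FormData) => {
    const result = await signIn(formData);
    if (!result.success) toast.error(result.error);
  };
  return (
    <form action={handleSubmit}>
      <input name="email" type="email" required />
      <input name="password" type="password" required />
      <button type="submit">Sign in</button>
    </form>
  );
};
```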
diff --git a/src/lib/actions/index.ts b/src/lib/actions/index.ts
new file mode 100644
index 0000000..13e5fc8
--- /dev/null
+++ b/src/lib/actions/index.ts
@@ -0,0 +1,7 @@
+export * from './auth';
+export * from './storage';
+export * from './public';
+
+export type Result<T> =
+ | { success: true; data: T }
+ | { success: false; error: string };
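`Result<T>` is a discriminated union on `success`, so checking that flag narrows the type: `data` only exists on the success branch and `error` only on the failure branch. A minimal sketch:

```ts
// Hypothetical caller demonstrating the narrowing.
import { getUser } from '@/lib/actions';

const whoAmI = async (): Promise<string> => {
  const result = await getUser();
  if (!result.success) return `Not signed in: ${result.error}`;
  return result.data.email ?? result.data.id; // result.data is a User here
};
```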
diff --git a/src/lib/actions/public.ts b/src/lib/actions/public.ts
new file mode 100644
index 0000000..2fdf3fb
--- /dev/null
+++ b/src/lib/actions/public.ts
@@ -0,0 +1,80 @@
+'use server';
+
+import 'server-only';
+import { createServerClient, type Profile } from '@/utils/supabase';
+import { getUser } from '@/lib/actions';
+import type { Result } from '.';
+
+export const getProfile = async (): Promise<Result<Profile>> => {
+ try {
+ const user = await getUser();
+ if (!user.success || user.data === undefined)
+ throw new Error('User not found');
+ const supabase = await createServerClient();
+ const { data, error } = await supabase
+ .from('profiles')
+ .select('*')
+ .eq('id', user.data.id)
+ .single();
+ if (error) throw error;
+ return { success: true, data: data as Profile };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error
+ ? error.message
+ : 'Unknown error getting profile',
+ };
+ }
+};
+
+type updateProfileProps = {
+ full_name?: string;
+ email?: string;
+ avatar_url?: string;
+};
+
+export const updateProfile = async ({
+ full_name,
+ email,
+ avatar_url,
+}: updateProfileProps): Promise<Result<Profile>> => {
+ try {
+ if (
+ full_name === undefined &&
+ email === undefined &&
+ avatar_url === undefined
+ )
+ throw new Error('No profile data provided');
+
+ const userResponse = await getUser();
+ if (!userResponse.success || userResponse.data === undefined)
+ throw new Error('User not found');
+
+ const supabase = await createServerClient();
+ const { data, error } = await supabase
+ .from('profiles')
+ .update({
+ ...(full_name !== undefined && { full_name }),
+ ...(email !== undefined && { email }),
+ ...(avatar_url !== undefined && { avatar_url }),
+ })
+ .eq('id', userResponse.data.id)
+ .select()
+ .single();
+ if (error) throw error;
+ return {
+ success: true,
+ data: data as Profile,
+ };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error
+ ? error.message
+ : 'Unknown error updating profile',
+ };
+ }
+};
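Because `updateProfile` spreads each field conditionally, only the arguments actually passed end up in the `UPDATE`. A hedged usage sketch (the helper name is illustrative):

```ts
// Hypothetical caller: only avatar_url is written; full_name and email
// are left untouched because they are not in the update payload.
import { updateProfile } from '@/lib/actions';

const setAvatar = async (avatarPath: string) => {
  const result = await updateProfile({ avatar_url: avatarPath });
  if (!result.success) throw new Error(result.error);
  return result.data; // the freshly updated Profile row
};
```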
diff --git a/src/lib/actions/storage.ts b/src/lib/actions/storage.ts
new file mode 100755
index 0000000..9612ff1
--- /dev/null
+++ b/src/lib/actions/storage.ts
@@ -0,0 +1,257 @@
+'use server';
+import 'server-only';
+import { createServerClient } from '@/utils/supabase';
+import type { FileObject } from '@supabase/storage-js';
+import type { Result } from '.';
+
+export type GetStorageProps = {
+ bucket: string;
+ url: string;
+ seconds?: number;
+ transform?: {
+ width?: number;
+ height?: number;
+ quality?: number;
+ format?: 'origin';
+ resize?: 'cover' | 'contain' | 'fill';
+ };
+ download?: boolean | string;
+};
+
+export type UploadStorageProps = {
+ bucket: string;
+ path: string;
+ file: File;
+ options?: {
+ cacheControl?: string;
+ contentType?: string;
+ };
+};
+
+export type ReplaceStorageProps = {
+ bucket: string;
+ path: string;
+ file: File;
+ options?: {
+ cacheControl?: string;
+ contentType?: string;
+ };
+};
+
+export type resizeImageProps = {
+ file: File;
+ options?: {
+ maxWidth?: number;
+ maxHeight?: number;
+ quality?: number;
+ };
+};
+
+export const getSignedUrl = async ({
+ bucket,
+ url,
+ seconds = 3600,
+ transform = {},
+ download = false,
+}: GetStorageProps): Promise<Result<string>> => {
+ try {
+ const supabase = await createServerClient();
+ const { data, error } = await supabase.storage
+ .from(bucket)
+ .createSignedUrl(url, seconds, {
+ download,
+ transform,
+ });
+
+ if (error) throw error;
+ if (!data?.signedUrl) throw new Error('No signed URL returned');
+
+ return { success: true, data: data.signedUrl };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error
+ ? error.message
+ : 'Unknown error getting signed URL',
+ };
+ }
+};
+
+export const getPublicUrl = async ({
+ bucket,
+ url,
+ transform = {},
+ download = false,
+}: GetStorageProps): Promise<Result<string>> => {
+ try {
+ const supabase = await createServerClient();
+ const { data } = supabase.storage.from(bucket).getPublicUrl(url, {
+ download,
+ transform,
+ });
+
+ if (!data?.publicUrl) throw new Error('No public URL returned');
+
+ return { success: true, data: data.publicUrl };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error
+ ? error.message
+ : 'Unknown error getting public URL',
+ };
+ }
+};
+
+export const uploadFile = async ({
+ bucket,
+ path,
+ file,
+ options = {},
+}: UploadStorageProps): Promise<Result<string>> => {
+ try {
+ const supabase = await createServerClient();
+ const { data, error } = await supabase.storage
+ .from(bucket)
+ .upload(path, file, options);
+
+ if (error) throw error;
+ if (!data?.path) throw new Error('No path returned from upload');
+
+ return { success: true, data: data.path };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error ? error.message : 'Unknown error uploading file',
+ };
+ }
+};
+
+export const replaceFile = async ({
+ bucket,
+ path,
+ file,
+ options = {},
+}: ReplaceStorageProps): Promise<Result<string>> => {
+ try {
+ const supabase = await createServerClient();
+ const { data, error } = await supabase.storage
+ .from(bucket)
+ .update(path, file, { ...options, upsert: true });
+ if (error) throw error;
+ if (!data?.path) throw new Error('No path returned from upload');
+ return { success: true, data: data.path };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error ? error.message : 'Unknown error replacing file',
+ };
+ }
+};
+
+// Add a helper to delete files
+export const deleteFile = async ({
+ bucket,
+ path,
+}: {
+ bucket: string;
+ path: string[];
+}): Promise<Result<null>> => {
+ try {
+ const supabase = await createServerClient();
+ const { error } = await supabase.storage.from(bucket).remove(path);
+
+ if (error) throw error;
+
+ return { success: true, data: null };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error ? error.message : 'Unknown error deleting file',
+ };
+ }
+};
+
+// Add a helper to list files in a bucket
+export const listFiles = async ({
+ bucket,
+ path = '',
+ options = {},
+}: {
+ bucket: string;
+ path?: string;
+ options?: {
+ limit?: number;
+ offset?: number;
+ sortBy?: { column: string; order: 'asc' | 'desc' };
+ };
+}): Promise<Result<Array<FileObject>>> => {
+ try {
+ const supabase = await createServerClient();
+ const { data, error } = await supabase.storage
+ .from(bucket)
+ .list(path, options);
+
+ if (error) throw error;
+ if (!data) throw new Error('No data returned from list operation');
+
+ return { success: true, data };
+ } catch (error) {
+ console.error('Could not list files!', error);
+ return {
+ success: false,
+ error:
+ error instanceof Error ? error.message : 'Unknown error listing files',
+ };
+ }
+};
+
+export const resizeImage = async ({
+ file,
+ options = {},
+}: resizeImageProps): Promise<File> => {
+ const { maxWidth = 800, maxHeight = 800, quality = 0.8 } = options;
+ return new Promise((resolve) => {
+ const reader = new FileReader();
+ reader.readAsDataURL(file);
+ reader.onload = (event) => {
+ const img = new Image();
+ img.src = event.target?.result as string;
+ img.onload = () => {
+ let width = img.width;
+ let height = img.height;
+ if (width > height) {
+ if (width > maxWidth) {
+ height = Math.round((height * maxWidth) / width);
+ width = maxWidth;
+ }
+ } else if (height > maxHeight) {
+ width = Math.round((width * maxHeight) / height);
+ height = maxHeight;
+ }
+ const canvas = document.createElement('canvas');
+ canvas.width = width;
+ canvas.height = height;
+ const ctx = canvas.getContext('2d');
+ ctx?.drawImage(img, 0, 0, width, height);
+ canvas.toBlob(
+ (blob) => {
+ if (!blob) return;
+ const resizedFile = new File([blob], file.name, {
+            type: 'image/jpeg',
+ lastModified: Date.now(),
+ });
+ resolve(resizedFile);
+ },
+ 'image/jpeg',
+ quality,
+ );
+ };
+ };
+ });
+};
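These helpers compose naturally: upload an object, then mint a time-limited signed URL for it. A sketch assuming a private bucket named `documents` (the bucket name and flow are illustrative):

```ts
// Hypothetical flow: store a file, then return a 1-hour signed URL
// (seconds defaults to 3600 in getSignedUrl above).
import { uploadFile, getSignedUrl } from '@/lib/actions';

export const shareUpload = async (file: File) => {
  const uploaded = await uploadFile({
    bucket: 'documents', // assumed bucket
    path: `${Date.now()}-${file.name}`,
    file,
    options: { contentType: file.type },
  });
  if (!uploaded.success) throw new Error(uploaded.error);

  const signed = await getSignedUrl({ bucket: 'documents', url: uploaded.data });
  if (!signed.success) throw new Error(signed.error);
  return signed.data; // expiring URL safe to hand to a client
};
```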
diff --git a/src/lib/hooks/auth.ts b/src/lib/hooks/auth.ts
new file mode 100644
index 0000000..e4e5ff3
--- /dev/null
+++ b/src/lib/hooks/auth.ts
@@ -0,0 +1,148 @@
+'use client';
+import { createClient } from '@/utils/supabase';
+import type { User } from '@/utils/supabase';
+import type { Result } from '.';
+
+export const signUp = async (
+ formData: FormData,
+): Promise<Result<string>> => {
+ const name = formData.get('name') as string;
+ const email = formData.get('email') as string;
+ const password = formData.get('password') as string;
+ const supabase = createClient();
+ const origin = process.env.NEXT_PUBLIC_SITE_URL!;
+
+ if (!email || !password) {
+ return { success: false, error: 'Email and password are required' };
+ }
+
+ const { error } = await supabase.auth.signUp({
+ email,
+ password,
+ options: {
+ emailRedirectTo: `${origin}/auth/callback`,
+ data: {
+ full_name: name,
+ email,
+ provider: 'email',
+ },
+ },
+ });
+ if (error) {
+ return { success: false, error: error.message };
+ } else {
+ return {
+ success: true,
+ data: 'Thanks for signing up! Please check your email for a verification link.',
+ };
+ }
+};
+
+export const signIn = async (formData: FormData): Promise<Result<null>> => {
+ const email = formData.get('email') as string;
+ const password = formData.get('password') as string;
+ const supabase = createClient();
+
+ const { error } = await supabase.auth.signInWithPassword({
+ email,
+ password,
+ });
+ if (error) {
+ return { success: false, error: error.message };
+ } else {
+ return { success: true, data: null };
+ }
+};
+
+export const signInWithMicrosoft = async (): Promise<Result<string>> => {
+ const supabase = createClient();
+ const { data, error } = await supabase.auth.signInWithOAuth({
+ provider: 'azure',
+ options: {
+      scopes: 'openid profile email offline_access',
+ },
+ });
+ if (error) return { success: false, error: error.message };
+ return { success: true, data: data.url };
+};
+
+export const signInWithApple = async (): Promise<Result<string>> => {
+ const supabase = createClient();
+ const { data, error } = await supabase.auth.signInWithOAuth({
+ provider: 'apple',
+ options: {
+      scopes: 'openid profile email offline_access',
+ },
+ });
+ if (error) return { success: false, error: error.message };
+ return { success: true, data: data.url };
+};
+
+export const forgotPassword = async (
+ formData: FormData,
+): Promise<Result<string>> => {
+ const email = formData.get('email') as string;
+ const supabase = createClient();
+ const origin = process.env.NEXT_PUBLIC_SITE_URL!;
+
+ if (!email) {
+ return { success: false, error: 'Email is required' };
+ }
+
+ const { error } = await supabase.auth.resetPasswordForEmail(email, {
+ redirectTo: `${origin}/auth/callback?redirect_to=/reset-password`,
+ });
+
+ if (error) {
+ return { success: false, error: 'Could not reset password' };
+ }
+ return {
+ success: true,
+ data: 'Check your email for a link to reset your password.',
+ };
+};
+
+export const resetPassword = async (
+ formData: FormData,
+): Promise<Result<null>> => {
+ const password = formData.get('password') as string;
+ const confirmPassword = formData.get('confirmPassword') as string;
+ if (!password || !confirmPassword) {
+ return {
+ success: false,
+ error: 'Password and confirm password are required!',
+ };
+ }
+ const supabase = createClient();
+ if (password !== confirmPassword) {
+ return { success: false, error: 'Passwords do not match!' };
+ }
+ const { error } = await supabase.auth.updateUser({
+ password,
+ });
+ if (error) {
+ return {
+ success: false,
+ error: `Password update failed: ${error.message}`,
+ };
+ }
+ return { success: true, data: null };
+};
+
+export const signOut = async (): Promise<Result<null>> => {
+ const supabase = createClient();
+ const { error } = await supabase.auth.signOut();
+ if (error) return { success: false, error: error.message };
+ return { success: true, data: null };
+};
+
+export const getUser = async (): Promise<Result<User>> => {
+ try {
+ const supabase = createClient();
+ const { data, error } = await supabase.auth.getUser();
+ if (error) throw error;
+ return { success: true, data: data.user };
+ } catch (error) {
+ return { success: false, error: 'Could not get user!' };
+ }
+};
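Unlike the server-side variants, these client OAuth helpers return the provider URL without a `redirectTo`, so the browser has to perform the navigation itself. A hedged sketch of a sign-in button (markup and error handling are illustrative):

```tsx
'use client';
// Hypothetical OAuth button using the client-side hook above.
import { signInWithMicrosoft } from '@/lib/hooks';
import { toast } from 'sonner';

export const MicrosoftButton = () => {
  const handleClick = async () => {
    const result = await signInWithMicrosoft();
    if (!result.success) return toast.error(result.error);
    if (result.data) window.location.href = result.data; // provider URL
  };
  return <button onClick={handleClick}>Sign in with Microsoft</button>;
};
```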
diff --git a/src/lib/hooks/index.ts b/src/lib/hooks/index.ts
new file mode 100755
index 0000000..cd51501
--- /dev/null
+++ b/src/lib/hooks/index.ts
@@ -0,0 +1,9 @@
+export * from './auth';
+export * from './public';
+//export * from './resizeImage';
+export * from './storage';
+export * from './useFileUpload';
+
+export type Result<T> =
+ | { success: true; data: T }
+ | { success: false; error: string };
diff --git a/src/lib/hooks/public.ts b/src/lib/hooks/public.ts
new file mode 100644
index 0000000..7329d33
--- /dev/null
+++ b/src/lib/hooks/public.ts
@@ -0,0 +1,79 @@
+'use client';
+
+import { createClient, type Profile } from '@/utils/supabase';
+import { getUser } from '@/lib/hooks';
+import type { Result } from '.';
+
+export const getProfile = async (): Promise<Result<Profile>> => {
+ try {
+ const user = await getUser();
+ if (!user.success || user.data === undefined)
+ throw new Error('User not found');
+ const supabase = createClient();
+ const { data, error } = await supabase
+ .from('profiles')
+ .select('*')
+ .eq('id', user.data.id)
+ .single();
+ if (error) throw error;
+ return { success: true, data: data as Profile };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error
+ ? error.message
+ : 'Unknown error getting profile',
+ };
+ }
+};
+
+type updateProfileProps = {
+ full_name?: string;
+ email?: string;
+ avatar_url?: string;
+};
+
+export const updateProfile = async ({
+ full_name,
+ email,
+ avatar_url,
+}: updateProfileProps): Promise<Result<Profile>> => {
+ try {
+ if (
+ full_name === undefined &&
+ email === undefined &&
+ avatar_url === undefined
+ )
+ throw new Error('No profile data provided');
+
+ const userResponse = await getUser();
+ if (!userResponse.success || userResponse.data === undefined)
+ throw new Error('User not found');
+
+ const supabase = createClient();
+ const { data, error } = await supabase
+ .from('profiles')
+ .update({
+ ...(full_name !== undefined && { full_name }),
+ ...(email !== undefined && { email }),
+ ...(avatar_url !== undefined && { avatar_url }),
+ })
+ .eq('id', userResponse.data.id)
+ .select()
+ .single();
+ if (error) throw error;
+ return {
+ success: true,
+ data: data as Profile,
+ };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error
+ ? error.message
+ : 'Unknown error updating profile',
+ };
+ }
+};
diff --git a/src/lib/hooks/storage.ts b/src/lib/hooks/storage.ts
new file mode 100644
index 0000000..8b11c9d
--- /dev/null
+++ b/src/lib/hooks/storage.ts
@@ -0,0 +1,260 @@
+'use client';
+
+import { createClient } from '@/utils/supabase';
+import type { FileObject } from '@supabase/storage-js';
+import type { Result } from '.';
+
+export type GetStorageProps = {
+ bucket: string;
+ url: string;
+ seconds?: number;
+ transform?: {
+ width?: number;
+ height?: number;
+ quality?: number;
+ format?: 'origin';
+ resize?: 'cover' | 'contain' | 'fill';
+ };
+ download?: boolean | string;
+};
+
+export type UploadStorageProps = {
+ bucket: string;
+ path: string;
+ file: File;
+ options?: {
+ cacheControl?: string;
+ contentType?: string;
+ };
+};
+
+export type ReplaceStorageProps = {
+ bucket: string;
+ path: string;
+ file: File;
+ options?: {
+ cacheControl?: string;
+ contentType?: string;
+ };
+};
+
+export type resizeImageProps = {
+ file: File;
+ options?: {
+ maxWidth?: number;
+ maxHeight?: number;
+ quality?: number;
+ };
+};
+
+export const getSignedUrl = async ({
+ bucket,
+ url,
+ seconds = 3600,
+ transform = {},
+ download = false,
+}: GetStorageProps): Promise<Result<string>> => {
+ try {
+ const supabase = createClient();
+ const { data, error } = await supabase.storage
+ .from(bucket)
+ .createSignedUrl(url, seconds, {
+ download,
+ transform,
+ });
+
+ if (error) throw error;
+ if (!data?.signedUrl) throw new Error('No signed URL returned');
+
+ return { success: true, data: data.signedUrl };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error
+ ? error.message
+ : 'Unknown error getting signed URL',
+ };
+ }
+};
+
+export const getPublicUrl = async ({
+ bucket,
+ url,
+ transform = {},
+ download = false,
+}: GetStorageProps): Promise<Result<string>> => {
+ try {
+ const supabase = createClient();
+ const { data } = supabase.storage.from(bucket).getPublicUrl(url, {
+ download,
+ transform,
+ });
+
+ if (!data?.publicUrl) throw new Error('No public URL returned');
+
+ return { success: true, data: data.publicUrl };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error
+ ? error.message
+ : 'Unknown error getting public URL',
+ };
+ }
+};
+
+export const uploadFile = async ({
+ bucket,
+ path,
+ file,
+ options = {},
+}: UploadStorageProps): Promise<Result<string>> => {
+ try {
+ const supabase = createClient();
+ const { data, error } = await supabase.storage
+ .from(bucket)
+ .upload(path, file, options);
+
+ if (error) throw error;
+ if (!data?.path) throw new Error('No path returned from upload');
+
+ return { success: true, data: data.path };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error ? error.message : 'Unknown error uploading file',
+ };
+ }
+};
+
+export const replaceFile = async ({
+ bucket,
+ path,
+ file,
+ options = {},
+}: ReplaceStorageProps): Promise<Result<string>> => {
+ try {
+ const supabase = createClient();
+ const { data, error } = await supabase.storage
+ .from(bucket)
+ .update(path, file, {
+ ...options,
+ upsert: true,
+ });
+ if (error) throw error;
+ if (!data?.path) throw new Error('No path returned from upload');
+ return { success: true, data: data.path };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error ? error.message : 'Unknown error replacing file',
+ };
+ }
+};
+
+// Add a helper to delete files
+export const deleteFile = async ({
+ bucket,
+ path,
+}: {
+ bucket: string;
+ path: string[];
+}): Promise<Result<null>> => {
+ try {
+ const supabase = createClient();
+ const { error } = await supabase.storage.from(bucket).remove(path);
+
+ if (error) throw error;
+
+ return { success: true, data: null };
+ } catch (error) {
+ return {
+ success: false,
+ error:
+ error instanceof Error ? error.message : 'Unknown error deleting file',
+ };
+ }
+};
+
+// Add a helper to list files in a bucket
+export const listFiles = async ({
+ bucket,
+ path = '',
+ options = {},
+}: {
+ bucket: string;
+ path?: string;
+ options?: {
+ limit?: number;
+ offset?: number;
+ sortBy?: { column: string; order: 'asc' | 'desc' };
+ };
+}): Promise<Result<Array<FileObject>>> => {
+ try {
+ const supabase = createClient();
+ const { data, error } = await supabase.storage
+ .from(bucket)
+ .list(path, options);
+
+ if (error) throw error;
+ if (!data) throw new Error('No data returned from list operation');
+
+ return { success: true, data };
+ } catch (error) {
+ console.error('Could not list files!', error);
+ return {
+ success: false,
+ error:
+ error instanceof Error ? error.message : 'Unknown error listing files',
+ };
+ }
+};
+
+export const resizeImage = async ({
+ file,
+ options = {},
+}: resizeImageProps): Promise<File> => {
+ const { maxWidth = 800, maxHeight = 800, quality = 0.8 } = options;
+ return new Promise((resolve) => {
+ const reader = new FileReader();
+ reader.readAsDataURL(file);
+ reader.onload = (event) => {
+ const img = new Image();
+ img.src = event.target?.result as string;
+ img.onload = () => {
+ let width = img.width;
+ let height = img.height;
+ if (width > height) {
+ if (width > maxWidth) {
+ height = Math.round((height * maxWidth) / width);
+ width = maxWidth;
+ }
+ } else if (height > maxHeight) {
+ width = Math.round((width * maxHeight) / height);
+ height = maxHeight;
+ }
+ const canvas = document.createElement('canvas');
+ canvas.width = width;
+ canvas.height = height;
+ const ctx = canvas.getContext('2d');
+ ctx?.drawImage(img, 0, 0, width, height);
+ canvas.toBlob(
+ (blob) => {
+ if (!blob) return;
+ const resizedFile = new File([blob], file.name, {
+            type: 'image/jpeg',
+ lastModified: Date.now(),
+ });
+ resolve(resizedFile);
+ },
+ 'image/jpeg',
+ quality,
+ );
+ };
+ };
+ });
+};
diff --git a/src/lib/hooks/useFileUpload.ts b/src/lib/hooks/useFileUpload.ts
new file mode 100644
index 0000000..bfcd0c2
--- /dev/null
+++ b/src/lib/hooks/useFileUpload.ts
@@ -0,0 +1,105 @@
+'use client';
+
+import { useState, useRef } from 'react';
+import { replaceFile, uploadFile } from '@/lib/hooks';
+import { toast } from 'sonner';
+import { useAuth } from '@/components/context/auth';
+import { resizeImage } from '@/lib/hooks';
+import type { Result } from '.';
+
+export type Replace = { replace: true; path: string } | false;
+
+export type uploadToStorageProps = {
+ file: File;
+ bucket: string;
+ resize: boolean;
+ options?: {
+ maxWidth?: number;
+ maxHeight?: number;
+ quality?: number;
+ };
+ replace?: Replace;
+};
+
+export const useFileUpload = () => {
+ const [isUploading, setIsUploading] = useState(false);
+  const fileInputRef = useRef<HTMLInputElement>(null);
+ const { profile, isAuthenticated } = useAuth();
+
+ const uploadToStorage = async ({
+ file,
+ bucket,
+ resize = false,
+ options = {},
+ replace = false,
+  }: uploadToStorageProps): Promise<Result<string>> => {
+ try {
+ if (!isAuthenticated) throw new Error('User is not authenticated');
+
+ setIsUploading(true);
+ if (replace) {
+ const updateResult = await replaceFile({
+ bucket,
+ path: replace.path,
+ file,
+ options: {
+ contentType: file.type,
+ },
+ });
+ if (!updateResult.success) {
+ return { success: false, error: updateResult.error };
+ } else {
+ return { success: true, data: updateResult.data };
+ }
+ }
+
+ let fileToUpload = file;
+ if (resize && file.type.startsWith('image/'))
+ fileToUpload = await resizeImage({ file, options });
+
+ // Generate a unique filename to avoid collisions
+ const fileExt = file.name.split('.').pop();
+ const fileName = `${Date.now()}-${profile?.id}.${fileExt}`;
+
+ // Upload the file to Supabase storage
+ const uploadResult = await uploadFile({
+ bucket,
+ path: fileName,
+ file: fileToUpload,
+ options: {
+ contentType: file.type,
+ },
+ });
+
+ if (!uploadResult.success) {
+ throw new Error(uploadResult.error || `Failed to upload to ${bucket}`);
+ }
+
+ return { success: true, data: uploadResult.data };
+ } catch (error) {
+ toast.error(
+ error instanceof Error
+ ? error.message
+ : `Failed to upload to ${bucket}`,
+ );
+ return {
+ success: false,
+ error: `Error: ${
+ error instanceof Error
+ ? error.message
+ : `Failed to upload to ${bucket}`
+ }`,
+ };
+ } finally {
+ setIsUploading(false);
+ // Clear the input value so the same file can be selected again
+ if (fileInputRef.current) fileInputRef.current.value = '';
+ }
+ };
+
+ return {
+ isUploading,
+ fileInputRef,
+ uploadToStorage,
+ };
+};
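A hedged sketch of the hook in a component: the hidden input is owned by `fileInputRef`, and `uploadToStorage` takes care of resizing, uploading, and clearing the input (the `avatars` bucket and the component itself are illustrative):

```tsx
'use client';
// Hypothetical avatar picker built on useFileUpload.
import { type ChangeEvent } from 'react';
import { useFileUpload } from '@/lib/hooks';

export const AvatarPicker = () => {
  const { isUploading, fileInputRef, uploadToStorage } = useFileUpload();

  const handleChange = async (e: ChangeEvent<HTMLInputElement>) => {
    const file = e.target.files?.[0];
    if (!file) return;
    const result = await uploadToStorage({
      file,
      bucket: 'avatars', // assumed bucket
      resize: true,
      options: { maxWidth: 512, maxHeight: 512 },
    });
    if (result.success) console.log('Stored at', result.data);
  };

  return (
    <>
      <input ref={fileInputRef} type="file" accept="image/*" hidden onChange={handleChange} />
      <button disabled={isUploading} onClick={() => fileInputRef.current?.click()}>
        {isUploading ? 'Uploading…' : 'Choose avatar'}
      </button>
    </>
  );
};
```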
diff --git a/src/lib/types.ts b/src/lib/types.ts
deleted file mode 100644
index 10f829c..0000000
--- a/src/lib/types.ts
+++ /dev/null
@@ -1,25 +0,0 @@
-export type User = {
- id: string;
- full_name?: string;
- email: string;
- avatar_url?: string;
- provider: string;
- updated_at?: Date;
-};
-
-export type Status = {
- user: User;
- status: string;
- created_at: Date;
- updated_by: User;
-};
-
-export type PaginatedHistory = {
- statuses: Status[];
- meta: {
- current_page: number;
- per_page: number;
- total_pages: number;
- total_count: number;
- };
-};
diff --git a/src/middleware.ts b/src/middleware.ts
index 369c5c9..196c89d 100644
--- a/src/middleware.ts
+++ b/src/middleware.ts
@@ -1,20 +1,21 @@
import { type NextRequest } from 'next/server';
import { updateSession } from '@/utils/supabase/middleware';
-export async function middleware(request: NextRequest) {
- // update user's auth session
+export const middleware = async (request: NextRequest) => {
return await updateSession(request);
-}
+};
export const config = {
matcher: [
/*
- * Match all request paths except for the ones starting with:
+ * Match all request paths except:
* - _next/static (static files)
* - _next/image (image optimization files)
* - favicon.ico (favicon file)
+ * - /monitoring-tunnel (Sentry monitoring)
+     * - image files (.svg, .png, .jpg, .jpeg, .gif, .webp)
* Feel free to modify this pattern to include more paths.
*/
- '/((?!_next/static|_next/image|favicon.ico|.*\\.(?:svg|png|jpg|jpeg|gif|webp)$).*)',
+ '/((?!_next/static|_next/image|favicon.ico|monitoring-tunnel|.*\\.(?:svg|png|jpg|jpeg|gif|webp)$).*)',
],
};
diff --git a/src/server/actions/auth.ts b/src/server/actions/auth.ts
deleted file mode 100644
index 8aa5794..0000000
--- a/src/server/actions/auth.ts
+++ /dev/null
@@ -1,48 +0,0 @@
-'use server';
-
-import 'server-only';
-import { revalidatePath } from 'next/cache';
-import { redirect } from 'next/navigation';
-
-import { createClient } from '@/utils/supabase/server';
-
-export const login = async (formData: FormData) => {
- const supabase = await createClient();
-
- // type-casting here for convenience
- // in practice, you should validate your inputs
- const data = {
- email: formData.get('email') as string,
- password: formData.get('password') as string,
- };
-
- const { error } = await supabase.auth.signInWithPassword(data);
-
- if (error) {
- redirect('/error');
- }
-
- revalidatePath('/', 'layout');
- redirect('/?refresh=true');
-};
-
-export const signup = async (formData: FormData) => {
- const supabase = await createClient();
-
- // type-casting here for convenience
- // in practice, you should validate your inputs
- const data = {
- fullName: formData.get('fullName') as string,
- email: formData.get('email') as string,
- password: formData.get('password') as string,
- };
-
- const { error } = await supabase.auth.signUp(data);
-
- if (error) {
- redirect('/error');
- }
-
- revalidatePath('/', 'layout');
- redirect('/?refresh=true');
-};
diff --git a/src/server/actions/image.ts b/src/server/actions/image.ts
deleted file mode 100644
index 3dd3d77..0000000
--- a/src/server/actions/image.ts
+++ /dev/null
@@ -1,47 +0,0 @@
-'use server';
-import 'server-only';
-import { createClient } from '@/utils/supabase/server';
-
-export const getImageUrl = async (
- bucket: string,
- path: string,
-): Promise<string> => {
- try {
- const supabase = await createClient();
-
- // Download the image as a blob
- const { data, error } = await supabase.storage.from(bucket).download(path);
-
- if (error) {
- console.error('Error downloading image:', error);
- throw new Error(`Failed to download image: ${error.message}`);
- }
-
- if (!data) {
- throw new Error('No data received from storage');
- }
-
- // Convert blob to base64 string on the server
- const arrayBuffer = await data.arrayBuffer();
- const buffer = Buffer.from(arrayBuffer);
- const base64 = buffer.toString('base64');
-
- // Determine MIME type from file extension or default to octet-stream
- let mimeType = 'application/octet-stream';
- if (path.endsWith('.png')) mimeType = 'image/png';
- else if (path.endsWith('.jpg') || path.endsWith('.jpeg'))
- mimeType = 'image/jpeg';
- else if (path.endsWith('.gif')) mimeType = 'image/gif';
- else if (path.endsWith('.svg')) mimeType = 'image/svg+xml';
- else if (path.endsWith('.webp')) mimeType = 'image/webp';
-
- // Return as data URL
- return `data:${mimeType};base64,${base64}`;
- } catch (error) {
- console.error(
- 'Error processing image:',
- error instanceof Error ? error.message : String(error),
- );
- throw new Error('Failed to process image');
- }
-};
diff --git a/src/server/actions/status.ts b/src/server/actions/status.ts
deleted file mode 100644
index 49e9a1c..0000000
--- a/src/server/actions/status.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-'use server';
-import 'server-only';
-import { createClient } from '@/utils/supabase/server';
-import type { User, PaginatedHistory } from '@/lib/types';
-
-type fetchHistoryProps = {
- currentPage?: number;
- user?: User | null;
-};
-
-export const fetchHistory = async ({
- currentPage = 1,
- user = null,
-}: fetchHistoryProps): PaginatedHistory => {
- const supabase = createClient();
-};
diff --git a/src/server/db/schema.sql b/src/server/db/schema.sql
index 3778cb4..d0930b5 100644
--- a/src/server/db/schema.sql
+++ b/src/server/db/schema.sql
@@ -2,7 +2,7 @@
create table profiles (
id uuid references auth.users on delete cascade not null primary key,
updated_at timestamp with time zone,
- email text,
+ email text unique,
full_name text,
avatar_url text,
provider text,
@@ -35,7 +35,7 @@ begin
new.id,
new.email,
new.raw_user_meta_data->>'full_name',
- new.raw_user_meta_data->>'avatar_url'
+ new.raw_user_meta_data->>'avatar_url',
new.raw_user_meta_data->>'provider',
now()
);
@@ -58,48 +58,68 @@ create policy "Avatar images are publicly accessible." on storage.objects
create policy "Anyone can upload an avatar." on storage.objects
for insert with check (bucket_id = 'avatars');
+create policy "Anyone can update an avatar." on storage.objects
+ for update using (bucket_id = 'avatars');
--- Create a table for public statuses
-CREATE TABLE statuses (
- id uuid DEFAULT gen_random_uuid() PRIMARY KEY,
- user_id uuid REFERENCES auth.users ON DELETE CASCADE NOT NULL,
- updated_by_id uuid REFERENCES auth.users ON DELETE SET NULL DEFAULT auth.uid(),
- created_at timestamp with time zone DEFAULT now() NOT NULL,
- status text NOT NULL,
- CONSTRAINT status_length CHECK (char_length(status) >= 3 AND char_length(status) <= 80),
- CONSTRAINT statuses_user_id_fkey FOREIGN KEY (user_id) REFERENCES profiles(id) ON DELETE CASCADE
-);
+create policy "Anyone can delete an avatar." on storage.objects
+ for delete using (bucket_id = 'avatars');
--- Set up Row Level Security (RLS)
-ALTER TABLE statuses
- ENABLE ROW LEVEL SECURITY;
+-- -- Create a table for public statuses
+-- CREATE TABLE statuses (
+  -- id uuid DEFAULT gen_random_uuid() PRIMARY KEY,
+  -- user_id uuid REFERENCES auth.users ON DELETE CASCADE NOT NULL,
+  -- updated_by_id uuid REFERENCES auth.users ON DELETE SET NULL DEFAULT auth.uid(),
+  -- created_at timestamp with time zone DEFAULT now() NOT NULL,
+  -- status text NOT NULL,
+  -- CONSTRAINT status_length CHECK (char_length(status) >= 3 AND char_length(status) <= 80)
+-- );
--- Policies
-CREATE POLICY "Public statuses are viewable by everyone." ON statuses
-  FOR SELECT USING (true);
+-- -- Set up Row Level Security (RLS)
+-- ALTER TABLE statuses
+  -- ENABLE ROW LEVEL SECURITY;
-CREATE POLICY "Users can insert statuses for any user." ON statuses
-  FOR INSERT WITH CHECK (auth.role() = 'authenticated');
+-- -- Policies
+-- CREATE POLICY "Public statuses are viewable by everyone." ON statuses
+  -- FOR SELECT USING (true);
--- Function to add first status
-CREATE FUNCTION public.handle_first_status()
-RETURNS TRIGGER
-SET search_path = ''
-AS $$
-BEGIN
-  INSERT INTO public.statuses (user_id, updated_by_id, status)
-  VALUES (
-    NEW.id,
-    NEW.id,
-    'Just joined!'
-  );
-  RETURN NEW;
-END;
-$$ LANGUAGE plpgsql SECURITY DEFINER;
+-- -- RECREATE it using the recommended sub-select form
+-- CREATE POLICY "Authenticated users can insert statuses for any user."
+  -- ON public.statuses
+  -- FOR INSERT
+  -- WITH CHECK (
+    -- (SELECT auth.role()) = 'authenticated'
+  -- );
--- Create a separate trigger for the status
-CREATE TRIGGER on_auth_user_created_add_status
-  AFTER INSERT ON auth.users
-  FOR EACH ROW EXECUTE PROCEDURE public.handle_first_status();
+-- -- ADD an UPDATE policy so anyone signed-in can update *any* status
+-- CREATE POLICY "Authenticated users can update statuses for any user."
+  -- ON public.statuses
+  -- FOR UPDATE
+  -- USING (
+    -- (SELECT auth.role()) = 'authenticated'
+  -- )
+  -- WITH CHECK (
+    -- (SELECT auth.role()) = 'authenticated'
+  -- );
-alter publication supabase_realtime add table statuses;
+-- -- Function to add first status
+-- CREATE FUNCTION public.handle_first_status()
+-- RETURNS TRIGGER
+-- SET search_path = ''
+-- AS $$
+-- BEGIN
+  -- INSERT INTO public.statuses (user_id, updated_by_id, status)
+  -- VALUES (
+    -- NEW.id,
+    -- NEW.id,
+    -- 'Just joined!'
+  -- );
+  -- RETURN NEW;
+-- END;
+-- $$ LANGUAGE plpgsql SECURITY DEFINER;
+
+-- -- Create a separate trigger for the status
+-- CREATE TRIGGER on_auth_user_created_add_status
+  -- AFTER INSERT ON auth.users
+  -- FOR EACH ROW EXECUTE PROCEDURE public.handle_first_status();
+
+-- alter publication supabase_realtime add table statuses;
diff --git a/src/server/docker/.env.example b/src/server/docker/.env.example
new file mode 100644
index 0000000..d3c04e1
--- /dev/null
+++ b/src/server/docker/.env.example
@@ -0,0 +1,154 @@
+############
+# Secrets
+# YOU MUST CHANGE THESE BEFORE GOING INTO PRODUCTION
+############
+
+POSTGRES_PASSWORD=your-super-secret-and-long-postgres-password
+JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
+ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
+SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
+DASHBOARD_USERNAME=gib
+DASHBOARD_PASSWORD=this_password_is_insecure_and_should_be_updated
+SECRET_KEY_BASE=UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
+VAULT_ENC_KEY=your-encryption-key-32-chars-min
+
+
+############
+# Database - You can change these to any PostgreSQL database that has logical replication enabled.
+############
+
+POSTGRES_HOST=db
+POSTGRES_DB=postgres
+POSTGRES_PORT=5432
+# default user is postgres
+
+
+############
+# Supavisor -- Database pooler
+############
+POOLER_PROXY_PORT_TRANSACTION=6543
+POOLER_DEFAULT_POOL_SIZE=20
+POOLER_MAX_CLIENT_CONN=100
+POOLER_TENANT_ID=your-tenant-id # Change me
+
+
+############
+# API Proxy - Configuration for the Kong Reverse proxy.
+############
+
+KONG_HTTP_PORT=8000
+KONG_HTTPS_PORT=8443
+
+
+############
+# API - Configuration for PostgREST.
+############
+
+PGRST_DB_SCHEMAS=public,storage,graphql_public
+
+
+############
+# Auth - Configuration for the GoTrue authentication server.
+############
+
+## General
+SITE_URL=http://localhost:3000 # Change to URL of site used for email links/auth flows
+ADDITIONAL_REDIRECT_URLS= # Change to include any redirect URIs needed
+JWT_EXPIRY=3600
+DISABLE_SIGNUP=false
+API_EXTERNAL_URL=http://localhost:8000 # Should be the same as the SITE URL usually.
+
+## Mailer Config
+MAILER_URLPATHS_CONFIRMATION="/auth/callback"
+MAILER_URLPATHS_INVITE="/auth/callback"
+MAILER_URLPATHS_RECOVERY="/auth/callback"
+MAILER_URLPATHS_EMAIL_CHANGE="/auth/callback"
+
+## Email auth
+ENABLE_EMAIL_SIGNUP=true
+ENABLE_EMAIL_AUTOCONFIRM=false
+SMTP_ADMIN_EMAIL=admin@example.com
+SMTP_HOST=supabase-mail
+SMTP_PORT=2500
+SMTP_USER=fake_mail_user
+SMTP_PASS=fake_mail_password
+SMTP_SENDER_NAME=fake_sender
+ENABLE_ANONYMOUS_USERS=false
+
+
+MAILER_TEMPLATES_INVITE="https://git.gbrown.org/gib/T3-Template/raw/branch/main/src/server/mail_templates/invite_user.html"
+MAILER_TEMPLATES_CONFIRMATION="https://git.gbrown.org/gib/T3-Template/raw/branch/main/src/server/mail_templates/confirm_signup.html"
+MAILER_TEMPLATES_RECOVERY="https://git.gbrown.org/gib/T3-Template/raw/branch/main/src/server/mail_templates/reset_password.html"
+MAILER_TEMPLATES_MAGIC_LINK="https://git.gbrown.org/gib/T3-Template/raw/branch/main/src/server/mail_templates/magic_link.html"
+MAILER_TEMPLATES_EMAIL_CHANGE="https://git.gbrown.org/gib/T3-Template/raw/branch/main/src/server/mail_templates/change_email_address.html"
+
+MAILER_SUBJECTS_INVITE="You've Been Invited!"
+MAILER_SUBJECTS_CONFIRMATION="Confirm Your Email"
+MAILER_SUBJECTS_RECOVERY="Reset Password"
+MAILER_SUBJECTS_MAGIC_LINK="Magic Sign In Link"
+MAILER_SUBJECTS_EMAIL_CHANGE="Change Email Address"
+
+
+## Phone auth
+ENABLE_PHONE_SIGNUP=false
+ENABLE_PHONE_AUTOCONFIRM=false
+
+
+# Apple Auth
+APPLE_ENABLED=true
+APPLE_CLIENT_ID=
+APPLE_SECRET=
+APPLE_REDIRECT_URI=
+APPLE_TEAM_ID=
+APPLE_KEY_ID=
+
+# Azure Auth
+AZURE_ENABLED=true
+AZURE_CLIENT_ID=
+AZURE_SECRET=
+AZURE_REDIRECT_URI=
+AZURE_TENANT_ID=
+AZURE_TENANT_URL=
+
+
+############
+# Studio - Configuration for the Dashboard
+############
+
+STUDIO_DEFAULT_ORGANIZATION=gbrown
+STUDIO_DEFAULT_PROJECT=Default Project
+
+STUDIO_PORT=3000
+# replace if you intend to use Studio outside of localhost
+SUPABASE_PUBLIC_URL=https://localhost:8000 # Change to URL for this supabase instance
+
+# Enable webp support
+IMGPROXY_ENABLE_WEBP_DETECTION=true
+
+# Add your OpenAI API key to enable SQL Editor Assistant
+OPENAI_API_KEY=
+
+
+############
+# Functions - Configuration for Functions
+############
+# NOTE: VERIFY_JWT applies to all functions. Per-function VERIFY_JWT is not supported yet.
+FUNCTIONS_VERIFY_JWT=false
+
+
+############
+# Logs - Configuration for Logflare
+# Please refer to https://supabase.com/docs/reference/self-hosting-analytics/introduction
+############
+
+LOGFLARE_LOGGER_BACKEND_API_KEY=your-super-secret-and-long-logflare-key
+
+# Change vector.toml sinks to reflect this change
+LOGFLARE_API_KEY=your-super-secret-and-long-logflare-key
+
+# Docker socket location - this value will differ depending on your OS
+DOCKER_SOCKET_LOCATION=/var/run/docker.sock
+
+# Google Cloud Project details
+#GOOGLE_PROJECT_ID=GOOGLE_PROJECT_ID
+#GOOGLE_PROJECT_NUMBER=GOOGLE_PROJECT_NUMBER
diff --git a/src/server/docker/docker-compose.dev.yml b/src/server/docker/docker-compose.dev.yml
new file mode 100644
index 0000000..85cb604
--- /dev/null
+++ b/src/server/docker/docker-compose.dev.yml
@@ -0,0 +1,41 @@
+networks:
+ supabase-network:
+ name: supabase-network
+ driver: bridge
+ ipam:
+ config:
+ - subnet: 172.20.0.0/16
+services:
+ studio:
+ build:
+ context: .
+ dockerfile: studio/Dockerfile
+ target: dev
+ networks: [supabase-network]
+ ports:
+ - 8082:8082
+ mail:
+ container_name: supabase-mail
+ image: inbucket/inbucket:3.0.3
+ networks: [supabase-network]
+ ports:
+ - '2500:2500' # SMTP
+ - '9000:9000' # web interface
+ - '1100:1100' # POP3
+ auth:
+ environment:
+ - GOTRUE_SMTP_USER=
+ - GOTRUE_SMTP_PASS=
+ meta:
+ ports:
+ - 5555:8080
+ db:
+ restart: 'no'
+ volumes:
+ # Always use a fresh database when developing
+ - /var/lib/postgresql/data
+ # Seed data should be inserted last (alphabetical order)
+ - ../db/schema.sql:/docker-entrypoint-initdb.d/seed.sql
+ storage:
+ volumes:
+ - /var/lib/storage
diff --git a/src/server/docker/docker-compose.s3.yml b/src/server/docker/docker-compose.s3.yml
new file mode 100644
index 0000000..18c7866
--- /dev/null
+++ b/src/server/docker/docker-compose.s3.yml
@@ -0,0 +1,105 @@
+
+networks:
+ supabase-network:
+ name: supabase-network
+ driver: bridge
+ ipam:
+ config:
+ - subnet: 172.20.0.0/16
+services:
+ minio:
+ image: minio/minio
+ networks: [supabase-network]
+ ports:
+ - '9000:9000'
+ - '9001:9001'
+ environment:
+ MINIO_ROOT_USER: supa-storage
+ MINIO_ROOT_PASSWORD: secret1234
+ command: server --console-address ":9001" /data
+ healthcheck:
+ test: [ "CMD", "curl", "-f", "http://minio:9000/minio/health/live" ]
+ interval: 2s
+ timeout: 10s
+ retries: 5
+ volumes:
+ - ./volumes/storage:/data:z
+
+ minio-createbucket:
+ image: minio/mc
+ networks: [supabase-network]
+ depends_on:
+ minio:
+ condition: service_healthy
+ entrypoint: >
+ /bin/sh -c "
+ /usr/bin/mc alias set supa-minio http://minio:9000 supa-storage secret1234;
+ /usr/bin/mc mb supa-minio/stub;
+ exit 0;
+ "
+
+ storage:
+ container_name: supabase-storage
+ image: supabase/storage-api:v1.11.13
+ networks: [supabase-network]
+ depends_on:
+ db:
+ # Disable this if you are using an external Postgres database
+ condition: service_healthy
+ rest:
+ condition: service_started
+ imgproxy:
+ condition: service_started
+ minio:
+ condition: service_healthy
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "wget",
+ "--no-verbose",
+ "--tries=1",
+ "--spider",
+ "http://localhost:5000/status"
+ ]
+ timeout: 5s
+ interval: 5s
+ retries: 3
+ restart: unless-stopped
+ environment:
+ ANON_KEY: ${ANON_KEY}
+ SERVICE_KEY: ${SERVICE_ROLE_KEY}
+ POSTGREST_URL: http://rest:3000
+ PGRST_JWT_SECRET: ${JWT_SECRET}
+ DATABASE_URL: postgres://supabase_storage_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
+ FILE_SIZE_LIMIT: 52428800
+ STORAGE_BACKEND: s3
+ GLOBAL_S3_BUCKET: stub
+ GLOBAL_S3_ENDPOINT: http://minio:9000
+ GLOBAL_S3_PROTOCOL: http
+ GLOBAL_S3_FORCE_PATH_STYLE: true
+ AWS_ACCESS_KEY_ID: supa-storage
+ AWS_SECRET_ACCESS_KEY: secret1234
+ AWS_DEFAULT_REGION: stub
+ FILE_STORAGE_BACKEND_PATH: /var/lib/storage
+ TENANT_ID: stub
+ # TODO: https://github.com/supabase/storage-api/issues/55
+ REGION: stub
+ ENABLE_IMAGE_TRANSFORMATION: "true"
+ IMGPROXY_URL: http://imgproxy:5001
+ volumes:
+ - ./volumes/storage:/var/lib/storage:z
+
+ imgproxy:
+ container_name: supabase-imgproxy
+ image: darthsim/imgproxy:v3.8.0
+ networks: [supabase-network]
+ healthcheck:
+ test: [ "CMD", "imgproxy", "health" ]
+ timeout: 5s
+ interval: 5s
+ retries: 3
+ environment:
+ IMGPROXY_BIND: ":5001"
+ IMGPROXY_USE_ETAG: "true"
+ IMGPROXY_ENABLE_WEBP_DETECTION: ${IMGPROXY_ENABLE_WEBP_DETECTION}
diff --git a/src/server/docker/docker-compose.yml b/src/server/docker/docker-compose.yml
new file mode 100644
index 0000000..2a78f6e
--- /dev/null
+++ b/src/server/docker/docker-compose.yml
@@ -0,0 +1,576 @@
+# Usage
+# Start: docker compose up
+# With helpers: docker compose -f docker-compose.yml -f ./dev/docker-compose.dev.yml up
+# Stop: docker compose down
+# Destroy: docker compose -f docker-compose.yml -f ./dev/docker-compose.dev.yml down -v --remove-orphans
+# Reset everything: ./reset.sh
+
+name: supabase
+
+networks:
+ supabase-network:
+ name: supabase-network
+ driver: bridge
+ ipam:
+ config:
+ - subnet: 172.20.0.0/16
+
+services:
+
+ studio:
+ container_name: supabase-studio
+ image: supabase/studio:2025.05.19-sha-3487831
+ networks: [supabase-network]
+ restart: unless-stopped
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "node",
+ "-e",
+ "fetch('http://studio:3000/api/platform/profile').then((r) => {if (r.status !== 200) throw new Error(r.status)})"
+ ]
+ timeout: 10s
+ interval: 5s
+ retries: 3
+ depends_on:
+ analytics:
+ condition: service_healthy
+ environment:
+ STUDIO_PG_META_URL: http://meta:8080
+ POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
+
+ DEFAULT_ORGANIZATION_NAME: ${STUDIO_DEFAULT_ORGANIZATION}
+ DEFAULT_PROJECT_NAME: ${STUDIO_DEFAULT_PROJECT}
+ OPENAI_API_KEY: ${OPENAI_API_KEY:-}
+
+ SUPABASE_URL: http://kong:8000
+ SUPABASE_PUBLIC_URL: ${SUPABASE_PUBLIC_URL}
+ SUPABASE_ANON_KEY: ${ANON_KEY}
+ SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY}
+ AUTH_JWT_SECRET: ${JWT_SECRET}
+
+ LOGFLARE_API_KEY: ${LOGFLARE_API_KEY}
+ LOGFLARE_URL: http://analytics:4000
+ NEXT_PUBLIC_ENABLE_LOGS: true
+ # Comment to use Big Query backend for analytics
+ NEXT_ANALYTICS_BACKEND_PROVIDER: postgres
+ # Uncomment to use Big Query backend for analytics
+ # NEXT_ANALYTICS_BACKEND_PROVIDER: bigquery
+
+ kong:
+ container_name: supabase-kong
+ image: kong:2.8.1
+ networks: [supabase-network]
+ restart: unless-stopped
+ ports:
+ - ${KONG_HTTP_PORT}:8000/tcp
+ - ${KONG_HTTPS_PORT}:8443/tcp
+ volumes:
+ # https://github.com/supabase/supabase/issues/12661
+ - ./volumes/api/kong.yml:/home/kong/temp.yml:ro,z
+ depends_on:
+ analytics:
+ condition: service_healthy
+ environment:
+ KONG_DATABASE: "off"
+ KONG_DECLARATIVE_CONFIG: /home/kong/kong.yml
+ # https://github.com/supabase/cli/issues/14
+ KONG_DNS_ORDER: LAST,A,CNAME
+ KONG_PLUGINS: request-transformer,cors,key-auth,acl,basic-auth
+ KONG_NGINX_PROXY_PROXY_BUFFER_SIZE: 160k
+ KONG_NGINX_PROXY_PROXY_BUFFERS: 64 160k
+ SUPABASE_ANON_KEY: ${ANON_KEY}
+ SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY}
+ DASHBOARD_USERNAME: ${DASHBOARD_USERNAME}
+ DASHBOARD_PASSWORD: ${DASHBOARD_PASSWORD}
+ # https://unix.stackexchange.com/a/294837
+ entrypoint: bash -c 'eval "echo \"$$(cat ~/temp.yml)\"" > ~/kong.yml && /docker-entrypoint.sh kong docker-start'
+
+ auth:
+ container_name: supabase-auth
+ image: supabase/gotrue:v2.172.1
+ networks: [supabase-network]
+ restart: unless-stopped
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "wget",
+ "--no-verbose",
+ "--tries=1",
+ "--spider",
+ "http://localhost:9999/health"
+ ]
+ timeout: 5s
+ interval: 5s
+ retries: 3
+ depends_on:
+ db:
+ # Disable this if you are using an external Postgres database
+ condition: service_healthy
+ analytics:
+ condition: service_healthy
+ environment:
+ GOTRUE_API_HOST: 0.0.0.0
+ GOTRUE_API_PORT: 9999
+ API_EXTERNAL_URL: ${API_EXTERNAL_URL}
+
+ GOTRUE_DB_DRIVER: postgres
+ GOTRUE_DB_DATABASE_URL: postgres://supabase_auth_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
+
+ GOTRUE_SITE_URL: ${SITE_URL}
+ GOTRUE_URI_ALLOW_LIST: ${ADDITIONAL_REDIRECT_URLS}
+ GOTRUE_DISABLE_SIGNUP: ${DISABLE_SIGNUP}
+
+ GOTRUE_JWT_ADMIN_ROLES: service_role
+ GOTRUE_JWT_AUD: authenticated
+ GOTRUE_JWT_DEFAULT_GROUP_NAME: authenticated
+ GOTRUE_JWT_EXP: ${JWT_EXPIRY}
+ GOTRUE_JWT_SECRET: ${JWT_SECRET}
+
+ GOTRUE_EXTERNAL_EMAIL_ENABLED: ${ENABLE_EMAIL_SIGNUP}
+ GOTRUE_EXTERNAL_ANONYMOUS_USERS_ENABLED: ${ENABLE_ANONYMOUS_USERS}
+ GOTRUE_MAILER_AUTOCONFIRM: ${ENABLE_EMAIL_AUTOCONFIRM}
+
+ # Uncomment to bypass nonce check in ID Token flow. Commonly set to true when using Google Sign In on mobile.
+ # GOTRUE_EXTERNAL_SKIP_NONCE_CHECK: true
+
+ # GOTRUE_MAILER_SECURE_EMAIL_CHANGE_ENABLED: true
+ # GOTRUE_SMTP_MAX_FREQUENCY: 1s
+ GOTRUE_SMTP_ADMIN_EMAIL: ${SMTP_ADMIN_EMAIL}
+ GOTRUE_SMTP_HOST: ${SMTP_HOST}
+ GOTRUE_SMTP_PORT: ${SMTP_PORT}
+ GOTRUE_SMTP_USER: ${SMTP_USER}
+ GOTRUE_SMTP_PASS: ${SMTP_PASS}
+ GOTRUE_SMTP_SENDER_NAME: ${SMTP_SENDER_NAME}
+ GOTRUE_MAILER_URLPATHS_INVITE: ${MAILER_URLPATHS_INVITE}
+ GOTRUE_MAILER_URLPATHS_CONFIRMATION: ${MAILER_URLPATHS_CONFIRMATION}
+ GOTRUE_MAILER_URLPATHS_RECOVERY: ${MAILER_URLPATHS_RECOVERY}
+ GOTRUE_MAILER_URLPATHS_EMAIL_CHANGE: ${MAILER_URLPATHS_EMAIL_CHANGE}
+
+ GOTRUE_EXTERNAL_PHONE_ENABLED: ${ENABLE_PHONE_SIGNUP}
+ GOTRUE_SMS_AUTOCONFIRM: ${ENABLE_PHONE_AUTOCONFIRM}
+
+ GOTRUE_MAILER_TEMPLATES_INVITE: ${MAILER_TEMPLATES_INVITE}
+ GOTRUE_MAILER_TEMPLATES_CONFIRMATION: ${MAILER_TEMPLATES_CONFIRMATION}
+ GOTRUE_MAILER_TEMPLATES_RECOVERY: ${MAILER_TEMPLATES_RECOVERY}
+ GOTRUE_MAILER_TEMPLATES_MAGIC_LINK: ${MAILER_TEMPLATES_MAGIC_LINK}
+ GOTRUE_MAILER_TEMPLATES_EMAIL_CHANGE: ${MAILER_TEMPLATES_EMAIL_CHANGE}
+
+ GOTRUE_MAILER_SUBJECTS_CONFIRMATION: ${MAILER_SUBJECTS_CONFIRMATION}
+ GOTRUE_MAILER_SUBJECTS_RECOVERY: ${MAILER_SUBJECTS_RECOVERY}
+ GOTRUE_MAILER_SUBJECTS_MAGIC_LINK: ${MAILER_SUBJECTS_MAGIC_LINK}
+ GOTRUE_MAILER_SUBJECTS_EMAIL_CHANGE: ${MAILER_SUBJECTS_EMAIL_CHANGE}
+ GOTRUE_MAILER_SUBJECTS_INVITE: ${MAILER_SUBJECTS_INVITE}
+
+ GOTRUE_EXTERNAL_APPLE_ENABLED: ${APPLE_ENABLED}
+ GOTRUE_EXTERNAL_APPLE_CLIENT_ID: ${APPLE_CLIENT_ID}
+ GOTRUE_EXTERNAL_APPLE_SECRET: ${APPLE_SECRET}
+ GOTRUE_EXTERNAL_APPLE_REDIRECT_URI: ${APPLE_REDIRECT_URI}
+
+ GOTRUE_EXTERNAL_AZURE_ENABLED: ${AZURE_ENABLED}
+ GOTRUE_EXTERNAL_AZURE_CLIENT_ID: ${AZURE_CLIENT_ID}
+ GOTRUE_EXTERNAL_AZURE_SECRET: ${AZURE_SECRET}
+ GOTRUE_EXTERNAL_AZURE_TENANT_ID: ${AZURE_TENANT_ID}
+ GOTRUE_EXTERNAL_AZURE_TENANT_URL: ${AZURE_TENANT_URL}
+ GOTRUE_EXTERNAL_AZURE_REDIRECT_URI: ${AZURE_REDIRECT_URI}
+
+ # Uncomment to enable the custom access token hook. See https://supabase.com/docs/guides/auth/auth-hooks for a full list of hooks and additional details about custom_access_token_hook
+
+ # GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_ENABLED: "true"
+ # GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_URI: "pg-functions://postgres/public/custom_access_token_hook"
+ # GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_SECRETS: ""
+
+ # GOTRUE_HOOK_MFA_VERIFICATION_ATTEMPT_ENABLED: "true"
+ # GOTRUE_HOOK_MFA_VERIFICATION_ATTEMPT_URI: "pg-functions://postgres/public/mfa_verification_attempt"
+
+ # GOTRUE_HOOK_PASSWORD_VERIFICATION_ATTEMPT_ENABLED: "true"
+ # GOTRUE_HOOK_PASSWORD_VERIFICATION_ATTEMPT_URI: "pg-functions://postgres/public/password_verification_attempt"
+
+ # GOTRUE_HOOK_SEND_SMS_ENABLED: "false"
+ # GOTRUE_HOOK_SEND_SMS_URI: "pg-functions://postgres/public/custom_access_token_hook"
+ # GOTRUE_HOOK_SEND_SMS_SECRETS: "v1,whsec_VGhpcyBpcyBhbiBleGFtcGxlIG9mIGEgc2hvcnRlciBCYXNlNjQgc3RyaW5n"
+
+ # GOTRUE_HOOK_SEND_EMAIL_ENABLED: "false"
+ # GOTRUE_HOOK_SEND_EMAIL_URI: "http://host.docker.internal:54321/functions/v1/email_sender"
+ # GOTRUE_HOOK_SEND_EMAIL_SECRETS: "v1,whsec_VGhpcyBpcyBhbiBleGFtcGxlIG9mIGEgc2hvcnRlciBCYXNlNjQgc3RyaW5n"
+
+ rest:
+ container_name: supabase-rest
+ image: postgrest/postgrest:v12.2.12
+ networks: [supabase-network]
+ restart: unless-stopped
+ depends_on:
+ db:
+ # Disable this if you are using an external Postgres database
+ condition: service_healthy
+ analytics:
+ condition: service_healthy
+ environment:
+ PGRST_DB_URI: postgres://authenticator:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
+ PGRST_DB_SCHEMAS: ${PGRST_DB_SCHEMAS}
+ PGRST_DB_ANON_ROLE: anon
+ PGRST_JWT_SECRET: ${JWT_SECRET}
+ PGRST_DB_USE_LEGACY_GUCS: "false"
+ PGRST_APP_SETTINGS_JWT_SECRET: ${JWT_SECRET}
+ PGRST_APP_SETTINGS_JWT_EXP: ${JWT_EXPIRY}
+ command:
+ [
+ "postgrest"
+ ]
+
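
PostgREST is only reachable through Kong, whose `rest-v1` route (in `kong.yml` below) enforces key-auth. A hedged smoke test, assuming the `profiles` table from this project's schema and the usual Supabase headers:

```ts
// Smoke test: query PostgREST through Kong with the anon key.
// ANON_KEY and API_EXTERNAL_URL come from the compose .env file.
const res = await fetch(
  `${process.env.API_EXTERNAL_URL}/rest/v1/profiles?select=id,full_name`,
  {
    headers: {
      apikey: process.env.ANON_KEY!,
      Authorization: `Bearer ${process.env.ANON_KEY}`,
    },
  },
);
console.log(res.status, await res.json());
```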
+ realtime:
+ # This container name looks inconsistent but is correct because Realtime constructs the tenant ID by parsing the subdomain
+ container_name: realtime-dev.supabase-realtime
+ image: supabase/realtime:v2.34.47
+ networks: [supabase-network]
+ restart: unless-stopped
+ depends_on:
+ db:
+ # Disable this if you are using an external Postgres database
+ condition: service_healthy
+ analytics:
+ condition: service_healthy
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "curl",
+ "-sSfL",
+ "--head",
+ "-o",
+ "/dev/null",
+ "-H",
+ "Authorization: Bearer ${ANON_KEY}",
+ "http://localhost:4000/api/tenants/realtime-dev/health"
+ ]
+ timeout: 5s
+ interval: 5s
+ retries: 3
+ environment:
+ PORT: 4000
+ DB_HOST: ${POSTGRES_HOST}
+ DB_PORT: ${POSTGRES_PORT}
+ DB_USER: supabase_admin
+ DB_PASSWORD: ${POSTGRES_PASSWORD}
+ DB_NAME: ${POSTGRES_DB}
+ DB_AFTER_CONNECT_QUERY: 'SET search_path TO _realtime'
+ DB_ENC_KEY: supabaserealtime
+ API_JWT_SECRET: ${JWT_SECRET}
+ SECRET_KEY_BASE: ${SECRET_KEY_BASE}
+ ERL_AFLAGS: -proto_dist inet_tcp
+ DNS_NODES: "''"
+ RLIMIT_NOFILE: "10000"
+ APP_NAME: realtime
+ SEED_SELF_HOST: true
+ RUN_JANITOR: true
+
+ # To use S3 backed storage: docker compose -f docker-compose.yml -f docker-compose.s3.yml up
+ storage:
+ container_name: supabase-storage
+ image: supabase/storage-api:v1.22.17
+ networks: [supabase-network]
+ restart: unless-stopped
+ volumes:
+ - ./volumes/storage:/var/lib/storage:z
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "wget",
+ "--no-verbose",
+ "--tries=1",
+ "--spider",
+ "http://storage:5000/status"
+ ]
+ timeout: 5s
+ interval: 5s
+ retries: 3
+ depends_on:
+ db:
+ # Disable this if you are using an external Postgres database
+ condition: service_healthy
+ rest:
+ condition: service_started
+ imgproxy:
+ condition: service_started
+ environment:
+ ANON_KEY: ${ANON_KEY}
+ SERVICE_KEY: ${SERVICE_ROLE_KEY}
+ POSTGREST_URL: http://rest:3000
+ PGRST_JWT_SECRET: ${JWT_SECRET}
+ DATABASE_URL: postgres://supabase_storage_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
+ FILE_SIZE_LIMIT: 52428800
+ STORAGE_BACKEND: file
+ FILE_STORAGE_BACKEND_PATH: /var/lib/storage
+ TENANT_ID: stub
+ # TODO: https://github.com/supabase/storage-api/issues/55
+ REGION: stub
+ GLOBAL_S3_BUCKET: stub
+ ENABLE_IMAGE_TRANSFORMATION: "true"
+ IMGPROXY_URL: http://imgproxy:5001
+
+ imgproxy:
+ container_name: supabase-imgproxy
+ image: darthsim/imgproxy:v3.8.0
+ networks: [supabase-network]
+ restart: unless-stopped
+ volumes:
+ - ./volumes/storage:/var/lib/storage:z
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "imgproxy",
+ "health"
+ ]
+ timeout: 5s
+ interval: 5s
+ retries: 3
+ environment:
+ IMGPROXY_BIND: ":5001"
+ IMGPROXY_LOCAL_FILESYSTEM_ROOT: /
+ IMGPROXY_USE_ETAG: "true"
+ IMGPROXY_ENABLE_WEBP_DETECTION: ${IMGPROXY_ENABLE_WEBP_DETECTION}
+
+ meta:
+ container_name: supabase-meta
+ image: supabase/postgres-meta:v0.89.0
+ networks: [supabase-network]
+ restart: unless-stopped
+ depends_on:
+ db:
+ # Disable this if you are using an external Postgres database
+ condition: service_healthy
+ analytics:
+ condition: service_healthy
+ environment:
+ PG_META_PORT: 8080
+ PG_META_DB_HOST: ${POSTGRES_HOST}
+ PG_META_DB_PORT: ${POSTGRES_PORT}
+ PG_META_DB_NAME: ${POSTGRES_DB}
+ PG_META_DB_USER: supabase_admin
+ PG_META_DB_PASSWORD: ${POSTGRES_PASSWORD}
+
+ functions:
+ container_name: supabase-edge-functions
+ image: supabase/edge-runtime:v1.67.4
+ networks: [supabase-network]
+ restart: unless-stopped
+ volumes:
+ - ./volumes/functions:/home/deno/functions:Z
+ depends_on:
+ analytics:
+ condition: service_healthy
+ environment:
+ JWT_SECRET: ${JWT_SECRET}
+ SUPABASE_URL: http://kong:8000
+ SUPABASE_ANON_KEY: ${ANON_KEY}
+ SUPABASE_SERVICE_ROLE_KEY: ${SERVICE_ROLE_KEY}
+ SUPABASE_DB_URL: postgresql://postgres:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
+ # TODO: Allow configuring VERIFY_JWT per function. This PR might help: https://github.com/supabase/cli/pull/786
+ VERIFY_JWT: "${FUNCTIONS_VERIFY_JWT}"
+ command:
+ [
+ "start",
+ "--main-service",
+ "/home/deno/functions/main"
+ ]
+
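
With `VERIFY_JWT` enabled, the runtime's main service (see `volumes/functions/main/index.ts` below) rejects requests that lack a valid JWT, so even the bundled `hello` function needs a bearer token. A minimal invocation sketch:

```ts
// Invoke the bundled `hello` function through Kong; the anon key is a
// valid JWT, so it satisfies VERIFY_JWT.
const res = await fetch(`${process.env.API_EXTERNAL_URL}/functions/v1/hello`, {
  headers: { Authorization: `Bearer ${process.env.ANON_KEY}` },
});
console.log(await res.text()); // "Hello from Edge Functions!"
```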
+ analytics:
+ container_name: supabase-analytics
+ image: supabase/logflare:1.12.0
+ networks: [supabase-network]
+ restart: unless-stopped
+ ports:
+ - 4000:4000
+ # Uncomment to use Big Query backend for analytics
+ # volumes:
+ # - type: bind
+ # source: ${PWD}/gcloud.json
+ # target: /opt/app/rel/logflare/bin/gcloud.json
+ # read_only: true
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "curl",
+ "http://localhost:4000/health"
+ ]
+ timeout: 5s
+ interval: 5s
+ retries: 10
+ depends_on:
+ db:
+ # Disable this if you are using an external Postgres database
+ condition: service_healthy
+ environment:
+ LOGFLARE_NODE_HOST: 127.0.0.1
+ DB_USERNAME: supabase_admin
+ DB_DATABASE: _supabase
+ DB_HOSTNAME: ${POSTGRES_HOST}
+ DB_PORT: ${POSTGRES_PORT}
+ DB_PASSWORD: ${POSTGRES_PASSWORD}
+ DB_SCHEMA: _analytics
+ LOGFLARE_API_KEY: ${LOGFLARE_API_KEY}
+ LOGFLARE_SINGLE_TENANT: true
+ LOGFLARE_SUPABASE_MODE: true
+ LOGFLARE_MIN_CLUSTER_SIZE: 1
+
+ # Comment variables to use Big Query backend for analytics
+ POSTGRES_BACKEND_URL: postgresql://supabase_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/_supabase
+ POSTGRES_BACKEND_SCHEMA: _analytics
+ LOGFLARE_FEATURE_FLAG_OVERRIDE: multibackend=true
+ # Uncomment to use Big Query backend for analytics
+ # GOOGLE_PROJECT_ID: ${GOOGLE_PROJECT_ID}
+ # GOOGLE_PROJECT_NUMBER: ${GOOGLE_PROJECT_NUMBER}
+
+ # Comment out everything below this point if you are using an external Postgres database
+ db:
+ container_name: supabase-db
+ image: supabase/postgres:15.8.1.060
+ networks: [supabase-network]
+ ports:
+ - ${POSTGRES_PORT}:${POSTGRES_PORT}
+ restart: unless-stopped
+ volumes:
+ - ./volumes/db/realtime.sql:/docker-entrypoint-initdb.d/migrations/99-realtime.sql:Z
+ # Must be superuser to create event trigger
+ - ./volumes/db/webhooks.sql:/docker-entrypoint-initdb.d/init-scripts/98-webhooks.sql:Z
+ # Must be superuser to alter reserved role
+ - ./volumes/db/roles.sql:/docker-entrypoint-initdb.d/init-scripts/99-roles.sql:Z
+ # Initialize the database settings with JWT_SECRET and JWT_EXP
+ - ./volumes/db/jwt.sql:/docker-entrypoint-initdb.d/init-scripts/99-jwt.sql:Z
+ # PGDATA directory is persisted between restarts
+ - ./volumes/db/data:/var/lib/postgresql/data:Z
+ # Changes required for internal supabase data such as _analytics
+ - ./volumes/db/_supabase.sql:/docker-entrypoint-initdb.d/migrations/97-_supabase.sql:Z
+ # Changes required for Analytics support
+ - ./volumes/db/logs.sql:/docker-entrypoint-initdb.d/migrations/99-logs.sql:Z
+ # Changes required for Pooler support
+ - ./volumes/db/pooler.sql:/docker-entrypoint-initdb.d/migrations/99-pooler.sql:Z
+ # Use named volume to persist pgsodium decryption key between restarts
+ - db-config:/etc/postgresql-custom
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "pg_isready",
+ "-U",
+ "postgres",
+ "-h",
+ "localhost"
+ ]
+ interval: 5s
+ timeout: 5s
+ retries: 10
+ depends_on:
+ vector:
+ condition: service_healthy
+ environment:
+ POSTGRES_HOST: /var/run/postgresql
+ PGPORT: ${POSTGRES_PORT}
+ POSTGRES_PORT: ${POSTGRES_PORT}
+ PGPASSWORD: ${POSTGRES_PASSWORD}
+ POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
+ PGDATABASE: ${POSTGRES_DB}
+ POSTGRES_DB: ${POSTGRES_DB}
+ JWT_SECRET: ${JWT_SECRET}
+ JWT_EXP: ${JWT_EXPIRY}
+ command:
+ [
+ "postgres",
+ "-c",
+ "config_file=/etc/postgresql/postgresql.conf",
+ "-c",
+ "log_min_messages=fatal" # prevents Realtime polling queries from appearing in logs
+ ]
+
+ vector:
+ container_name: supabase-vector
+ image: timberio/vector:0.28.1-alpine
+ networks: [supabase-network]
+ restart: unless-stopped
+ volumes:
+ - ./volumes/logs/vector.yml:/etc/vector/vector.yml:ro,z
+ - ${DOCKER_SOCKET_LOCATION}:/var/run/docker.sock:ro,z
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "wget",
+ "--no-verbose",
+ "--tries=1",
+ "--spider",
+ "http://vector:9001/health"
+ ]
+ timeout: 5s
+ interval: 5s
+ retries: 3
+ environment:
+ LOGFLARE_API_KEY: ${LOGFLARE_API_KEY}
+ command:
+ [
+ "--config",
+ "/etc/vector/vector.yml"
+ ]
+ security_opt:
+ - "label=disable"
+
+ # Update the DATABASE_URL if you are using an external Postgres database
+ supavisor:
+ container_name: supabase-pooler
+ image: supabase/supavisor:2.5.1
+ networks: [supabase-network]
+ restart: unless-stopped
+ ports:
+ #- ${POSTGRES_PORT}:5432
+ - ${POOLER_PROXY_PORT_TRANSACTION}:6543
+ volumes:
+ - ./volumes/pooler/pooler.exs:/etc/pooler/pooler.exs:ro,z
+ healthcheck:
+ test:
+ [
+ "CMD",
+ "curl",
+ "-sSfL",
+ "--head",
+ "-o",
+ "/dev/null",
+ "http://127.0.0.1:4000/api/health"
+ ]
+ interval: 10s
+ timeout: 5s
+ retries: 5
+ depends_on:
+ db:
+ condition: service_healthy
+ analytics:
+ condition: service_healthy
+ environment:
+ PORT: 4000
+ POSTGRES_PORT: ${POSTGRES_PORT}
+ POSTGRES_DB: ${POSTGRES_DB}
+ POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
+ DATABASE_URL: ecto://supabase_admin:${POSTGRES_PASSWORD}@db:${POSTGRES_PORT}/_supabase
+ CLUSTER_POSTGRES: true
+ SECRET_KEY_BASE: ${SECRET_KEY_BASE}
+ VAULT_ENC_KEY: ${VAULT_ENC_KEY}
+ API_JWT_SECRET: ${JWT_SECRET}
+ METRICS_JWT_SECRET: ${JWT_SECRET}
+ REGION: local
+ ERL_AFLAGS: -proto_dist inet_tcp
+ POOLER_TENANT_ID: ${POOLER_TENANT_ID}
+ POOLER_DEFAULT_POOL_SIZE: ${POOLER_DEFAULT_POOL_SIZE}
+ POOLER_MAX_CLIENT_CONN: ${POOLER_MAX_CLIENT_CONN}
+ POOLER_POOL_MODE: transaction
+ command:
+ [
+ "/bin/sh",
+ "-c",
+ "/app/bin/migrate && /app/bin/supavisor eval \"$$(cat /etc/pooler/pooler.exs)\" && /app/bin/server"
+ ]
+
+volumes:
+ db-config:
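
Since the services gate on each other's healthchecks (vector → db → analytics → everything else), first boot can take a while. A small readiness poller against the analytics health endpoint, which the compose file publishes on port 4000:

```ts
// Poll supabase-analytics (the slowest dependency) until the stack is up.
// Host/port assume the 4000:4000 mapping declared above.
for (let attempt = 1; attempt <= 30; attempt++) {
  try {
    const res = await fetch('http://localhost:4000/health');
    if (res.ok) break;
  } catch {
    // container not accepting connections yet
  }
  await new Promise((resolve) => setTimeout(resolve, 2000));
}
```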
diff --git a/src/server/docker/volumes/api/kong.yml b/src/server/docker/volumes/api/kong.yml
new file mode 100644
index 0000000..7abf425
--- /dev/null
+++ b/src/server/docker/volumes/api/kong.yml
@@ -0,0 +1,241 @@
+_format_version: '2.1'
+_transform: true
+
+###
+### Consumers / Users
+###
+consumers:
+ - username: DASHBOARD
+ - username: anon
+ keyauth_credentials:
+ - key: $SUPABASE_ANON_KEY
+ - username: service_role
+ keyauth_credentials:
+ - key: $SUPABASE_SERVICE_KEY
+
+###
+### Access Control List
+###
+acls:
+ - consumer: anon
+ group: anon
+ - consumer: service_role
+ group: admin
+
+###
+### Dashboard credentials
+###
+basicauth_credentials:
+ - consumer: DASHBOARD
+ username: $DASHBOARD_USERNAME
+ password: $DASHBOARD_PASSWORD
+
+###
+### API Routes
+###
+services:
+ ## Open Auth routes
+ - name: auth-v1-open
+ url: http://auth:9999/verify
+ routes:
+ - name: auth-v1-open
+ strip_path: true
+ paths:
+ - /auth/v1/verify
+ plugins:
+ - name: cors
+ - name: auth-v1-open-callback
+ url: http://auth:9999/callback
+ routes:
+ - name: auth-v1-open-callback
+ strip_path: true
+ paths:
+ - /auth/v1/callback
+ plugins:
+ - name: cors
+ - name: auth-v1-open-authorize
+ url: http://auth:9999/authorize
+ routes:
+ - name: auth-v1-open-authorize
+ strip_path: true
+ paths:
+ - /auth/v1/authorize
+ plugins:
+ - name: cors
+
+ ## Secure Auth routes
+ - name: auth-v1
+ _comment: 'GoTrue: /auth/v1/* -> http://auth:9999/*'
+ url: http://auth:9999/
+ routes:
+ - name: auth-v1-all
+ strip_path: true
+ paths:
+ - /auth/v1/
+ plugins:
+ - name: cors
+ - name: key-auth
+ config:
+ hide_credentials: false
+ - name: acl
+ config:
+ hide_groups_header: true
+ allow:
+ - admin
+ - anon
+
+ ## Secure REST routes
+ - name: rest-v1
+ _comment: 'PostgREST: /rest/v1/* -> http://rest:3000/*'
+ url: http://rest:3000/
+ routes:
+ - name: rest-v1-all
+ strip_path: true
+ paths:
+ - /rest/v1/
+ plugins:
+ - name: cors
+ - name: key-auth
+ config:
+ hide_credentials: true
+ - name: acl
+ config:
+ hide_groups_header: true
+ allow:
+ - admin
+ - anon
+
+ ## Secure GraphQL routes
+ - name: graphql-v1
+ _comment: 'PostgREST: /graphql/v1/* -> http://rest:3000/rpc/graphql'
+ url: http://rest:3000/rpc/graphql
+ routes:
+ - name: graphql-v1-all
+ strip_path: true
+ paths:
+ - /graphql/v1
+ plugins:
+ - name: cors
+ - name: key-auth
+ config:
+ hide_credentials: true
+ - name: request-transformer
+ config:
+ add:
+ headers:
+ - Content-Profile:graphql_public
+ - name: acl
+ config:
+ hide_groups_header: true
+ allow:
+ - admin
+ - anon
+
+ ## Secure Realtime routes
+ - name: realtime-v1-ws
+ _comment: 'Realtime: /realtime/v1/* -> ws://realtime:4000/socket/*'
+ url: http://realtime-dev.supabase-realtime:4000/socket
+ protocol: ws
+ routes:
+ - name: realtime-v1-ws
+ strip_path: true
+ paths:
+ - /realtime/v1/
+ plugins:
+ - name: cors
+ - name: key-auth
+ config:
+ hide_credentials: false
+ - name: acl
+ config:
+ hide_groups_header: true
+ allow:
+ - admin
+ - anon
+ - name: realtime-v1-rest
+ _comment: 'Realtime: /realtime/v1/* -> ws://realtime:4000/socket/*'
+ url: http://realtime-dev.supabase-realtime:4000/api
+ protocol: http
+ routes:
+ - name: realtime-v1-rest
+ strip_path: true
+ paths:
+ - /realtime/v1/api
+ plugins:
+ - name: cors
+ - name: key-auth
+ config:
+ hide_credentials: false
+ - name: acl
+ config:
+ hide_groups_header: true
+ allow:
+ - admin
+ - anon
+ ## Storage routes: the storage server manages its own auth
+ - name: storage-v1
+ _comment: 'Storage: /storage/v1/* -> http://storage:5000/*'
+ url: http://storage:5000/
+ routes:
+ - name: storage-v1-all
+ strip_path: true
+ paths:
+ - /storage/v1/
+ plugins:
+ - name: cors
+
+ ## Edge Functions routes
+ - name: functions-v1
+ _comment: 'Edge Functions: /functions/v1/* -> http://functions:9000/*'
+ url: http://functions:9000/
+ routes:
+ - name: functions-v1-all
+ strip_path: true
+ paths:
+ - /functions/v1/
+ plugins:
+ - name: cors
+
+ ## Analytics routes
+ - name: analytics-v1
+ _comment: 'Analytics: /analytics/v1/* -> http://logflare:4000/*'
+ url: http://analytics:4000/
+ routes:
+ - name: analytics-v1-all
+ strip_path: true
+ paths:
+ - /analytics/v1/
+
+ ## Secure Database routes
+ - name: meta
+ _comment: 'pg-meta: /pg/* -> http://pg-meta:8080/*'
+ url: http://meta:8080/
+ routes:
+ - name: meta-all
+ strip_path: true
+ paths:
+ - /pg/
+ plugins:
+ - name: key-auth
+ config:
+ hide_credentials: false
+ - name: acl
+ config:
+ hide_groups_header: true
+ allow:
+ - admin
+
+ ## Protected Dashboard - catch all remaining routes
+ - name: dashboard
+ _comment: 'Studio: /* -> http://studio:3000/*'
+ url: http://studio:3000/
+ routes:
+ - name: dashboard-all
+ strip_path: true
+ paths:
+ - /
+ plugins:
+ - name: cors
+ - name: basic-auth
+ config:
+ hide_credentials: true
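
One detail worth noticing in the ACLs above: most routes allow both `admin` and `anon`, but the `/pg/` (postgres-meta) route allows only `admin`, so it answers to the service key and rejects the anon key at the gateway. A sketch (the `/tables` path is an assumption about postgres-meta's API):

```ts
// The /pg/ route is admin-only: the service_role key passes Kong's ACL,
// the anon key should be rejected before reaching postgres-meta.
const asAdmin = await fetch(`${process.env.API_EXTERNAL_URL}/pg/tables`, {
  headers: { apikey: process.env.SERVICE_ROLE_KEY! },
});
const asAnon = await fetch(`${process.env.API_EXTERNAL_URL}/pg/tables`, {
  headers: { apikey: process.env.ANON_KEY! },
});
console.log(asAdmin.status, asAnon.status); // expect 200 and 403
```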
diff --git a/src/server/docker/volumes/db/_supabase.sql b/src/server/docker/volumes/db/_supabase.sql
new file mode 100644
index 0000000..6236ae1
--- /dev/null
+++ b/src/server/docker/volumes/db/_supabase.sql
@@ -0,0 +1,3 @@
+\set pguser `echo "$POSTGRES_USER"`
+
+CREATE DATABASE _supabase WITH OWNER :pguser;
diff --git a/src/server/docker/volumes/db/init/data.sql b/src/server/docker/volumes/db/init/data.sql
new file mode 100755
index 0000000..e69de29
diff --git a/src/server/docker/volumes/db/jwt.sql b/src/server/docker/volumes/db/jwt.sql
new file mode 100644
index 0000000..cfd3b16
--- /dev/null
+++ b/src/server/docker/volumes/db/jwt.sql
@@ -0,0 +1,5 @@
+\set jwt_secret `echo "$JWT_SECRET"`
+\set jwt_exp `echo "$JWT_EXP"`
+
+ALTER DATABASE postgres SET "app.settings.jwt_secret" TO :'jwt_secret';
+ALTER DATABASE postgres SET "app.settings.jwt_exp" TO :'jwt_exp';
diff --git a/src/server/docker/volumes/db/logs.sql b/src/server/docker/volumes/db/logs.sql
new file mode 100644
index 0000000..255c0f4
--- /dev/null
+++ b/src/server/docker/volumes/db/logs.sql
@@ -0,0 +1,6 @@
+\set pguser `echo "$POSTGRES_USER"`
+
+\c _supabase
+create schema if not exists _analytics;
+alter schema _analytics owner to :pguser;
+\c postgres
diff --git a/src/server/docker/volumes/db/pooler.sql b/src/server/docker/volumes/db/pooler.sql
new file mode 100644
index 0000000..162c5b9
--- /dev/null
+++ b/src/server/docker/volumes/db/pooler.sql
@@ -0,0 +1,6 @@
+\set pguser `echo "$POSTGRES_USER"`
+
+\c _supabase
+create schema if not exists _supavisor;
+alter schema _supavisor owner to :pguser;
+\c postgres
diff --git a/src/server/docker/volumes/db/realtime.sql b/src/server/docker/volumes/db/realtime.sql
new file mode 100644
index 0000000..4d4b9ff
--- /dev/null
+++ b/src/server/docker/volumes/db/realtime.sql
@@ -0,0 +1,4 @@
+\set pguser `echo "$POSTGRES_USER"`
+
+create schema if not exists _realtime;
+alter schema _realtime owner to :pguser;
diff --git a/src/server/docker/volumes/db/roles.sql b/src/server/docker/volumes/db/roles.sql
new file mode 100644
index 0000000..8f7161a
--- /dev/null
+++ b/src/server/docker/volumes/db/roles.sql
@@ -0,0 +1,8 @@
+-- NOTE: change to your own passwords for production environments
+\set pgpass `echo "$POSTGRES_PASSWORD"`
+
+ALTER USER authenticator WITH PASSWORD :'pgpass';
+ALTER USER pgbouncer WITH PASSWORD :'pgpass';
+ALTER USER supabase_auth_admin WITH PASSWORD :'pgpass';
+ALTER USER supabase_functions_admin WITH PASSWORD :'pgpass';
+ALTER USER supabase_storage_admin WITH PASSWORD :'pgpass';
diff --git a/src/server/docker/volumes/db/webhooks.sql b/src/server/docker/volumes/db/webhooks.sql
new file mode 100644
index 0000000..5837b86
--- /dev/null
+++ b/src/server/docker/volumes/db/webhooks.sql
@@ -0,0 +1,208 @@
+BEGIN;
+ -- Create pg_net extension
+ CREATE EXTENSION IF NOT EXISTS pg_net SCHEMA extensions;
+ -- Create supabase_functions schema
+ CREATE SCHEMA supabase_functions AUTHORIZATION supabase_admin;
+ GRANT USAGE ON SCHEMA supabase_functions TO postgres, anon, authenticated, service_role;
+ ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON TABLES TO postgres, anon, authenticated, service_role;
+ ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON FUNCTIONS TO postgres, anon, authenticated, service_role;
+ ALTER DEFAULT PRIVILEGES IN SCHEMA supabase_functions GRANT ALL ON SEQUENCES TO postgres, anon, authenticated, service_role;
+ -- supabase_functions.migrations definition
+ CREATE TABLE supabase_functions.migrations (
+ version text PRIMARY KEY,
+ inserted_at timestamptz NOT NULL DEFAULT NOW()
+ );
+ -- Initial supabase_functions migration
+ INSERT INTO supabase_functions.migrations (version) VALUES ('initial');
+ -- supabase_functions.hooks definition
+ CREATE TABLE supabase_functions.hooks (
+ id bigserial PRIMARY KEY,
+ hook_table_id integer NOT NULL,
+ hook_name text NOT NULL,
+ created_at timestamptz NOT NULL DEFAULT NOW(),
+ request_id bigint
+ );
+ CREATE INDEX supabase_functions_hooks_request_id_idx ON supabase_functions.hooks USING btree (request_id);
+ CREATE INDEX supabase_functions_hooks_h_table_id_h_name_idx ON supabase_functions.hooks USING btree (hook_table_id, hook_name);
+ COMMENT ON TABLE supabase_functions.hooks IS 'Supabase Functions Hooks: Audit trail for triggered hooks.';
+ CREATE FUNCTION supabase_functions.http_request()
+ RETURNS trigger
+ LANGUAGE plpgsql
+ AS $function$
+ DECLARE
+ request_id bigint;
+ payload jsonb;
+ url text := TG_ARGV[0]::text;
+ method text := TG_ARGV[1]::text;
+ headers jsonb DEFAULT '{}'::jsonb;
+ params jsonb DEFAULT '{}'::jsonb;
+ timeout_ms integer DEFAULT 1000;
+ BEGIN
+ IF url IS NULL OR url = 'null' THEN
+ RAISE EXCEPTION 'url argument is missing';
+ END IF;
+
+ IF method IS NULL OR method = 'null' THEN
+ RAISE EXCEPTION 'method argument is missing';
+ END IF;
+
+ IF TG_ARGV[2] IS NULL OR TG_ARGV[2] = 'null' THEN
+ headers = '{"Content-Type": "application/json"}'::jsonb;
+ ELSE
+ headers = TG_ARGV[2]::jsonb;
+ END IF;
+
+ IF TG_ARGV[3] IS NULL OR TG_ARGV[3] = 'null' THEN
+ params = '{}'::jsonb;
+ ELSE
+ params = TG_ARGV[3]::jsonb;
+ END IF;
+
+ IF TG_ARGV[4] IS NULL OR TG_ARGV[4] = 'null' THEN
+ timeout_ms = 1000;
+ ELSE
+ timeout_ms = TG_ARGV[4]::integer;
+ END IF;
+
+ CASE
+ WHEN method = 'GET' THEN
+ SELECT http_get INTO request_id FROM net.http_get(
+ url,
+ params,
+ headers,
+ timeout_ms
+ );
+ WHEN method = 'POST' THEN
+ payload = jsonb_build_object(
+ 'old_record', OLD,
+ 'record', NEW,
+ 'type', TG_OP,
+ 'table', TG_TABLE_NAME,
+ 'schema', TG_TABLE_SCHEMA
+ );
+
+ SELECT http_post INTO request_id FROM net.http_post(
+ url,
+ payload,
+ params,
+ headers,
+ timeout_ms
+ );
+ ELSE
+ RAISE EXCEPTION 'method argument % is invalid', method;
+ END CASE;
+
+ INSERT INTO supabase_functions.hooks
+ (hook_table_id, hook_name, request_id)
+ VALUES
+ (TG_RELID, TG_NAME, request_id);
+
+ RETURN NEW;
+ END
+ $function$;
+ -- Supabase super admin
+ DO
+ $$
+ BEGIN
+ IF NOT EXISTS (
+ SELECT 1
+ FROM pg_roles
+ WHERE rolname = 'supabase_functions_admin'
+ )
+ THEN
+ CREATE USER supabase_functions_admin NOINHERIT CREATEROLE LOGIN NOREPLICATION;
+ END IF;
+ END
+ $$;
+ GRANT ALL PRIVILEGES ON SCHEMA supabase_functions TO supabase_functions_admin;
+ GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA supabase_functions TO supabase_functions_admin;
+ GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA supabase_functions TO supabase_functions_admin;
+ ALTER USER supabase_functions_admin SET search_path = "supabase_functions";
+ ALTER table "supabase_functions".migrations OWNER TO supabase_functions_admin;
+ ALTER table "supabase_functions".hooks OWNER TO supabase_functions_admin;
+ ALTER function "supabase_functions".http_request() OWNER TO supabase_functions_admin;
+ GRANT supabase_functions_admin TO postgres;
+ -- Remove unused supabase_pg_net_admin role
+ DO
+ $$
+ BEGIN
+ IF EXISTS (
+ SELECT 1
+ FROM pg_roles
+ WHERE rolname = 'supabase_pg_net_admin'
+ )
+ THEN
+ REASSIGN OWNED BY supabase_pg_net_admin TO supabase_admin;
+ DROP OWNED BY supabase_pg_net_admin;
+ DROP ROLE supabase_pg_net_admin;
+ END IF;
+ END
+ $$;
+ -- pg_net grants when extension is already enabled
+ DO
+ $$
+ BEGIN
+ IF EXISTS (
+ SELECT 1
+ FROM pg_extension
+ WHERE extname = 'pg_net'
+ )
+ THEN
+ GRANT USAGE ON SCHEMA net TO supabase_functions_admin, postgres, anon, authenticated, service_role;
+ ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER;
+ ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER;
+ ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net;
+ ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net;
+ REVOKE ALL ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC;
+ REVOKE ALL ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC;
+ GRANT EXECUTE ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role;
+ GRANT EXECUTE ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role;
+ END IF;
+ END
+ $$;
+ -- Event trigger for pg_net
+ CREATE OR REPLACE FUNCTION extensions.grant_pg_net_access()
+ RETURNS event_trigger
+ LANGUAGE plpgsql
+ AS $$
+ BEGIN
+ IF EXISTS (
+ SELECT 1
+ FROM pg_event_trigger_ddl_commands() AS ev
+ JOIN pg_extension AS ext
+ ON ev.objid = ext.oid
+ WHERE ext.extname = 'pg_net'
+ )
+ THEN
+ GRANT USAGE ON SCHEMA net TO supabase_functions_admin, postgres, anon, authenticated, service_role;
+ ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER;
+ ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SECURITY DEFINER;
+ ALTER function net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net;
+ ALTER function net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) SET search_path = net;
+ REVOKE ALL ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC;
+ REVOKE ALL ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) FROM PUBLIC;
+ GRANT EXECUTE ON FUNCTION net.http_get(url text, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role;
+ GRANT EXECUTE ON FUNCTION net.http_post(url text, body jsonb, params jsonb, headers jsonb, timeout_milliseconds integer) TO supabase_functions_admin, postgres, anon, authenticated, service_role;
+ END IF;
+ END;
+ $$;
+ COMMENT ON FUNCTION extensions.grant_pg_net_access IS 'Grants access to pg_net';
+ DO
+ $$
+ BEGIN
+ IF NOT EXISTS (
+ SELECT 1
+ FROM pg_event_trigger
+ WHERE evtname = 'issue_pg_net_access'
+ ) THEN
+ CREATE EVENT TRIGGER issue_pg_net_access ON ddl_command_end WHEN TAG IN ('CREATE EXTENSION')
+ EXECUTE PROCEDURE extensions.grant_pg_net_access();
+ END IF;
+ END
+ $$;
+ INSERT INTO supabase_functions.migrations (version) VALUES ('20210809183423_update_grants');
+ ALTER function supabase_functions.http_request() SECURITY DEFINER;
+ ALTER function supabase_functions.http_request() SET search_path = supabase_functions;
+ REVOKE ALL ON FUNCTION supabase_functions.http_request() FROM PUBLIC;
+ GRANT EXECUTE ON FUNCTION supabase_functions.http_request() TO postgres, anon, authenticated, service_role;
+COMMIT;
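
For POST hooks, `supabase_functions.http_request()` always builds the same envelope from the triggering row, so a receiver can be typed once. A sketch of a Deno handler matching the payload constructed above (the endpoint wiring itself is illustrative):

```ts
import { serve } from 'https://deno.land/std@0.177.1/http/server.ts';

// Shape built by supabase_functions.http_request() for POST hooks.
type HookPayload = {
  type: 'INSERT' | 'UPDATE' | 'DELETE';
  table: string;
  schema: string;
  record: Record<string, unknown> | null;
  old_record: Record<string, unknown> | null;
};

serve(async (req) => {
  const payload = (await req.json()) as HookPayload;
  console.log(`${payload.type} on ${payload.schema}.${payload.table}`);
  return new Response('ok');
});
```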
diff --git a/src/server/docker/volumes/functions/hello/index.ts b/src/server/docker/volumes/functions/hello/index.ts
new file mode 100644
index 0000000..7ae5cc1
--- /dev/null
+++ b/src/server/docker/volumes/functions/hello/index.ts
@@ -0,0 +1,15 @@
+// Follow this setup guide to integrate the Deno language server with your editor:
+// https://deno.land/manual/getting_started/setup_your_environment
+// This enables autocomplete, go to definition, etc.
+
+import { serve } from 'https://deno.land/std@0.177.1/http/server.ts';
+
+serve(async () => {
+ return new Response(`"Hello from Edge Functions!"`, {
+ headers: { 'Content-Type': 'application/json' },
+ });
+});
+
+// To invoke:
+// curl 'http://localhost:<KONG_HTTP_PORT>/functions/v1/hello' \
+//   --header 'Authorization: Bearer <anon/service_role API key>'
diff --git a/src/server/docker/volumes/functions/main/index.ts b/src/server/docker/volumes/functions/main/index.ts
new file mode 100644
index 0000000..291c1e0
--- /dev/null
+++ b/src/server/docker/volumes/functions/main/index.ts
@@ -0,0 +1,94 @@
+import { serve } from 'https://deno.land/std@0.131.0/http/server.ts';
+import * as jose from 'https://deno.land/x/jose@v4.14.4/index.ts';
+
+console.log('main function started');
+
+const JWT_SECRET = Deno.env.get('JWT_SECRET');
+const VERIFY_JWT = Deno.env.get('VERIFY_JWT') === 'true';
+
+function getAuthToken(req: Request) {
+ const authHeader = req.headers.get('authorization');
+ if (!authHeader) {
+ throw new Error('Missing authorization header');
+ }
+ const [bearer, token] = authHeader.split(' ');
+ if (bearer !== 'Bearer') {
+ throw new Error(`Auth header is not 'Bearer {token}'`);
+ }
+ return token;
+}
+
+async function verifyJWT(jwt: string): Promise<boolean> {
+ const encoder = new TextEncoder();
+ const secretKey = encoder.encode(JWT_SECRET);
+ try {
+ await jose.jwtVerify(jwt, secretKey);
+ } catch (err) {
+ console.error(err);
+ return false;
+ }
+ return true;
+}
+
+serve(async (req: Request) => {
+ if (req.method !== 'OPTIONS' && VERIFY_JWT) {
+ try {
+ const token = getAuthToken(req);
+ const isValidJWT = await verifyJWT(token);
+
+ if (!isValidJWT) {
+ return new Response(JSON.stringify({ msg: 'Invalid JWT' }), {
+ status: 401,
+ headers: { 'Content-Type': 'application/json' },
+ });
+ }
+ } catch (e) {
+ console.error(e);
+ return new Response(JSON.stringify({ msg: e.toString() }), {
+ status: 401,
+ headers: { 'Content-Type': 'application/json' },
+ });
+ }
+ }
+
+ const url = new URL(req.url);
+ const { pathname } = url;
+ const path_parts = pathname.split('/');
+ const service_name = path_parts[1];
+
+ if (!service_name || service_name === '') {
+ const error = { msg: 'missing function name in request' };
+ return new Response(JSON.stringify(error), {
+ status: 400,
+ headers: { 'Content-Type': 'application/json' },
+ });
+ }
+
+ const servicePath = `/home/deno/functions/${service_name}`;
+ console.error(`serving the request with ${servicePath}`);
+
+ const memoryLimitMb = 150;
+ const workerTimeoutMs = 1 * 60 * 1000;
+ const noModuleCache = false;
+ const importMapPath = null;
+ const envVarsObj = Deno.env.toObject();
+ const envVars = Object.keys(envVarsObj).map((k) => [k, envVarsObj[k]]);
+
+ try {
+ const worker = await EdgeRuntime.userWorkers.create({
+ servicePath,
+ memoryLimitMb,
+ workerTimeoutMs,
+ noModuleCache,
+ importMapPath,
+ envVars,
+ });
+ return await worker.fetch(req);
+ } catch (e) {
+ const error = { msg: e.toString() };
+ return new Response(JSON.stringify(error), {
+ status: 500,
+ headers: { 'Content-Type': 'application/json' },
+ });
+ }
+});
diff --git a/src/server/docker/volumes/logs/vector.yml b/src/server/docker/volumes/logs/vector.yml
new file mode 100644
index 0000000..cce46df
--- /dev/null
+++ b/src/server/docker/volumes/logs/vector.yml
@@ -0,0 +1,232 @@
+api:
+ enabled: true
+ address: 0.0.0.0:9001
+
+sources:
+ docker_host:
+ type: docker_logs
+ exclude_containers:
+ - supabase-vector
+
+transforms:
+ project_logs:
+ type: remap
+ inputs:
+ - docker_host
+ source: |-
+ .project = "default"
+ .event_message = del(.message)
+ .appname = del(.container_name)
+ del(.container_created_at)
+ del(.container_id)
+ del(.source_type)
+ del(.stream)
+ del(.label)
+ del(.image)
+ del(.host)
+ del(.stream)
+ router:
+ type: route
+ inputs:
+ - project_logs
+ route:
+ kong: '.appname == "supabase-kong"'
+ auth: '.appname == "supabase-auth"'
+ rest: '.appname == "supabase-rest"'
+ realtime: '.appname == "supabase-realtime"'
+ storage: '.appname == "supabase-storage"'
+ functions: '.appname == "supabase-functions"'
+ db: '.appname == "supabase-db"'
+ # Ignores non-nginx errors since they are related to Kong booting up
+ kong_logs:
+ type: remap
+ inputs:
+ - router.kong
+ source: |-
+ req, err = parse_nginx_log(.event_message, "combined")
+ if err == null {
+ .timestamp = req.timestamp
+ .metadata.request.headers.referer = req.referer
+ .metadata.request.headers.user_agent = req.agent
+ .metadata.request.headers.cf_connecting_ip = req.client
+ .metadata.request.method = req.method
+ .metadata.request.path = req.path
+ .metadata.request.protocol = req.protocol
+ .metadata.response.status_code = req.status
+ }
+ if err != null {
+ abort
+ }
+ # Ignores non-nginx errors since they are related to Kong booting up
+ kong_err:
+ type: remap
+ inputs:
+ - router.kong
+ source: |-
+ .metadata.request.method = "GET"
+ .metadata.response.status_code = 200
+ parsed, err = parse_nginx_log(.event_message, "error")
+ if err == null {
+ .timestamp = parsed.timestamp
+ .severity = parsed.severity
+ .metadata.request.host = parsed.host
+ .metadata.request.headers.cf_connecting_ip = parsed.client
+ url, err = split(parsed.request, " ")
+ if err == null {
+ .metadata.request.method = url[0]
+ .metadata.request.path = url[1]
+ .metadata.request.protocol = url[2]
+ }
+ }
+ if err != null {
+ abort
+ }
+ # GoTrue logs are structured JSON strings which the frontend parses directly, but we keep metadata for consistency.
+ auth_logs:
+ type: remap
+ inputs:
+ - router.auth
+ source: |-
+ parsed, err = parse_json(.event_message)
+ if err == null {
+ .metadata.timestamp = parsed.time
+ .metadata = merge!(.metadata, parsed)
+ }
+ # PostgREST logs are structured, so we separate the timestamp from the message using a regex
+ rest_logs:
+ type: remap
+ inputs:
+ - router.rest
+ source: |-
+ parsed, err = parse_regex(.event_message, r'^(?P<time>.*): (?P<msg>.*)$')
+ if err == null {
+ .event_message = parsed.msg
+ .timestamp = to_timestamp!(parsed.time)
+ .metadata.host = .project
+ }
+ # Realtime logs are structured, so we parse the severity level using a regex (we ignore the time because it has no date)
+ realtime_logs:
+ type: remap
+ inputs:
+ - router.realtime
+ source: |-
+ .metadata.project = del(.project)
+ .metadata.external_id = .metadata.project
+ parsed, err = parse_regex(.event_message, r'^(?P<time>\d+:\d+:\d+\.\d+) \[(?P<level>\w+)\] (?P<msg>.*)$')
+ if err == null {
+ .event_message = parsed.msg
+ .metadata.level = parsed.level
+ }
+ # Storage logs may contain JSON objects, so we parse them for completeness
+ storage_logs:
+ type: remap
+ inputs:
+ - router.storage
+ source: |-
+ .metadata.project = del(.project)
+ .metadata.tenantId = .metadata.project
+ parsed, err = parse_json(.event_message)
+ if err == null {
+ .event_message = parsed.msg
+ .metadata.level = parsed.level
+ .metadata.timestamp = parsed.time
+ .metadata.context[0].host = parsed.hostname
+ .metadata.context[0].pid = parsed.pid
+ }
+ # Postgres logs some messages to stderr, which we map to the warning severity level
+ db_logs:
+ type: remap
+ inputs:
+ - router.db
+ source: |-
+ .metadata.host = "db-default"
+ .metadata.parsed.timestamp = .timestamp
+
+ parsed, err = parse_regex(.event_message, r'.*(?P<level>INFO|NOTICE|WARNING|ERROR|LOG|FATAL|PANIC?):.*', numeric_groups: true)
+
+ if err != null || parsed == null {
+ .metadata.parsed.error_severity = "info"
+ }
+ if parsed != null {
+ .metadata.parsed.error_severity = parsed.level
+ }
+ if .metadata.parsed.error_severity == "info" {
+ .metadata.parsed.error_severity = "log"
+ }
+ .metadata.parsed.error_severity = upcase!(.metadata.parsed.error_severity)
+
+sinks:
+ logflare_auth:
+ type: 'http'
+ inputs:
+ - auth_logs
+ encoding:
+ codec: 'json'
+ method: 'post'
+ request:
+ retry_max_duration_secs: 10
+ uri: 'http://analytics:4000/api/logs?source_name=gotrue.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
+ logflare_realtime:
+ type: 'http'
+ inputs:
+ - realtime_logs
+ encoding:
+ codec: 'json'
+ method: 'post'
+ request:
+ retry_max_duration_secs: 10
+ uri: 'http://analytics:4000/api/logs?source_name=realtime.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
+ logflare_rest:
+ type: 'http'
+ inputs:
+ - rest_logs
+ encoding:
+ codec: 'json'
+ method: 'post'
+ request:
+ retry_max_duration_secs: 10
+ uri: 'http://analytics:4000/api/logs?source_name=postgREST.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
+ logflare_db:
+ type: 'http'
+ inputs:
+ - db_logs
+ encoding:
+ codec: 'json'
+ method: 'post'
+ request:
+ retry_max_duration_secs: 10
+ # We must route the sink through Kong because ingesting logs before Logflare is fully initialised will
+ # lead to broken queries from Studio. This works on the assumption that containers are started in the
+ # following order: vector > db > logflare > kong
+ uri: 'http://kong:8000/analytics/v1/api/logs?source_name=postgres.logs&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
+ logflare_functions:
+ type: 'http'
+ inputs:
+ - router.functions
+ encoding:
+ codec: 'json'
+ method: 'post'
+ request:
+ retry_max_duration_secs: 10
+ uri: 'http://analytics:4000/api/logs?source_name=deno-relay-logs&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
+ logflare_storage:
+ type: 'http'
+ inputs:
+ - storage_logs
+ encoding:
+ codec: 'json'
+ method: 'post'
+ request:
+ retry_max_duration_secs: 10
+ uri: 'http://analytics:4000/api/logs?source_name=storage.logs.prod.2&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
+ logflare_kong:
+ type: 'http'
+ inputs:
+ - kong_logs
+ - kong_err
+ encoding:
+ codec: 'json'
+ method: 'post'
+ request:
+ retry_max_duration_secs: 10
+ uri: 'http://analytics:4000/api/logs?source_name=cloudflare.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
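
Every sink above is an HTTP POST against Logflare's ingest API, keyed by `source_name` and `api_key` in the query string. The equivalent hand-rolled request (the `batch` envelope is an assumption about Logflare's ingest format):

```ts
// Post one event the way the Vector sinks do; source_name and api_key
// mirror the sink URIs above, the body shape is an assumption.
await fetch(
  `http://analytics:4000/api/logs?source_name=gotrue.logs.prod&api_key=${process.env.LOGFLARE_API_KEY}`,
  {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      batch: [{ event_message: 'test event', metadata: { project: 'default' } }],
    }),
  },
);
```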
diff --git a/src/server/docker/volumes/pooler/pooler.exs b/src/server/docker/volumes/pooler/pooler.exs
new file mode 100644
index 0000000..791d61c
--- /dev/null
+++ b/src/server/docker/volumes/pooler/pooler.exs
@@ -0,0 +1,30 @@
+{:ok, _} = Application.ensure_all_started(:supavisor)
+
+{:ok, version} =
+ case Supavisor.Repo.query!("select version()") do
+ %{rows: [[ver]]} -> Supavisor.Helpers.parse_pg_version(ver)
+ _ -> nil
+ end
+
+params = %{
+ "external_id" => System.get_env("POOLER_TENANT_ID"),
+ "db_host" => "db",
+ "db_port" => System.get_env("POSTGRES_PORT"),
+ "db_database" => System.get_env("POSTGRES_DB"),
+ "require_user" => false,
+ "auth_query" => "SELECT * FROM pgbouncer.get_auth($1)",
+ "default_max_clients" => System.get_env("POOLER_MAX_CLIENT_CONN"),
+ "default_pool_size" => System.get_env("POOLER_DEFAULT_POOL_SIZE"),
+ "default_parameter_status" => %{"server_version" => version},
+ "users" => [%{
+ "db_user" => "pgbouncer",
+ "db_password" => System.get_env("POSTGRES_PASSWORD"),
+ "mode_type" => System.get_env("POOLER_POOL_MODE"),
+ "pool_size" => System.get_env("POOLER_DEFAULT_POOL_SIZE"),
+ "is_manager" => true
+ }]
+}
+
+if !Supavisor.Tenants.get_tenant_by_external_id(params["external_id"]) do
+ {:ok, _} = Supavisor.Tenants.create_tenant(params)
+end
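
This script seeds a single tenant whose `external_id` is `POOLER_TENANT_ID`; Supavisor then routes incoming connections by a `user.tenant` suffix on the database username. A connection sketch using the `postgres` npm client (the username convention is an assumption about Supavisor's routing):

```ts
import postgres from 'postgres';

// Connect through the transaction pooler published on POOLER_PROXY_PORT_TRANSACTION.
const sql = postgres({
  host: 'localhost',
  port: Number(process.env.POOLER_PROXY_PORT_TRANSACTION ?? 6543),
  database: process.env.POSTGRES_DB,
  username: `postgres.${process.env.POOLER_TENANT_ID}`, // user.tenant routing
  password: process.env.POSTGRES_PASSWORD,
  prepare: false, // prepared statements don't survive transaction-mode pooling
});

console.log(await sql`select now()`);
```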
diff --git a/src/server/mail_templates/change_email_address.html b/src/server/mail_templates/change_email_address.html
new file mode 100644
index 0000000..ba090da
--- /dev/null
+++ b/src/server/mail_templates/change_email_address.html
@@ -0,0 +1,43 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="utf-8" />
+    <title>Change Email Address</title>
+  </head>
+  <body>
+    <h1>Tech Tracker</h1>
+    <h2>Confirm Change of Email</h2>
+    <p>Hello,</p>
+    <p>We received a request to change your email address from <strong>{{ .Email }}</strong> to <strong>{{ .NewEmail }}</strong>.</p>
+    <p>To confirm this change, please click the button below:</p>
+    <p><a href="{{ .ConfirmationURL }}">Confirm Email Change</a></p>
+    <p>If you didn't request this change, please contact support immediately.</p>
+    <hr />
+    <p>Tech Tracker - City of Gulfport</p>
+  </body>
+</html>
diff --git a/src/server/mail_templates/confirm_signup.html b/src/server/mail_templates/confirm_signup.html
new file mode 100644
index 0000000..f068df8
--- /dev/null
+++ b/src/server/mail_templates/confirm_signup.html
@@ -0,0 +1,41 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="utf-8" />
+    <title>Confirm Your Email</title>
+  </head>
+  <body>
+    <h1>Tech Tracker</h1>
+    <h2>Confirm Your Email</h2>
+    <p>Hello,</p>
+    <p>Thank you for signing up for Tech Tracker. To complete your registration, please confirm your email address by clicking the button below:</p>
+    <p><a href="{{ .ConfirmationURL }}">Confirm Email</a></p>
+    <p>If you didn't create an account with Tech Tracker, you can safely ignore this email.</p>
+    <hr />
+    <p>Tech Tracker - City of Gulfport</p>
+  </body>
+</html>
diff --git a/src/server/mail_templates/invite_user.html b/src/server/mail_templates/invite_user.html
new file mode 100644
index 0000000..7d4c1ea
--- /dev/null
+++ b/src/server/mail_templates/invite_user.html
@@ -0,0 +1,41 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="utf-8" />
+    <title>You've Been Invited!</title>
+  </head>
+  <body>
+    <h1>Tech Tracker</h1>
+    <h2>You've Been Invited</h2>
+    <p>Hello,</p>
+    <p>You have been invited to join Tech Tracker. To accept this invitation and create your account, please click the button below:</p>
+    <p><a href="{{ .ConfirmationURL }}">Accept Invitation</a></p>
+    <p>Tech Tracker helps teams manage their projects efficiently. We're excited to have you on board!</p>
+    <hr />
+    <p>Tech Tracker - City of Gulfport</p>
+  </body>
+</html>
diff --git a/src/server/mail_templates/magic_link.html b/src/server/mail_templates/magic_link.html
new file mode 100644
index 0000000..9dde3dc
--- /dev/null
+++ b/src/server/mail_templates/magic_link.html
@@ -0,0 +1,43 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="utf-8" />
+    <title>Magic Sign In Link</title>
+  </head>
+  <body>
+    <h1>Tech Tracker</h1>
+    <h2>Your Magic Link</h2>
+    <p>Hello,</p>
+    <p>You requested a magic link to sign in to your Tech Tracker account. Click the button below to sign in:</p>
+    <p><a href="{{ .ConfirmationURL }}">Sign In</a></p>
+    <p>This link will expire in 1 hour and can only be used once.</p>
+    <p>If you didn't request this magic link, you can safely ignore this email.</p>
+    <hr />
+    <p>Tech Tracker - City of Gulfport</p>
+  </body>
+</html>
diff --git a/src/server/mail_templates/reauthentication.html b/src/server/mail_templates/reauthentication.html
new file mode 100644
index 0000000..6b5d97c
--- /dev/null
+++ b/src/server/mail_templates/reauthentication.html
@@ -0,0 +1,42 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="utf-8" />
+    <title>Confirm Reauthentication</title>
+  </head>
+  <body>
+    <h1>Tech Tracker</h1>
+    <h2>Confirm Reauthentication</h2>
+    <p>Hello,</p>
+    <p>For security reasons, we need to verify your identity. Please enter the following code when prompted:</p>
+    <p><strong>{{ .Token }}</strong></p>
+    <p>This code will expire in 10 minutes.</p>
+    <p>If you didn't request this code, please secure your account by changing your password immediately.</p>
+    <hr />
+    <p>Tech Tracker - City of Gulfport</p>
+  </body>
+</html>
diff --git a/src/server/mail_templates/reset_password.html b/src/server/mail_templates/reset_password.html
new file mode 100644
index 0000000..4b6a41b
--- /dev/null
+++ b/src/server/mail_templates/reset_password.html
@@ -0,0 +1,43 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="utf-8" />
+    <title>Reset Password</title>
+  </head>
+  <body>
+    <h1>Tech Tracker</h1>
+    <h2>Reset Your Password</h2>
+    <p>Hello,</p>
+    <p>We received a request to reset your password for your Tech Tracker account. Follow this link to reset the password for your user:</p>
+    <p><a href="{{ .ConfirmationURL }}">Reset Password</a></p>
+    <p>If you didn't request a password reset, you can safely ignore this email.</p>
+    <p>This link will expire in 1 hour.</p>
+    <hr />
+    <p>Tech Tracker - City of Gulfport</p>
+  </body>
+</html>
diff --git a/src/styles/globals.css b/src/styles/globals.css
index d65fca5..a4d79be 100644
--- a/src/styles/globals.css
+++ b/src/styles/globals.css
@@ -1,72 +1,122 @@
-@tailwind base;
-@tailwind components;
-@tailwind utilities;
+@import "tailwindcss";
-@layer base {
- :root {
- --background: 0 0% 100%;
- --foreground: 224 71.4% 4.1%;
- --card: 0 0% 100%;
- --card-foreground: 224 71.4% 4.1%;
- --popover: 0 0% 100%;
- --popover-foreground: 224 71.4% 4.1%;
- /*--primary: 262.1 83.3% 57.8%;*/
- --primary: 231 39% 48%;
- --primary-dark: 231 39% 38%;
- --primary-light: 231 45% 58%;
- --primary-foreground: 210 20% 98%;
- --secondary: 220 14.3% 95.9%;
- --secondary-foreground: 220.9 39.3% 11%;
- --muted: 220 14.3% 95.9%;
- --muted-foreground: 220 8.9% 46.1%;
- --accent: 220 14.3% 95.9%;
- --accent-foreground: 220.9 39.3% 11%;
- --destructive: 0 84.2% 60.2%;
- --destructive-foreground: 210 20% 98%;
- --border: 220 13% 91%;
- --input: 220 13% 91%;
- --ring: 262.1 83.3% 57.8%;
- --radius: 0.75rem;
- --chart-1: 12 76% 61%;
- --chart-2: 173 58% 39%;
- --chart-3: 197 37% 24%;
- --chart-4: 43 74% 66%;
- --chart-5: 27 87% 67%;
- }
- .dark {
- --background: 224 71.4% 4.1%;
- --foreground: 210 20% 98%;
- --card: 224 71.4% 4.1%;
- --card-foreground: 210 20% 98%;
- --popover: 224 71.4% 4.1%;
- --popover-foreground: 210 20% 98%;
- /*--primary: 263.4 70% 50.4%;*/
- --primary: 231 39% 48%;
- --primary-dark: 231 39% 38%;
- --primary-light: 231 45% 58%;
- --primary-foreground: 210 20% 98%;
- --secondary: 215 27.9% 16.9%;
- --secondary-foreground: 210 20% 98%;
- --muted: 215 27.9% 16.9%;
- --muted-foreground: 217.9 10.6% 64.9%;
- --accent: 215 27.9% 16.9%;
- --accent-foreground: 210 20% 98%;
- --destructive: 0 62.8% 30.6%;
- --destructive-foreground: 210 20% 98%;
- --border: 215 27.9% 16.9%;
- --input: 215 27.9% 16.9%;
- --ring: 263.4 70% 50.4%;
- --chart-1: 220 70% 50%;
- --chart-2: 160 60% 45%;
- --chart-3: 30 80% 55%;
- --chart-4: 280 65% 60%;
- --chart-5: 340 75% 55%;
- }
+@custom-variant dark (&:is(.dark *));
+
+@theme {
+ --font-sans: var(--font-geist-sans), ui-sans-serif, system-ui, sans-serif,
+ "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "Noto Color Emoji";
+}
+
+@theme inline {
+ --radius-sm: calc(var(--radius) - 4px);
+ --radius-md: calc(var(--radius) - 2px);
+ --radius-lg: var(--radius);
+ --radius-xl: calc(var(--radius) + 4px);
+ --color-background: var(--background);
+ --color-foreground: var(--foreground);
+ --color-card: var(--card);
+ --color-card-foreground: var(--card-foreground);
+ --color-popover: var(--popover);
+ --color-popover-foreground: var(--popover-foreground);
+ --color-primary: var(--primary);
+ --color-primary-foreground: var(--primary-foreground);
+ --color-secondary: var(--secondary);
+ --color-secondary-foreground: var(--secondary-foreground);
+ --color-muted: var(--muted);
+ --color-muted-foreground: var(--muted-foreground);
+ --color-accent: var(--accent);
+ --color-accent-foreground: var(--accent-foreground);
+ --color-destructive: var(--destructive);
+ --color-border: var(--border);
+ --color-input: var(--input);
+ --color-ring: var(--ring);
+ --color-chart-1: var(--chart-1);
+ --color-chart-2: var(--chart-2);
+ --color-chart-3: var(--chart-3);
+ --color-chart-4: var(--chart-4);
+ --color-chart-5: var(--chart-5);
+ --color-sidebar: var(--sidebar);
+ --color-sidebar-foreground: var(--sidebar-foreground);
+ --color-sidebar-primary: var(--sidebar-primary);
+ --color-sidebar-primary-foreground: var(--sidebar-primary-foreground);
+ --color-sidebar-accent: var(--sidebar-accent);
+ --color-sidebar-accent-foreground: var(--sidebar-accent-foreground);
+ --color-sidebar-border: var(--sidebar-border);
+ --color-sidebar-ring: var(--sidebar-ring);
+}
+
+:root {
+ --radius: 0.625rem;
+ --background: oklch(1 0 0);
+ --foreground: oklch(0.145 0 0);
+ --card: oklch(1 0 0);
+ --card-foreground: oklch(0.145 0 0);
+ --popover: oklch(1 0 0);
+ --popover-foreground: oklch(0.145 0 0);
+ --primary: oklch(0.205 0 0);
+ --primary-foreground: oklch(0.985 0 0);
+ --secondary: oklch(0.97 0 0);
+ --secondary-foreground: oklch(0.205 0 0);
+ --muted: oklch(0.97 0 0);
+ --muted-foreground: oklch(0.556 0 0);
+ --accent: oklch(0.97 0 0);
+ --accent-foreground: oklch(0.205 0 0);
+ --destructive: oklch(0.577 0.245 27.325);
+ --border: oklch(0.922 0 0);
+ --input: oklch(0.922 0 0);
+ --ring: oklch(0.708 0 0);
+ --chart-1: oklch(0.646 0.222 41.116);
+ --chart-2: oklch(0.6 0.118 184.704);
+ --chart-3: oklch(0.398 0.07 227.392);
+ --chart-4: oklch(0.828 0.189 84.429);
+ --chart-5: oklch(0.769 0.188 70.08);
+ --sidebar: oklch(0.985 0 0);
+ --sidebar-foreground: oklch(0.145 0 0);
+ --sidebar-primary: oklch(0.205 0 0);
+ --sidebar-primary-foreground: oklch(0.985 0 0);
+ --sidebar-accent: oklch(0.97 0 0);
+ --sidebar-accent-foreground: oklch(0.205 0 0);
+ --sidebar-border: oklch(0.922 0 0);
+ --sidebar-ring: oklch(0.708 0 0);
+}
+
+.dark {
+ --background: oklch(0.145 0 0);
+ --foreground: oklch(0.985 0 0);
+ --card: oklch(0.205 0 0);
+ --card-foreground: oklch(0.985 0 0);
+ --popover: oklch(0.205 0 0);
+ --popover-foreground: oklch(0.985 0 0);
+ --primary: oklch(0.922 0 0);
+ --primary-foreground: oklch(0.205 0 0);
+ --secondary: oklch(0.269 0 0);
+ --secondary-foreground: oklch(0.985 0 0);
+ --muted: oklch(0.269 0 0);
+ --muted-foreground: oklch(0.708 0 0);
+ --accent: oklch(0.269 0 0);
+ --accent-foreground: oklch(0.985 0 0);
+ --destructive: oklch(0.704 0.191 22.216);
+ --border: oklch(1 0 0 / 10%);
+ --input: oklch(1 0 0 / 15%);
+ --ring: oklch(0.556 0 0);
+ --chart-1: oklch(0.488 0.243 264.376);
+ --chart-2: oklch(0.696 0.17 162.48);
+ --chart-3: oklch(0.769 0.188 70.08);
+ --chart-4: oklch(0.627 0.265 303.9);
+ --chart-5: oklch(0.645 0.246 16.439);
+ --sidebar: oklch(0.205 0 0);
+ --sidebar-foreground: oklch(0.985 0 0);
+ --sidebar-primary: oklch(0.488 0.243 264.376);
+ --sidebar-primary-foreground: oklch(0.985 0 0);
+ --sidebar-accent: oklch(0.269 0 0);
+ --sidebar-accent-foreground: oklch(0.985 0 0);
+ --sidebar-border: oklch(1 0 0 / 10%);
+ --sidebar-ring: oklch(0.556 0 0);
}
@layer base {
* {
- @apply border-border;
+ @apply border-border outline-ring/50;
}
body {
@apply bg-background text-foreground;
diff --git a/src/utils/supabase/client.ts b/src/utils/supabase/client.ts
index df21bdf..a48c184 100644
--- a/src/utils/supabase/client.ts
+++ b/src/utils/supabase/client.ts
@@ -1,8 +1,10 @@
-import { createBrowserClient } from '@supabase/ssr';
+'use client';
-export const createClient = () => {
- return createBrowserClient(
+import { createBrowserClient } from '@supabase/ssr';
+import type { Database } from '@/utils/supabase/types';
+
+export const createClient = () =>
+ createBrowserClient<Database>(
process.env.NEXT_PUBLIC_SUPABASE_URL!,
process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!,
);
-};
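
With the `Database` generic restored on `createBrowserClient`, browser-side queries pick up the generated table types. A usage sketch in a client module:

```ts
'use client';

import { createClient } from '@/utils/supabase';

export const loadProfiles = async () => {
  const supabase = createClient();
  // `.from('profiles')` is now typed against Database['public']['Tables']
  return supabase.from('profiles').select('id, full_name, avatar_url');
};
```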
diff --git a/src/utils/supabase/index.ts b/src/utils/supabase/index.ts
new file mode 100644
index 0000000..4e8f72f
--- /dev/null
+++ b/src/utils/supabase/index.ts
@@ -0,0 +1,5 @@
+export { createClient } from './client';
+export { createServerClient } from './server';
+export { updateSession } from './middleware';
+export type * from './utils';
+export type { Database } from './types';
diff --git a/src/utils/supabase/middleware.ts b/src/utils/supabase/middleware.ts
index 71cc6eb..b4c2cd3 100644
--- a/src/utils/supabase/middleware.ts
+++ b/src/utils/supabase/middleware.ts
@@ -1,33 +1,56 @@
import { createServerClient } from '@supabase/ssr';
-import { NextResponse, type NextRequest } from 'next/server';
+import { type NextRequest, NextResponse } from 'next/server';
+import type { Database } from '@/utils/supabase/types';
-export async function updateSession(request: NextRequest) {
- let supabaseResponse = NextResponse.next({
- request,
- });
- const supabase = createServerClient(
- process.env.NEXT_PUBLIC_SUPABASE_URL!,
- process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!,
- {
- cookies: {
- getAll() {
- return request.cookies.getAll();
- },
- setAll(cookiesToSet) {
- cookiesToSet.forEach(({ name, value, options }) =>
- request.cookies.set(name, value),
- );
- supabaseResponse = NextResponse.next({
- request,
- });
- cookiesToSet.forEach(({ name, value, options }) =>
- supabaseResponse.cookies.set(name, value, options),
- );
+export const updateSession = async (
+ request: NextRequest,
+): Promise<NextResponse> => {
+ try {
+ // Create an unmodified response
+ let response = NextResponse.next({
+ request: {
+ headers: request.headers,
+ },
+ });
+
+ const supabase = createServerClient<Database>(
+ process.env.NEXT_PUBLIC_SUPABASE_URL!,
+ process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!,
+ {
+ cookies: {
+ getAll() {
+ return request.cookies.getAll();
+ },
+ setAll(cookiesToSet) {
+ cookiesToSet.forEach(({ name, value }) =>
+ request.cookies.set(name, value),
+ );
+ response = NextResponse.next({
+ request,
+ });
+ cookiesToSet.forEach(({ name, value, options }) =>
+ response.cookies.set(name, value, options),
+ );
+ },
},
},
- },
- );
- // refreshing the auth token
- await supabase.auth.getUser();
- return supabaseResponse;
-}
+ );
+
+ // This will refresh session if expired - required for Server Components
+ // https://supabase.com/docs/guides/auth/server-side/nextjs
+ const user = await supabase.auth.getUser();
+
+ // protected routes
+ if (request.nextUrl.pathname.startsWith('/reset-password') && user.error) {
+ return NextResponse.redirect(new URL('/sign-in', request.url));
+ }
+
+ return response;
+ } catch (e) {
+ return NextResponse.next({
+ request: {
+ headers: request.headers,
+ },
+ });
+ }
+};
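
`updateSession` still needs to be wired into the app's root middleware. A sketch of the consuming `src/middleware.ts` (the file location and matcher are the usual Next.js conventions, not part of this diff):

```ts
import { type NextRequest } from 'next/server';
import { updateSession } from '@/utils/supabase/middleware';

export const middleware = async (request: NextRequest) => updateSession(request);

export const config = {
  // Run on everything except static assets.
  matcher: ['/((?!_next/static|_next/image|favicon.ico).*)'],
};
```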
diff --git a/src/utils/supabase/server.ts b/src/utils/supabase/server.ts
index 4c9e820..82462a5 100644
--- a/src/utils/supabase/server.ts
+++ b/src/utils/supabase/server.ts
@@ -1,12 +1,14 @@
-import { createServerClient } from '@supabase/ssr';
+'use server';
+
+import 'server-only';
+import { createServerClient as CreateServerClient } from '@supabase/ssr';
+import type { Database } from '@/utils/supabase/types';
import { cookies } from 'next/headers';
-export async function createClient() {
+export const createServerClient = async () => {
const cookieStore = await cookies();
- // Create a server's supabase client with newly configured cookie,
- // which could be used to maintain user's session
- return createServerClient(
+ return CreateServerClient<Database>(
process.env.NEXT_PUBLIC_SUPABASE_URL!,
process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!,
{
@@ -16,11 +18,11 @@ export async function createClient() {
},
setAll(cookiesToSet) {
try {
- cookiesToSet.forEach(({ name, value, options }) =>
- cookieStore.set(name, value, options),
- );
- } catch {
- // The `setAll` method was called from a Server Component.
+ cookiesToSet.forEach(({ name, value, options }) => {
+ cookieStore.set(name, value, options);
+ });
+ } catch (error) {
+ // The `set` method was called from a Server Component.
// This can be ignored if you have middleware refreshing
// user sessions.
}
@@ -28,4 +30,4 @@ export async function createClient() {
},
},
);
-}
+};
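
A usage sketch for the renamed helper inside a React Server Component (the page itself is illustrative):

```tsx
import { createServerClient } from '@/utils/supabase';

export default async function StatusPage() {
  const supabase = await createServerClient();
  const { data: statuses } = await supabase
    .from('statuses')
    .select('status, created_at')
    .order('created_at', { ascending: false });
  return <pre>{JSON.stringify(statuses, null, 2)}</pre>;
}
```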
diff --git a/src/utils/supabase/types.ts b/src/utils/supabase/types.ts
new file mode 100644
index 0000000..3d2165a
--- /dev/null
+++ b/src/utils/supabase/types.ts
@@ -0,0 +1,188 @@
+export type Json =
+ | string
+ | number
+ | boolean
+ | null
+ | { [key: string]: Json | undefined }
+ | Json[];
+
+export type Database = {
+ public: {
+ Tables: {
+ profiles: {
+ Row: {
+ avatar_url: string | null;
+ email: string | null;
+ full_name: string | null;
+ id: string;
+ provider: string | null;
+ updated_at: string | null;
+ };
+ Insert: {
+ avatar_url?: string | null;
+ email?: string | null;
+ full_name?: string | null;
+ id: string;
+ provider?: string | null;
+ updated_at?: string | null;
+ };
+ Update: {
+ avatar_url?: string | null;
+ email?: string | null;
+ full_name?: string | null;
+ id?: string;
+ provider?: string | null;
+ updated_at?: string | null;
+ };
+ Relationships: [];
+ };
+ statuses: {
+ Row: {
+ created_at: string;
+ id: string;
+ status: string;
+ updated_by_id: string | null;
+ user_id: string;
+ };
+ Insert: {
+ created_at?: string;
+ id?: string;
+ status: string;
+ updated_by_id?: string | null;
+ user_id: string;
+ };
+ Update: {
+ created_at?: string;
+ id?: string;
+ status?: string;
+ updated_by_id?: string | null;
+ user_id?: string;
+ };
+ Relationships: [];
+ };
+ };
+ Views: {
+ [_ in never]: never;
+ };
+ Functions: {
+ [_ in never]: never;
+ };
+ Enums: {
+ [_ in never]: never;
+ };
+ CompositeTypes: {
+ [_ in never]: never;
+ };
+ };
+};
+
+type DefaultSchema = Database[Extract<keyof Database, 'public'>];
+
+export type Tables<
+ DefaultSchemaTableNameOrOptions extends
+ | keyof (DefaultSchema['Tables'] & DefaultSchema['Views'])
+ | { schema: keyof Database },
+ TableName extends DefaultSchemaTableNameOrOptions extends {
+ schema: keyof Database;
+ }
+ ? keyof (Database[DefaultSchemaTableNameOrOptions['schema']]['Tables'] &
+ Database[DefaultSchemaTableNameOrOptions['schema']]['Views'])
+ : never = never,
+> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database }
+ ? (Database[DefaultSchemaTableNameOrOptions['schema']]['Tables'] &
+ Database[DefaultSchemaTableNameOrOptions['schema']]['Views'])[TableName] extends {
+ Row: infer R;
+ }
+ ? R
+ : never
+ : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema['Tables'] &
+ DefaultSchema['Views'])
+ ? (DefaultSchema['Tables'] &
+ DefaultSchema['Views'])[DefaultSchemaTableNameOrOptions] extends {
+ Row: infer R;
+ }
+ ? R
+ : never
+ : never;
+
+export type TablesInsert<
+ DefaultSchemaTableNameOrOptions extends
+ | keyof DefaultSchema['Tables']
+ | { schema: keyof Database },
+ TableName extends DefaultSchemaTableNameOrOptions extends {
+ schema: keyof Database;
+ }
+ ? keyof Database[DefaultSchemaTableNameOrOptions['schema']]['Tables']
+ : never = never,
+> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database }
+ ? Database[DefaultSchemaTableNameOrOptions['schema']]['Tables'][TableName] extends {
+ Insert: infer I;
+ }
+ ? I
+ : never
+ : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema['Tables']
+ ? DefaultSchema['Tables'][DefaultSchemaTableNameOrOptions] extends {
+ Insert: infer I;
+ }
+ ? I
+ : never
+ : never;
+
+export type TablesUpdate<
+ DefaultSchemaTableNameOrOptions extends
+ | keyof DefaultSchema['Tables']
+ | { schema: keyof Database },
+ TableName extends DefaultSchemaTableNameOrOptions extends {
+ schema: keyof Database;
+ }
+ ? keyof Database[DefaultSchemaTableNameOrOptions['schema']]['Tables']
+ : never = never,
+> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database }
+ ? Database[DefaultSchemaTableNameOrOptions['schema']]['Tables'][TableName] extends {
+ Update: infer U;
+ }
+ ? U
+ : never
+ : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema['Tables']
+ ? DefaultSchema['Tables'][DefaultSchemaTableNameOrOptions] extends {
+ Update: infer U;
+ }
+ ? U
+ : never
+ : never;
+
+export type Enums<
+ DefaultSchemaEnumNameOrOptions extends
+ | keyof DefaultSchema['Enums']
+ | { schema: keyof Database },
+ EnumName extends DefaultSchemaEnumNameOrOptions extends {
+ schema: keyof Database;
+ }
+ ? keyof Database[DefaultSchemaEnumNameOrOptions['schema']]['Enums']
+ : never = never,
+> = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database }
+ ? Database[DefaultSchemaEnumNameOrOptions['schema']]['Enums'][EnumName]
+ : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema['Enums']
+ ? DefaultSchema['Enums'][DefaultSchemaEnumNameOrOptions]
+ : never;
+
+export type CompositeTypes<
+ PublicCompositeTypeNameOrOptions extends
+ | keyof DefaultSchema['CompositeTypes']
+ | { schema: keyof Database },
+ CompositeTypeName extends PublicCompositeTypeNameOrOptions extends {
+ schema: keyof Database;
+ }
+ ? keyof Database[PublicCompositeTypeNameOrOptions['schema']]['CompositeTypes']
+ : never = never,
+> = PublicCompositeTypeNameOrOptions extends { schema: keyof Database }
+ ? Database[PublicCompositeTypeNameOrOptions['schema']]['CompositeTypes'][CompositeTypeName]
+ : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema['CompositeTypes']
+ ? DefaultSchema['CompositeTypes'][PublicCompositeTypeNameOrOptions]
+ : never;
+
+export const Constants = {
+ public: {
+ Enums: {},
+ },
+} as const;
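
For orientation: the generated Tables/TablesInsert/TablesUpdate helpers above resolve shapes by table name. A quick usage sketch (the type aliases below are illustrative, not part of the diff):

// Hypothetical consumers of the generated helpers.
import type { Tables, TablesInsert, TablesUpdate } from '@/utils/supabase/types';

type ProfileRow = Tables<'profiles'>;        // avatar_url, email, full_name, id, ...
type StatusDraft = TablesInsert<'statuses'>; // created_at and id become optional
type StatusPatch = TablesUpdate<'statuses'>; // every column optional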
diff --git a/src/utils/supabase/utils.ts b/src/utils/supabase/utils.ts
new file mode 100644
index 0000000..150ad65
--- /dev/null
+++ b/src/utils/supabase/utils.ts
@@ -0,0 +1,26 @@
+import type { Database } from '@/utils/supabase/types';
+export type { User } from '@supabase/supabase-js';
+
+// Table row types
+export type Profile = Database['public']['Tables']['profiles']['Row'];
+export type Status = Database['public']['Tables']['statuses']['Row'];
+
+// Insert types
+export type ProfileInsert = Database['public']['Tables']['profiles']['Insert'];
+export type StatusInsert = Database['public']['Tables']['statuses']['Insert'];
+
+// Update types
+export type ProfileUpdate = Database['public']['Tables']['profiles']['Update'];
+export type StatusUpdate = Database['public']['Tables']['statuses']['Update'];
+
+// Generic helper to get any table's row type
+export type TableRow<T extends keyof Database['public']['Tables']> =
+ Database['public']['Tables'][T]['Row'];
+
+// Generic helper to get any table's insert type
+export type TableInsert<T extends keyof Database['public']['Tables']> =
+ Database['public']['Tables'][T]['Insert'];
+
+// Generic helper to get any table's update type
+export type TableUpdate<T extends keyof Database['public']['Tables']> =
+ Database['public']['Tables'][T]['Update'];
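
These aliases pair naturally with a client typed against Database. A hedged sketch, assuming the server client above is created with createServerClient<Database>(...); the setStatus helper itself is hypothetical:

// Hypothetical usage of the Status aliases with the typed server client.
import { createClient } from '@/utils/supabase/server';
import type { Status, StatusInsert } from '@/utils/supabase/utils';

export const setStatus = async (draft: StatusInsert): Promise<Status | null> => {
  const supabase = await createClient();
  const { data, error } = await supabase
    .from('statuses')
    .insert(draft)
    .select()
    .single();
  return error ? null : data; // data infers to the statuses Row type
};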
diff --git a/tailwind.config.ts b/tailwind.config.ts
deleted file mode 100644
index d7f6e39..0000000
--- a/tailwind.config.ts
+++ /dev/null
@@ -1,70 +0,0 @@
-import { type Config } from 'tailwindcss';
-import { fontFamily } from 'tailwindcss/defaultTheme';
-
-export default {
- darkMode: ['class'],
- content: ['./src/**/*.tsx'],
- theme: {
- extend: {
- fontFamily: {
- sans: ['var(--font-geist-sans)', ...fontFamily.sans],
- },
- borderRadius: {
- lg: 'var(--radius)',
- md: 'calc(var(--radius) - 2px)',
- sm: 'calc(var(--radius) - 4px)',
- },
- colors: {
- background: 'hsl(var(--background))',
- foreground: 'hsl(var(--foreground))',
- card: {
- DEFAULT: 'hsl(var(--card))',
- foreground: 'hsl(var(--card-foreground))',
- },
- popover: {
- DEFAULT: 'hsl(var(--popover))',
- foreground: 'hsl(var(--popover-foreground))',
- },
- primary: {
- DEFAULT: 'hsl(var(--primary))',
- foreground: 'hsl(var(--primary-foreground))',
- },
- primarydark: {
- DEFAULT: 'hsl(var(--primary-dark))',
- foreground: 'hsl(var(--primary-foreground))',
- },
- primarylight: {
- DEFAULT: 'hsl(var(--primary-light))',
- foreground: 'hsl(var(--primary-foreground))',
- },
- secondary: {
- DEFAULT: 'hsl(var(--secondary))',
- foreground: 'hsl(var(--secondary-foreground))',
- },
- muted: {
- DEFAULT: 'hsl(var(--muted))',
- foreground: 'hsl(var(--muted-foreground))',
- },
- accent: {
- DEFAULT: 'hsl(var(--accent))',
- foreground: 'hsl(var(--accent-foreground))',
- },
- destructive: {
- DEFAULT: 'hsl(var(--destructive))',
- foreground: 'hsl(var(--destructive-foreground))',
- },
- border: 'hsl(var(--border))',
- input: 'hsl(var(--input))',
- ring: 'hsl(var(--ring))',
- chart: {
- '1': 'hsl(var(--chart-1))',
- '2': 'hsl(var(--chart-2))',
- '3': 'hsl(var(--chart-3))',
- '4': 'hsl(var(--chart-4))',
- '5': 'hsl(var(--chart-5))',
- },
- },
- },
- },
- plugins: [require('tailwindcss-animate')],
-} satisfies Config;
diff --git a/tsconfig.json b/tsconfig.json
index c5eef6e..62d6fc0 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -8,6 +8,7 @@
"resolveJsonModule": true,
"moduleDetection": "force",
"isolatedModules": true,
+ "verbatimModuleSyntax": true,
/* Strictness */
"strict": true,
@@ -30,7 +31,6 @@
}
},
"include": [
- ".eslintrc.cjs",
"next-env.d.ts",
"**/*.ts",
"**/*.tsx",