diff --git a/apps/obsidian/scripts/compile.ts b/apps/obsidian/scripts/compile.ts
index e749cb6af..1a6b86ebc 100644
--- a/apps/obsidian/scripts/compile.ts
+++ b/apps/obsidian/scripts/compile.ts
@@ -123,8 +123,8 @@ export const compile = ({
       "process.env.SUPABASE_URL": dbEnv.SUPABASE_URL
         ? `"${dbEnv.SUPABASE_URL}"`
         : "null",
-      "process.env.SUPABASE_ANON_KEY": dbEnv.SUPABASE_ANON_KEY
-        ? `"${dbEnv.SUPABASE_ANON_KEY}"`
+      "process.env.SUPABASE_PUBLISHABLE_KEY": dbEnv.SUPABASE_PUBLISHABLE_KEY
+        ? `"${dbEnv.SUPABASE_PUBLISHABLE_KEY}"`
         : "null",
       "process.env.NEXT_API_ROOT": `"${dbEnv.NEXT_API_ROOT || ""}"`,
     },
diff --git a/apps/roam/scripts/compile.ts b/apps/roam/scripts/compile.ts
index 19d36b71f..7db5822cc 100644
--- a/apps/roam/scripts/compile.ts
+++ b/apps/roam/scripts/compile.ts
@@ -162,8 +162,8 @@ export const compile = ({
       "process.env.SUPABASE_URL": dbEnv.SUPABASE_URL
         ? `"${dbEnv.SUPABASE_URL}"`
         : "null",
-      "process.env.SUPABASE_ANON_KEY": dbEnv.SUPABASE_ANON_KEY
-        ? `"${dbEnv.SUPABASE_ANON_KEY}"`
+      "process.env.SUPABASE_PUBLISHABLE_KEY": dbEnv.SUPABASE_PUBLISHABLE_KEY
+        ? `"${dbEnv.SUPABASE_PUBLISHABLE_KEY}"`
         : "null",
       "process.env.NEXT_API_ROOT": `"${dbEnv.NEXT_API_ROOT || ""}"`,
       "window.__DISCOURSE_GRAPH_VERSION__": `"${getVersion()}"`,
diff --git a/apps/website/app/api/supabase/env/route.ts b/apps/website/app/api/supabase/env/route.ts
index d89572b7a..239520260 100644
--- a/apps/website/app/api/supabase/env/route.ts
+++ b/apps/website/app/api/supabase/env/route.ts
@@ -6,12 +6,13 @@ import {
 export const GET = (request: NextRequest): NextResponse => {
   try {
-    const { SUPABASE_URL, SUPABASE_ANON_KEY } = process.env;
-    if (!SUPABASE_URL || !SUPABASE_ANON_KEY)
+    const { SUPABASE_URL, SUPABASE_ANON_KEY, SUPABASE_PUBLISHABLE_KEY } =
+      process.env;
+    if (!SUPABASE_URL || !SUPABASE_PUBLISHABLE_KEY)
       return new NextResponse("Missing variables", { status: 500 });
     return NextResponse.json(
       // eslint-disable-next-line @typescript-eslint/naming-convention
-      { SUPABASE_URL, SUPABASE_ANON_KEY },
+      { SUPABASE_URL, SUPABASE_ANON_KEY, SUPABASE_PUBLISHABLE_KEY },
       { status: 200 },
     );
   } catch (e: unknown) {
diff --git a/apps/website/app/utils/supabase/middleware.ts b/apps/website/app/utils/supabase/middleware.ts
index 1854e5c52..a2f4950ce 100644
--- a/apps/website/app/utils/supabase/middleware.ts
+++ b/apps/website/app/utils/supabase/middleware.ts
@@ -10,7 +10,7 @@ import { envContents } from "@repo/database/dbDotEnv";
 export const updateSession = async (request: NextRequest) => {
   const dbEnv = envContents();
   const supabaseUrl = dbEnv.SUPABASE_URL;
-  const supabaseKey = dbEnv.SUPABASE_ANON_KEY;
+  const supabaseKey = dbEnv.SUPABASE_PUBLISHABLE_KEY;
 
   if (!supabaseUrl || !supabaseKey) {
     throw new Error("Missing required Supabase environment variables");
diff --git a/apps/website/app/utils/supabase/server.ts b/apps/website/app/utils/supabase/server.ts
index 080a8ca86..e3377ed74 100644
--- a/apps/website/app/utils/supabase/server.ts
+++ b/apps/website/app/utils/supabase/server.ts
@@ -10,7 +10,7 @@ export const createClient = async () => {
   const dbEnv = envContents();
   const cookieStore = await cookies();
   const supabaseUrl = dbEnv.SUPABASE_URL;
-  const supabaseKey = dbEnv.SUPABASE_ANON_KEY;
+  const supabaseKey = dbEnv.SUPABASE_PUBLISHABLE_KEY;
 
   if (!supabaseUrl || !supabaseKey) {
     throw new Error("Missing required Supabase environment variables");
diff --git a/packages/database/README.md b/packages/database/README.md
index 822a3b630..5034f60c6 100644
--- a/packages/database/README.md
+++ b/packages/database/README.md
@@ -98,3 +98,11 @@ This should be used with extreme caution, as there is not currently adequate sec
 It may be appropriate if there is a problem in production that is due to corrupted data (vs schema issues), and it is somehow simpler to test code to repair it directly than to load the data locally.
 Again, if all your code is running through Vercel API endpoints, the simplest way is to set `NEXT_API_ROOT` to the url of the API of the production Vercel branch (`https://discoursegraphs.com/api`).
 But in most other cases, you will want your code to talk to the production database. set up vercel as above, and set `SUPABASE_USE_DB=production` in your console before running `turbo dev`.
+
+## JWT token management
+
+We are now using JWT Signing keys. See the Supabase [announcement](https://github.com/supabase/supabase/blob/037e5f90a5689c3d847bd2adf9c8ec3956a0e7a0/apps/docs/content/guides/functions/auth.mdx) and [documentation](https://supabase.com/docs/guides/auth/signing-keys).
+
+This allows for better key management in general, including key deprecation. One small downside is that the values of `SUPABASE_PUBLISHABLE_KEY` and `SUPABASE_SECRET_KEY`, generated in `https://supabase.com/dashboard/project//settings/jwt`, have to be manually transferred into the edge function secrets, under slightly different names (since the `SUPABASE_` prefix is reserved, we replace it with `SB_`). This is done in `https://supabase.com/dashboard/project//functions/secrets`. The announcement says this may get automated at some point.
+
+We also need to transfer the `SUPABASE_PUBLISHABLE_KEY` to GitHub secrets (without renaming). The Vercel environment gets updated automatically.
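+
+For illustration, an edge function then reads these renamed secrets through `Deno.env.get` (a minimal sketch; the `SB_*` names are the renamed edge function secrets described above):
+
+```ts
+// Edge function secrets set in the dashboard (see above), read at runtime:
+const publishableKey = Deno.env.get("SB_PUBLISHABLE_KEY");
+const secretKey = Deno.env.get("SB_SECRET_KEY");
+```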
diff --git a/packages/database/features/step-definitions/stepdefs.ts b/packages/database/features/step-definitions/stepdefs.ts
index c7c8bb903..3b3b884cc 100644
--- a/packages/database/features/step-definitions/stepdefs.ts
+++ b/packages/database/features/step-definitions/stepdefs.ts
@@ -28,14 +28,14 @@ if (getVariant() === "production") {
 config();
 
 const getAnonymousClient = () => {
-  if (!process.env.SUPABASE_URL || !process.env.SUPABASE_ANON_KEY) {
+  if (!process.env.SUPABASE_URL || !process.env.SUPABASE_PUBLISHABLE_KEY) {
     throw new Error(
-      "Missing required environment variables: SUPABASE_URL and SUPABASE_ANON_KEY",
+      "Missing required environment variables: SUPABASE_URL and SUPABASE_PUBLISHABLE_KEY",
     );
   }
   return createClient(
     process.env.SUPABASE_URL,
-    process.env.SUPABASE_ANON_KEY,
+    process.env.SUPABASE_PUBLISHABLE_KEY,
   );
 };
diff --git a/packages/database/scripts/createEnv.mts b/packages/database/scripts/createEnv.mts
index 15aa5f690..257de9b20 100644
--- a/packages/database/scripts/createEnv.mts
+++ b/packages/database/scripts/createEnv.mts
@@ -1,5 +1,5 @@
 import { execSync } from "node:child_process";
-import { appendFileSync, writeFileSync } from "node:fs";
+import { appendFileSync, writeFileSync, readFileSync } from "node:fs";
 import { join, dirname } from "node:path";
 import { fileURLToPath } from "node:url";
 import dotenv from "dotenv";
@@ -28,6 +28,10 @@ const getVercelToken = () => {
   return process.env["VERCEL_TOKEN"];
 };
 
+const makeFnEnv = (envTxt: string): string => {
+  return envTxt.split('\n').filter(l=>l.match(/^SUPABASE_\w+_KEY/)).map((l)=> l.replace('SUPABASE_', 'SB_')).join('\n');
+}
+
 const makeLocalEnv = () => {
   execSync("supabase start", {
     cwd: projectRoot, stdio: "inherit"
@@ -48,6 +52,10 @@ const makeLocalEnv = () => {
     join(projectRoot, ".env.local"),
     prefixed + '\nNEXT_API_ROOT="http://localhost:3000/api"\n',
   );
+  writeFileSync(
+    join(projectRoot, "supabase/functions/.env"),
+    makeFnEnv(prefixed)
+  )
 };
 
 const makeBranchEnv = async (vercel: Vercel, vercelToken: string) => {
@@ -86,6 +94,11 @@ const makeBranchEnv = async (vercel: Vercel, vercelToken: string) => {
     throw err;
   }
   appendFileSync(".env.branch", `NEXT_API_ROOT="https://${url}/api"\n`);
+  const fromVercel = readFileSync('.env.branch').toString();
+  writeFileSync(
+    join(projectRoot, "supabase/functions/.env"),
+    makeFnEnv(fromVercel)
+  )
 };
 
 const makeProductionEnv = async (vercel: Vercel, vercelToken: string) => {
@@ -104,6 +117,11 @@ const makeProductionEnv = async (vercel: Vercel, vercelToken: string) => {
     `vercel -t ${vercelToken} env pull --environment production .env.production`,
   );
   appendFileSync(".env.production", `NEXT_API_ROOT="https://${url}/api"\n`);
+  const fromVercel = readFileSync('.env.production').toString();
+  writeFileSync(
+    join(projectRoot, "supabase/functions/.env"),
+    makeFnEnv(fromVercel)
+  )
 };
 
 const main = async (variant: Variant) => {
@@ -118,7 +136,7 @@ const main = async (variant: Variant) => {
     );
     return;
   } catch (e) {
-    if (process.env.SUPABASE_URL && process.env.SUPABASE_ANON_KEY)
+    if (process.env.SUPABASE_URL && process.env.SUPABASE_PUBLISHABLE_KEY)
       return;
     throw new Error("Could not get environment from site");
   }
diff --git a/packages/database/src/dbDotEnv.mjs b/packages/database/src/dbDotEnv.mjs
index 8b1e2ac78..505be334c 100644
--- a/packages/database/src/dbDotEnv.mjs
+++ b/packages/database/src/dbDotEnv.mjs
@@ -29,7 +29,7 @@ export const getVariant = () => {
     variant = process.env["SUPABASE_USE_DB"];
   }
   const processHasVars =
-    !!process.env["SUPABASE_URL"] && !!process.env["SUPABASE_ANON_KEY"];
+    !!process.env["SUPABASE_URL"] && !!process.env["SUPABASE_PUBLISHABLE_KEY"];
 
   if (
     ["local", "branch", "production", "none", "implicit", undefined].indexOf(
@@ -77,7 +77,7 @@ export const envContents = () => {
     // Fallback to process.env when running in production environments
     const raw = {
       SUPABASE_URL: process.env.SUPABASE_URL,
-      SUPABASE_ANON_KEY: process.env.SUPABASE_ANON_KEY,
+      SUPABASE_PUBLISHABLE_KEY: process.env.SUPABASE_PUBLISHABLE_KEY,
       NEXT_API_ROOT: process.env.NEXT_API_ROOT,
     };
     return Object.fromEntries(Object.entries(raw).filter(([, v]) => !!v));
diff --git a/packages/database/src/lib/contextFunctions.ts b/packages/database/src/lib/contextFunctions.ts
index f7836b02a..1180d9ab5 100644
--- a/packages/database/src/lib/contextFunctions.ts
+++ b/packages/database/src/lib/contextFunctions.ts
@@ -90,7 +90,7 @@ let lastStorageKey: string | undefined = undefined;
 // to ensure we never have conflict between multiple clients
 const createSingletonClient = (uniqueKey: string): DGSupabaseClient | null => {
   const url = process.env.SUPABASE_URL;
-  const key = process.env.SUPABASE_ANON_KEY;
+  const key = process.env.SUPABASE_PUBLISHABLE_KEY;
 
   if (!url || !key) {
     throw new FatalError("Missing required Supabase environment variables");
diff --git a/packages/database/supabase/config.toml b/packages/database/supabase/config.toml
index 59f9aa408..9db6dabc8 100644
--- a/packages/database/supabase/config.toml
+++ b/packages/database/supabase/config.toml
@@ -325,7 +325,7 @@ s3_secret_key = "env(S3_SECRET_KEY)"
 
 [functions.create-space]
 enabled = true
-verify_jwt = true
+verify_jwt = false
 import_map = "./functions/create-space/deno.json"
 # Uncomment to specify a custom file path to the entrypoint.
 # Supported file extensions are: .ts, .js, .mjs, .jsx, .tsx
@@ -333,3 +333,14 @@ entrypoint = "./functions/create-space/index.ts"
 # Specifies static files to be bundled with the function. Supports glob patterns.
 # For example, if you want to serve static HTML pages in your function:
 # static_files = [ "./functions/create_space/*.html" ]
+
+[functions.create-group]
+enabled = true
+verify_jwt = false
+import_map = "./functions/create-group/deno.json"
+# Uncomment to specify a custom file path to the entrypoint.
+# Supported file extensions are: .ts, .js, .mjs, .jsx, .tsx
+entrypoint = "./functions/create-group/index.ts"
+# Specifies static files to be bundled with the function. Supports glob patterns.
+# For example, if you want to serve static HTML pages in your function:
+# static_files = [ "./functions/create_group/*.html" ]
diff --git a/packages/database/supabase/functions/create-group/index.ts b/packages/database/supabase/functions/create-group/index.ts
index 33cf295fe..8f07cba5b 100644
--- a/packages/database/supabase/functions/create-group/index.ts
+++ b/packages/database/supabase/functions/create-group/index.ts
@@ -50,18 +50,26 @@ Deno.serve(async (req) => {
   // @ts-ignore Deno is not visible to the IDE
   const url = Deno.env.get("SUPABASE_URL");
   // @ts-ignore Deno is not visible to the IDE
-  const service_key = Deno.env.get("SUPABASE_SERVICE_ROLE_KEY");
+  const service_key = Deno.env.get("SB_SECRET_KEY");
   // @ts-ignore Deno is not visible to the IDE
-  const anon_key = Deno.env.get("SUPABASE_ANON_KEY");
+  const anon_key = Deno.env.get("SB_PUBLISHABLE_KEY");
   if (!url || !anon_key || !service_key) {
-    return new Response("Missing SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY or SUPABASE_ANON_KEY", {
+    return new Response("Missing SUPABASE_URL or SB_SECRET_KEY or SB_PUBLISHABLE_KEY", {
       status: 500,
       headers: { "Content-Type": "application/json" },
     });
   }
   const supabase = createClient(url, anon_key)
-  const authHeader = req.headers.get('Authorization')!
+  const authHeader = req.headers.get('Authorization');
+  if (!authHeader) {
+    return Response.json(
+      { msg: 'Missing authorization headers' },
+      {
+        status: 401,
+      }
+    )
+  }
   const token = authHeader.replace('Bearer ', '')
   const { data, error } = await supabase.auth.getClaims(token)
diff --git a/packages/database/supabase/functions/create-space/index.ts b/packages/database/supabase/functions/create-space/index.ts
index f079d0d24..db3bbd7ca 100644
--- a/packages/database/supabase/functions/create-space/index.ts
+++ b/packages/database/supabase/functions/create-space/index.ts
@@ -1,7 +1,6 @@
 // Follow this setup guide to integrate the Deno language server with your editor:
 // https://deno.land/manual/getting_started/setup_your_environment
 // This enables autocomplete, go to definition, etc.
-
 import "@supabase/functions-js/edge-runtime";
 import {
   createClient,
@@ -209,22 +208,46 @@ Deno.serve(async (req) => {
     });
   }
 
-  const input = await req.json();
   // @ts-ignore Deno is not visible to the IDE
-  const url = Deno.env.get("SUPABASE_URL");
+  const url = Deno.env.get("SUPABASE_URL") as string | undefined;
   // @ts-ignore Deno is not visible to the IDE
-  const key = Deno.env.get("SUPABASE_SERVICE_ROLE_KEY");
+  const key = Deno.env.get("SB_SECRET_KEY") as string | undefined;
   if (!url || !key) {
-    return new Response("Missing SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY", {
+    return new Response("Missing SUPABASE_URL or SB_SECRET_KEY", {
       status: 500,
       headers: { "Content-Type": "application/json" },
     });
   }
+
+  // check that we have at least a valid anonymous token with a dummy query.
+  // Unfortunately, this seems to be too permissive.
+  const authHeader = req.headers.get('Authorization') as string | undefined;
+  if (!authHeader) {
+    return Response.json(
+      { msg: 'Missing authorization headers' },
+      {
+        status: 401,
+      }
+    )
+  }
+  const token = authHeader.replace('Bearer ', '');
+  const supabaseAnonClient: DGSupabaseClient = createClient(
+    url, token, { global: { headers: { Authorization: authHeader } } });
+  {
+    const { error } = await supabaseAnonClient.from("Space").select("id").limit(1);
+    if (error?.code) return new Response(JSON.stringify(error), {
+      status: 401,
+      headers: { "Content-Type": "application/json" },
+    });
+  }
+
   // note: If we wanted this to be bound by permissions, we'd set the following options:
-  // { global: { headers: { Authorization: req.headers.get('Authorization')! } } }
+  // { global: { headers: { Authorization: authHeader } } }
   // But the point here is to bypass RLS
   const supabase: DGSupabaseClient = createClient(url, key);
 
+  const input = await req.json();
+
   const { data, error } = await processAndGetOrCreateSpace(supabase, input);
   if (error) {
     const status = error.code === "invalid space" ? 400 : 500;
diff --git a/turbo.json b/turbo.json
index 8aadd5604..3837ef6e3 100644
--- a/turbo.json
+++ b/turbo.json
@@ -37,6 +37,7 @@
       "RESEND_API_KEY",
       "SUPABASE_ACCESS_TOKEN",
       "SUPABASE_ANON_KEY",
+      "SUPABASE_PUBLISHABLE_KEY",
       "SUPABASE_JWT_SECRET",
       "SUPABASE_DB_PASSWORD",
       "VERCEL_TOKEN"