From 9ff2be3e09180a1952b44c4081b7e737aaca71c1 Mon Sep 17 00:00:00 2001 From: royendo <67675319+royendo@users.noreply.github.com> Date: Tue, 13 Jan 2026 16:19:29 -0500 Subject: [PATCH 1/6] env function --- runtime/parser/template.go | 27 ++++++++ runtime/parser/template_test.go | 111 ++++++++++++++++++++++++++++++++ 2 files changed, 138 insertions(+) diff --git a/runtime/parser/template.go b/runtime/parser/template.go index 8b47e2817c6..eb89bffbdd5 100644 --- a/runtime/parser/template.go +++ b/runtime/parser/template.go @@ -34,6 +34,7 @@ import ( // dependency [`kind`] `name`: register a dependency (parse time) // ref [`kind`] `name`: register a dependency at parse-time, resolve it to a name at resolve time (parse time and resolve time) // lookup [`kind`] `name`: lookup another resource (resolve time) +// env `name`: access a project "environment" variable (parse and resolve time) // .env.name: access a project "environment" variable (resolve time) // .user.attribute: access an attribute from auth claims (resolve time) // .meta: access the current resource's metadata (resolve time) @@ -128,6 +129,13 @@ func AnalyzeTemplate(tmpl string) (*TemplateMetadata, error) { refs[name] = true return map[string]any{}, nil } + funcMap["env"] = func(name string) (string, error) { + if name == "" { + return "", fmt.Errorf(`"env" requires a variable name argument`) + } + // At parse time, just return a placeholder + return "", nil + } // Parse template t, err := template.New("").Funcs(funcMap).Option("missingkey=default").Parse(tmpl) @@ -261,6 +269,25 @@ func ResolveTemplate(tmpl string, data TemplateData, errOnMissingTemplKeys bool) }, nil } + // Add func to access environment variables (case-insensitive) + funcMap["env"] = func(name string) (string, error) { + if name == "" { + return "", fmt.Errorf(`"env" requires a variable name argument`) + } + // Try exact match first + if value, ok := data.Variables[name]; ok { + return value, nil + } + // Try case-insensitive match + 
lowerName := strings.ToLower(name) + for key, value := range data.Variables { + if strings.ToLower(key) == lowerName { + return value, nil + } + } + return "", fmt.Errorf(`environment variable "%s" not found`, name) + } + // Parse template (error on missing keys) // TODO: missingkey=error may be problematic for claims. var opt string diff --git a/runtime/parser/template_test.go b/runtime/parser/template_test.go index d7caf551422..daab4b9d419 100644 --- a/runtime/parser/template_test.go +++ b/runtime/parser/template_test.go @@ -65,6 +65,17 @@ func TestAnalyze(t *testing.T) { ResolvedWithPlaceholders: `SELECT * FROM WITH SAMPLING .... `, }, }, + { + name: "env function", + template: `SELECT * FROM {{ env "partner_table_name" }}`, + want: &TemplateMetadata{ + Refs: []ResourceName{}, + Config: map[string]any{}, + Variables: []string{}, + UsesTemplating: true, + ResolvedWithPlaceholders: `SELECT * FROM`, + }, + }, } for _, tc := range tt { @@ -112,6 +123,106 @@ func TestVariables(t *testing.T) { require.Equal(t, "a=1 b.a=2 b.a=2", resolved) } +func TestEnvFunction(t *testing.T) { + tests := []struct { + name string + template string + data TemplateData + want string + wantErr bool + }{ + { + name: "basic env function", + template: `SELECT * FROM {{ env "table_name" }}`, + data: TemplateData{ + Variables: map[string]string{ + "table_name": "my_table", + }, + }, + want: "SELECT * FROM my_table", + wantErr: false, + }, + { + name: "env function with multiple vars", + template: `FROM {{ env "db" }}.{{ env "schema" }}.{{ env "table" }}`, + data: TemplateData{ + Variables: map[string]string{ + "db": "mydb", + "schema": "public", + "table": "users", + }, + }, + want: "FROM mydb.public.users", + wantErr: false, + }, + { + name: "env function missing variable", + template: `SELECT * FROM {{ env "missing_var" }}`, + data: TemplateData{ + Variables: map[string]string{ + "table_name": "my_table", + }, + }, + want: "", + wantErr: true, + }, + { + name: "env function with empty 
name", + template: `SELECT * FROM {{ env "" }}`, + data: TemplateData{ + Variables: map[string]string{}, + }, + want: "", + wantErr: true, + }, + { + name: "env function case insensitive - lowercase lookup", + template: `SELECT * FROM {{ env "table_name" }}`, + data: TemplateData{ + Variables: map[string]string{ + "TABLE_NAME": "my_table", + }, + }, + want: "SELECT * FROM my_table", + wantErr: false, + }, + { + name: "env function case insensitive - uppercase lookup", + template: `SELECT * FROM {{ env "TABLE_NAME" }}`, + data: TemplateData{ + Variables: map[string]string{ + "table_name": "my_table", + }, + }, + want: "SELECT * FROM my_table", + wantErr: false, + }, + { + name: "env function case insensitive - mixed case lookup", + template: `SELECT * FROM {{ env "TaBlE_NaMe" }}`, + data: TemplateData{ + Variables: map[string]string{ + "table_name": "my_table", + }, + }, + want: "SELECT * FROM my_table", + wantErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resolved, err := ResolveTemplate(tt.template, tt.data, false) + if tt.wantErr { + require.Error(t, err) + } else { + require.NoError(t, err) + require.Equal(t, tt.want, resolved) + } + }) + } +} + func TestAsSQLList(t *testing.T) { tests := []struct { name string From 7a8f2c0c9b20466b42ed09ab02925ab0854f6864 Mon Sep 17 00:00:00 2001 From: royendo <67675319+royendo@users.noreply.github.com> Date: Tue, 13 Jan 2026 16:19:47 -0500 Subject: [PATCH 2/6] different rules for generic vs passwords etc.
--- .../src/features/connectors/code-utils.ts | 96 +++++++++++++++++-- 1 file changed, 89 insertions(+), 7 deletions(-) diff --git a/web-common/src/features/connectors/code-utils.ts b/web-common/src/features/connectors/code-utils.ts index edb0bcb4e06..3ca022e7d9f 100644 --- a/web-common/src/features/connectors/code-utils.ts +++ b/web-common/src/features/connectors/code-utils.ts @@ -40,6 +40,7 @@ export function compileConnectorYAML( fieldFilter?: (property: ConnectorDriverProperty) => boolean; orderedProperties?: ConnectorDriverProperty[]; connectorInstanceName?: string; + existingEnvBlob?: string; }, ) { // Add instructions to the top of the file @@ -103,7 +104,7 @@ driver: ${getDriverNameForConnector(connector.name as string)}`; return `${key}: "{{ .env.${makeDotEnvConnectorKey( connector.name as string, key, - options?.connectorInstanceName, + options?.existingEnvBlob, )} }}"`; } @@ -131,16 +132,19 @@ export async function updateDotEnvWithSecrets( // Get the existing .env file let blob: string; + let originalBlob: string; try { const file = await queryClient.fetchQuery({ queryKey: getRuntimeServiceGetFileQueryKey(instanceId, { path: ".env" }), queryFn: () => runtimeServiceGetFile(instanceId, { path: ".env" }), }); blob = file.blob || ""; + originalBlob = blob; // Keep original for conflict detection } catch (error) { // Handle the case where the .env file does not exist if (error?.response?.data?.message?.includes("no such file")) { blob = ""; + originalBlob = ""; } else { throw error; } @@ -161,6 +165,7 @@ export async function updateDotEnvWithSecrets( } // Update the blob with the new secrets + // Use originalBlob for conflict detection so all secrets use consistent naming secretKeys.forEach((key) => { if (!key || !formValues[key]) { return; @@ -169,7 +174,7 @@ export async function updateDotEnvWithSecrets( const connectorSecretKey = makeDotEnvConnectorKey( connector.name as string, key, - connectorInstanceName, + originalBlob, ); blob = 
replaceOrAddEnvVariable( @@ -224,15 +229,92 @@ export function deleteEnvVariable( return newBlob; } +/** + * Get a generic ALL_CAPS environment variable name + * Generic properties (AWS, Google, etc.) use no prefix + * Driver-specific properties use DriverName_PropertyKey format + */ +function getGenericEnvVarName(driverName: string, propertyKey: string): string { + // Generic properties that don't need a driver prefix + const genericProperties = new Set([ + // Google Cloud credentials + "google_application_credentials", + "key_id", + "secret", + // AWS credentials (used by S3, Athena, Redshift, etc.) + "aws_access_key_id", + "aws_secret_access_key", + // Azure + "azure_storage_connection_string", + "azure_storage_key", + "azure_storage_sas_token", + "azure_storage_account", + // Snowflake + "privateKey", + + ]); + + // Convert property key to SCREAMING_SNAKE_CASE + const propertyKeyUpper = propertyKey + .replace(/([a-z])([A-Z])/g, "$1_$2") + .replace(/[._-]+/g, "_") + .toUpperCase(); + + // If it's a generic property, return just the property name + if (genericProperties.has(propertyKey.toLowerCase())) { + return propertyKeyUpper; + } + + // Otherwise, use DriverName_PropertyKey format + const driverNameUpper = driverName + .replace(/([a-z])([A-Z])/g, "$1_$2") + .replace(/[._-]+/g, "_") + .toUpperCase(); + + return `${driverNameUpper}_${propertyKeyUpper}`; +} + +/** + * Check if an environment variable exists in the env blob + */ +function envVarExists(envBlob: string, varName: string): boolean { + const lines = envBlob.split("\n"); + return lines.some((line) => line.startsWith(`${varName}=`)); +} + +/** + * Find the next available environment variable name by appending _1, _2, etc. 
+ */ +function findAvailableEnvVarName( + envBlob: string, + baseName: string, +): string { + let varName = baseName; + let counter = 1; + + while (envVarExists(envBlob, varName)) { + varName = `${baseName}_${counter}`; + counter++; + } + + return varName; +} + export function makeDotEnvConnectorKey( driverName: string, key: string, - connectorInstanceName?: string, + existingEnvBlob?: string, ) { - // Note: The connector instance name is used when provided, otherwise fall back to driver name. - // This enables configuring multiple connectors that use the same driver with unique env keys. - const nameToUse = connectorInstanceName || driverName; - return `connector.${nameToUse}.${key}`; + // Generate generic ALL_CAPS environment variable name + const baseGenericName = getGenericEnvVarName(driverName, key); + + // If no existing env blob is provided, just return the base generic name + if (!existingEnvBlob) { + return baseGenericName; + } + + // Check for conflicts and append _# if necessary + return findAvailableEnvVarName(existingEnvBlob, baseGenericName); } export async function updateRillYAMLWithOlapConnector( From f873ad3fffc908c0dd3b061bd4b0d52aa07f9c8b Mon Sep 17 00:00:00 2001 From: royendo <67675319+royendo@users.noreply.github.com> Date: Tue, 13 Jan 2026 16:20:07 -0500 Subject: [PATCH 3/6] fix the _# generating of creds - file name --- .../sources/modal/submitAddDataForm.ts | 41 +++++++++++++++---- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/web-common/src/features/sources/modal/submitAddDataForm.ts b/web-common/src/features/sources/modal/submitAddDataForm.ts index d9bdbd6f332..ebd7067b662 100644 --- a/web-common/src/features/sources/modal/submitAddDataForm.ts +++ b/web-common/src/features/sources/modal/submitAddDataForm.ts @@ -51,6 +51,8 @@ const connectorSubmissions = new Map< promise: Promise; connectorName: string; completed: boolean; + originalEnvBlob?: string; + newEnvBlob?: string; } >(); @@ -160,6 +162,8 @@ async function 
saveConnectorAnyway( formValues: AddDataFormValues, newConnectorName: string, instanceId?: string, + preComputedEnvBlob?: string, + originalEnvBlob?: string, ): Promise { const resolvedInstanceId = instanceId ?? get(runtime).instanceId; @@ -172,14 +176,19 @@ async function saveConnectorAnyway( // Mark to avoid rollback by concurrent submissions savedAnywayPaths.add(newConnectorFilePath); - // Update .env file with secrets (keep ordering consistent with Test and Connect) - const newEnvBlob = await updateDotEnvWithSecrets( - queryClient, - connector, - formValues, - "connector", - newConnectorName, - ); + // Use pre-computed env blob if provided to avoid re-computing and getting _1 suffix + // when the first attempt already added the variable + let newEnvBlob = preComputedEnvBlob; + if (!newEnvBlob) { + // Fallback to computing if not provided (for backwards compatibility) + newEnvBlob = await updateDotEnvWithSecrets( + queryClient, + connector, + formValues, + "connector", + newConnectorName, + ); + } await runtimeServicePutFile(resolvedInstanceId, { path: ".env", @@ -189,10 +198,12 @@ async function saveConnectorAnyway( }); // Always create/overwrite to ensure the connector file is created immediately + // Use originalEnvBlob (before modifications) to check conflicts, not the modified blob await runtimeServicePutFile(resolvedInstanceId, { path: newConnectorFilePath, blob: compileConnectorYAML(connector, formValues, { connectorInstanceName: newConnectorName, + existingEnvBlob: originalEnvBlob, }), create: true, createOnly: false, @@ -243,12 +254,16 @@ export async function submitAddConnectorForm( const newConnectorName = existingSubmission.connectorName; // Proceed immediately with Save Anyway logic + // Use the pre-computed env blobs from the concurrent Test and Connect operation + // to ensure consistent variable naming (e.g., GOOGLE_APPLICATION_CREDENTIALS not _2) await saveConnectorAnyway( queryClient, connector, formValues, newConnectorName, instanceId, + 
existingSubmission.newEnvBlob, + existingSubmission.originalEnvBlob, ); return; } else if (!existingSubmission.completed) { @@ -290,6 +305,13 @@ export async function submitAddConnectorForm( newConnectorName, ); + // Store the computed blobs in the submission so concurrent "Save Anyway" can reuse them + const submission = connectorSubmissions.get(uniqueConnectorSubmissionKey); + if (submission) { + submission.originalEnvBlob = originalEnvBlob; + submission.newEnvBlob = newEnvBlob; + } + if (saveAnyway) { // Save Anyway: bypass reconciliation entirely via centralized helper await saveConnectorAnyway( @@ -298,6 +320,8 @@ export async function submitAddConnectorForm( formValues, newConnectorName, instanceId, + newEnvBlob, + originalEnvBlob, ); return; } @@ -322,6 +346,7 @@ export async function submitAddConnectorForm( path: newConnectorFilePath, blob: compileConnectorYAML(connector, formValues, { connectorInstanceName: newConnectorName, + existingEnvBlob: originalEnvBlob, }), create: true, createOnly: false, From 4a08f53c50d5d525111a4dfc1f499ce23981fcaf Mon Sep 17 00:00:00 2001 From: royendo <67675319+royendo@users.noreply.github.com> Date: Tue, 13 Jan 2026 16:24:54 -0500 Subject: [PATCH 4/6] prettier --- web-common/src/features/connectors/code-utils.ts | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/web-common/src/features/connectors/code-utils.ts b/web-common/src/features/connectors/code-utils.ts index 3ca022e7d9f..a88260fb79b 100644 --- a/web-common/src/features/connectors/code-utils.ts +++ b/web-common/src/features/connectors/code-utils.ts @@ -251,7 +251,6 @@ function getGenericEnvVarName(driverName: string, propertyKey: string): string { "azure_storage_account", // Snowflake "privateKey", - ]); // Convert property key to SCREAMING_SNAKE_CASE @@ -285,10 +284,7 @@ function envVarExists(envBlob: string, varName: string): boolean { /** * Find the next available environment variable name by appending _1, _2, etc. 
*/ -function findAvailableEnvVarName( - envBlob: string, - baseName: string, -): string { +function findAvailableEnvVarName(envBlob: string, baseName: string): string { let varName = baseName; let counter = 1; From 8e338d8818c27b0a7cc7ff8366708fc845ed129c Mon Sep 17 00:00:00 2001 From: royendo <67675319+royendo@users.noreply.github.com> Date: Tue, 13 Jan 2026 16:39:07 -0500 Subject: [PATCH 5/6] phase 1 doc updates --- docs/docs/build/connectors/credentials.md | 83 +++++++++++++++++-- .../build/connectors/data-source/athena.md | 6 +- .../build/connectors/data-source/azure.md | 6 +- .../build/connectors/data-source/bigquery.md | 2 +- docs/docs/build/connectors/data-source/gcs.md | 12 +-- .../build/connectors/data-source/https.md | 2 +- .../build/connectors/data-source/mysql.md | 2 +- .../build/connectors/data-source/openai.md | 4 +- .../build/connectors/data-source/postgres.md | 2 +- .../build/connectors/data-source/redshift.md | 4 +- docs/docs/build/connectors/data-source/s3.md | 4 +- .../build/connectors/data-source/snowflake.md | 2 +- docs/docs/build/connectors/olap/clickhouse.md | 4 +- docs/docs/build/connectors/olap/druid.md | 4 +- docs/docs/build/connectors/olap/motherduck.md | 4 +- docs/docs/build/connectors/olap/pinot.md | 2 +- docs/docs/build/connectors/olap/starrocks.md | 6 +- docs/docs/build/connectors/templating.md | 36 +++++--- docs/docs/build/debugging/index.md | 2 +- docs/docs/guides/performance.md | 12 +-- .../reference/project-files/connectors.md | 4 +- 21 files changed, 143 insertions(+), 60 deletions(-) diff --git a/docs/docs/build/connectors/credentials.md b/docs/docs/build/connectors/credentials.md index 360f6038f7f..046b486684e 100644 --- a/docs/docs/build/connectors/credentials.md +++ b/docs/docs/build/connectors/credentials.md @@ -51,14 +51,15 @@ The `.env` file serves several important purposes: Example `.env` file: ```bash # AWS S3 credentials -connector.s3.access_key_id=AKIAIOSFODNN7EXAMPLE 
-connector.s3.secret_access_key=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY +AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE +AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY # Google Cloud credentials -connector.gcs.credentials_json={"type":"service_account","project_id":"my-project"} +GOOGLE_APPLICATION_CREDENTIALS={"type":"service_account","project_id":"my-project"} -# Database connection -connector.postgres.dsn=postgres://username:password@localhost:5432/mydb +# Database connections +POSTGRES_PASSWORD=mypassword +SNOWFLAKE_PASSWORD=mysnowflakepassword # Custom variables my_custom_variable=some_value @@ -67,9 +68,77 @@ When creating any connector in Rill via the UI, these will be **automatically ge Additional variables can then be usable and referenceable for [templating](/build/connectors/templating) purposes in the local instance of your project. -### Credentials Naming Schema +### Credentials Naming Schema -Connector credentials are essentially a form of project variable, prefixed using the `connector..` syntax. For example, `connector.druid.dsn` and `connector.clickhouse.dsn` are both hard-coded project variables (that happen to correspond to the [Druid](/build/connectors/olap/druid) and [ClickHouse](/build/connectors/olap/clickhouse) OLAP engines respectively). Please see below for each source and its required properties. If you have any questions or need specifics, [contact us](/contact)! 
+When you create a connector through Rill's UI, credentials are automatically saved to your `.env` file using a standardized naming convention: + +#### Generic Credentials (Shared Across Connectors) + +Common cloud provider credentials use standard names without a driver prefix: + +| Property | Environment Variable | +|----------|---------------------| +| Google Application Credentials | `GOOGLE_APPLICATION_CREDENTIALS` | +| AWS Access Key ID | `AWS_ACCESS_KEY_ID` | +| AWS Secret Access Key | `AWS_SECRET_ACCESS_KEY` | +| Azure Storage Connection String | `AZURE_STORAGE_CONNECTION_STRING` | +| Azure Storage Key | `AZURE_STORAGE_KEY` | +| Azure Storage SAS Token | `AZURE_STORAGE_SAS_TOKEN` | +| Snowflake Private Key | `PRIVATE_KEY` | + +#### Driver-Specific Credentials + +Credentials specific to a database driver use the `DRIVER_PROPERTY` format: + +| Driver | Property | Environment Variable | +|--------|----------|---------------------| +| PostgreSQL | password | `POSTGRES_PASSWORD` | +| PostgreSQL | dsn | `POSTGRES_DSN` | +| MySQL | password | `MYSQL_PASSWORD` | +| Snowflake | password | `SNOWFLAKE_PASSWORD` | +| ClickHouse | password | `CLICKHOUSE_PASSWORD` | + +#### Handling Multiple Connectors + +When you create multiple connectors that use the same credential type, Rill automatically appends a numeric suffix to avoid conflicts: + +```bash +# First BigQuery connector +GOOGLE_APPLICATION_CREDENTIALS={"type":"service_account",...} + +# Second BigQuery connector +GOOGLE_APPLICATION_CREDENTIALS_1={"type":"service_account",...} + +# Third BigQuery connector +GOOGLE_APPLICATION_CREDENTIALS_2={"type":"service_account",...} +``` + +This ensures each connector can reference its own credentials without overwriting existing ones. 
+ +#### Referencing Variables in YAML + +Use the `{{ .env.VARIABLE_NAME }}` syntax to reference environment variables in your connector YAML files: + +```yaml +google_application_credentials: "{{ .env.GOOGLE_APPLICATION_CREDENTIALS }}" +password: "{{ .env.POSTGRES_PASSWORD }}" +aws_access_key_id: "{{ .env.AWS_ACCESS_KEY_ID }}" +``` + +#### Case-Insensitive Variable Lookups + +The `{{ env "VAR_NAME" }}` function provides case-insensitive variable lookups, which can be useful when variable names may have inconsistent casing: + +```yaml +# All of these will match POSTGRES_PASSWORD in your .env file: +password: '{{ env "POSTGRES_PASSWORD" }}' +password: '{{ env "postgres_password" }}' +password: '{{ env "Postgres_Password" }}' +``` + +:::note Legacy Naming Convention +Older projects may use the `connector..` syntax (e.g., `connector.druid.dsn`, `connector.clickhouse.dsn`). This format is still supported for backwards compatibility. +::: :::tip Avoid committing sensitive information to Git diff --git a/docs/docs/build/connectors/data-source/athena.md b/docs/docs/build/connectors/data-source/athena.md index 02fc25459b5..e602527267f 100644 --- a/docs/docs/build/connectors/data-source/athena.md +++ b/docs/docs/build/connectors/data-source/athena.md @@ -29,14 +29,14 @@ Create a connector with your credentials to connect to Athena. Here's an example type: connector driver: athena -aws_access_key_id: "{{ .env.connector.athena.aws_access_key_id }}" -aws_secret_access_key: "{{ .env.connector.athena.aws_secret_access_key }}" +aws_access_key_id: "{{ .env.AWS_ACCESS_KEY_ID }}" +aws_secret_access_key: "{{ .env.AWS_SECRET_ACCESS_KEY }}" output_location: "s3://bucket/path/folder" region: "us-east-1" ``` :::tip Using the Add Data Form -You can also use the Add Data form in Rill Developer, which will automatically create the `athena.yaml` file and populate the `.env` file with `connector.athena.aws_access_key_id` and `connector.athena.aws_secret_access_key`. 
+You can also use the Add Data form in Rill Developer, which will automatically create the `athena.yaml` file and populate the `.env` file with `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`. ::: ### Local AWS Credentials (Local Development Only) diff --git a/docs/docs/build/connectors/data-source/azure.md b/docs/docs/build/connectors/data-source/azure.md index 161991d32a3..6bfec143582 100644 --- a/docs/docs/build/connectors/data-source/azure.md +++ b/docs/docs/build/connectors/data-source/azure.md @@ -35,7 +35,7 @@ type: connector driver: azure azure_storage_account: rilltest -azure_storage_key: "{{ .env.connector.azure.azure_storage_key }}" +azure_storage_key: "{{ .env.AZURE_STORAGE_KEY }}" ``` This approach ensures your Azure Blob Storage sources authenticate consistently across both local development and cloud deployment. Follow the [Azure Documentation](https://learn.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?tabs=azure-portal) to retrieve your storage account keys. @@ -49,7 +49,7 @@ type: connector driver: azure -azure_storage_connection_string: "{{ .env.connector.azure.azure_storage_connection_string }}" +azure_storage_connection_string: "{{ .env.AZURE_STORAGE_CONNECTION_STRING }}" ``` This approach ensures your Azure Blob Storage sources authenticate consistently across both local development and cloud deployment. Follow the [Azure Documentation](https://learn.microsoft.com/en-us/azure/storage/common/storage-account-keys-manage?tabs=azure-portal) to retrieve your connection string. @@ -64,7 +64,7 @@ type: connector driver: azure azure_storage_account: rilltest -azure_storage_sas_token: "{{ .env.connector.azure.azure_storage_sas_token }}" +azure_storage_sas_token: "{{ .env.AZURE_STORAGE_SAS_TOKEN }}" ``` This method provides fine-grained access control and enhanced security for your Azure Blob Storage connections. 
Follow the [Azure Documentation](https://learn.microsoft.com/en-us/azure/ai-services/translator/document-translation/how-to-guides/create-sas-tokens?tabs=Containers) to create your Azure SAS token. diff --git a/docs/docs/build/connectors/data-source/bigquery.md b/docs/docs/build/connectors/data-source/bigquery.md index 4cd459ee94f..a6fa23e317a 100644 --- a/docs/docs/build/connectors/data-source/bigquery.md +++ b/docs/docs/build/connectors/data-source/bigquery.md @@ -45,7 +45,7 @@ type: connector driver: bigquery -google_application_credentials: "{{ .env.connector.bigquery.google_application_credentials }}" +google_application_credentials: "{{ .env.GOOGLE_APPLICATION_CREDENTIALS }}" project_id: "rilldata" ``` diff --git a/docs/docs/build/connectors/data-source/gcs.md b/docs/docs/build/connectors/data-source/gcs.md index 1463bcd2d42..66c30ace99a 100644 --- a/docs/docs/build/connectors/data-source/gcs.md +++ b/docs/docs/build/connectors/data-source/gcs.md @@ -65,7 +65,7 @@ Create `connectors/my_gcs.yaml`: type: connector driver: gcs -google_application_credentials: "{{ .env.connector.gcs.google_application_credentials }}" +google_application_credentials: "{{ .env.GOOGLE_APPLICATION_CREDENTIALS }}" ``` **Step 2: Create model configuration** @@ -86,7 +86,7 @@ refresh: **Step 3: Add credentials to `.env`** ```bash -connector.gcs.google_application_credentials= +GOOGLE_APPLICATION_CREDENTIALS= ``` --- @@ -119,8 +119,8 @@ Create `connectors/my_gcs_hmac.yaml`: type: connector driver: gcs -key_id: "{{ .env.connector.gcs.key_id }}" -secret: "{{ .env.connector.gcs.secret }}" +key_id: "{{ .env.KEY_ID }}" +secret: "{{ .env.SECRET }}" ``` **Step 2: Create model configuration** @@ -141,8 +141,8 @@ refresh: **Step 3: Add credentials to `.env`** ```bash -connector.gcs.key_id=GOOG1234567890ABCDEFG -connector.gcs.secret=your-secret-access-key +KEY_ID=GOOG1234567890ABCDEFG +SECRET=your-secret-access-key ``` :::info diff --git a/docs/docs/build/connectors/data-source/https.md 
b/docs/docs/build/connectors/data-source/https.md index e319497f6f1..92dfc699fad 100644 --- a/docs/docs/build/connectors/data-source/https.md +++ b/docs/docs/build/connectors/data-source/https.md @@ -31,7 +31,7 @@ driver: https path: "https://api.endpoint.com/v1" headers: - Authorization: "Bearer {{ .env.connector.https.token }}" + Authorization: "Bearer {{ .env.HTTPS_TOKEN }}" ``` ### Public URLs diff --git a/docs/docs/build/connectors/data-source/mysql.md b/docs/docs/build/connectors/data-source/mysql.md index f10ff51be78..dde0fd61436 100644 --- a/docs/docs/build/connectors/data-source/mysql.md +++ b/docs/docs/build/connectors/data-source/mysql.md @@ -36,7 +36,7 @@ host: "localhost" port: 3306 database: "mydatabase" user: "myusername" -password: "{{ .env.connector.mysql.password }}" +password: "{{ .env.MYSQL_PASSWORD }}" ssl_mode: "DISABLED" ``` diff --git a/docs/docs/build/connectors/data-source/openai.md b/docs/docs/build/connectors/data-source/openai.md index 402a5faa51d..c66eb4e84ed 100644 --- a/docs/docs/build/connectors/data-source/openai.md +++ b/docs/docs/build/connectors/data-source/openai.md @@ -28,11 +28,11 @@ To configure OpenAI access, you'll need to obtain an API key from your OpenAI ac ```yaml type: connector driver: openai - api_key: "{{ .env.connector.openai.openai_api_key }}" + api_key: "{{ .env.OPENAI_API_KEY }}" ``` :::tip Security Best Practice -Never commit your OpenAI API key directly to your connector YAML files or version control. Always use environment variables with the `{{ .env.connector.openai.openai_api_key }}` syntax to keep sensitive credentials secure. +Never commit your OpenAI API key directly to your connector YAML files or version control. Always use environment variables with the `{{ .env.OPENAI_API_KEY }}` syntax to keep sensitive credentials secure. ::: 3. 
**Set up environment variable:** diff --git a/docs/docs/build/connectors/data-source/postgres.md b/docs/docs/build/connectors/data-source/postgres.md index bf60ff50f20..323b5e09242 100644 --- a/docs/docs/build/connectors/data-source/postgres.md +++ b/docs/docs/build/connectors/data-source/postgres.md @@ -33,7 +33,7 @@ driver: postgres host: "localhost" port: "5432" user: "postgres" -password: "{{ .env.connector.postgres.password }}" +password: "{{ .env.POSTGRES_PASSWORD }}" dbname: "postgres" ``` diff --git a/docs/docs/build/connectors/data-source/redshift.md b/docs/docs/build/connectors/data-source/redshift.md index 3267001a42a..1e3c05562fb 100644 --- a/docs/docs/build/connectors/data-source/redshift.md +++ b/docs/docs/build/connectors/data-source/redshift.md @@ -32,8 +32,8 @@ Create a connector with your credentials to connect to Redshift. Here's an examp type: connector driver: redshift -aws_access_key_id: "{{ .env.connector.redshift.aws_access_key_id }}" -aws_secret_access_key: "{{ .env.connector.redshift.aws_secret_access_key }}" +aws_access_key_id: "{{ .env.AWS_ACCESS_KEY_ID }}" +aws_secret_access_key: "{{ .env.AWS_SECRET_ACCESS_KEY }}" database: "dev" ``` diff --git a/docs/docs/build/connectors/data-source/s3.md b/docs/docs/build/connectors/data-source/s3.md index cda6fe80c96..68083ccf90f 100644 --- a/docs/docs/build/connectors/data-source/s3.md +++ b/docs/docs/build/connectors/data-source/s3.md @@ -34,8 +34,8 @@ Create a connector with your credentials to connect to S3. Here's an example con type: connector driver: s3 -aws_access_key_id: "{{ .env.connector.s3.aws_access_key_id }}" -aws_secret_access_key: "{{ .env.connector.s3.aws_secret_access_key }}" +aws_access_key_id: "{{ .env.AWS_ACCESS_KEY_ID }}" +aws_secret_access_key: "{{ .env.AWS_SECRET_ACCESS_KEY }}" ``` This approach ensures your AWS sources authenticate consistently across both local development and cloud deployment environments. 
diff --git a/docs/docs/build/connectors/data-source/snowflake.md b/docs/docs/build/connectors/data-source/snowflake.md index 9b9ef83a687..47ad59ed211 100644 --- a/docs/docs/build/connectors/data-source/snowflake.md +++ b/docs/docs/build/connectors/data-source/snowflake.md @@ -26,7 +26,7 @@ Create a connector with your credentials to connect to Snowflake. Here's an exam type: connector driver: snowflake -dsn: "{{ .env.connector.snowflake.dsn }}" +dsn: "{{ .env.SNOWFLAKE_DSN }}" ``` :::tip Using the Add Data Form diff --git a/docs/docs/build/connectors/olap/clickhouse.md b/docs/docs/build/connectors/olap/clickhouse.md index d2b192b77e5..2f9b012c8a8 100644 --- a/docs/docs/build/connectors/olap/clickhouse.md +++ b/docs/docs/build/connectors/olap/clickhouse.md @@ -36,7 +36,7 @@ driver: clickhouse host: port: username: -password: "{{ .env.connector.clickhouse.password }}" +password: "{{ .env.CLICKHOUSE_PASSWORD }}" ssl: true # required for ClickHouse Cloud ``` @@ -62,7 +62,7 @@ Once the file is created, it will be added directly to the `.env` file in the pr type: connector driver: clickhouse -dsn: "{{ .env.connector.clickhouse.dsn }}" +dsn: "{{ .env.CLICKHOUSE_DSN }}" ``` :::info Check your port diff --git a/docs/docs/build/connectors/olap/druid.md b/docs/docs/build/connectors/olap/druid.md index 80119f74cf9..7c5eb0643b2 100644 --- a/docs/docs/build/connectors/olap/druid.md +++ b/docs/docs/build/connectors/olap/druid.md @@ -24,12 +24,12 @@ driver: druid host: port: username: -password: "{{ .env.connector.druid.password }}" +password: "{{ .env.DRUID_PASSWORD }}" ssl: true # or -dsn: "{{ .env.connector.druid.dsn }}" +dsn: "{{ .env.DRUID_DSN }}" ``` 2. You can manually set `connector.druid.dsn` in your project's `.env` file or try pulling existing credentials locally using `rill env pull` if the project has already been deployed to Rill Cloud. 
diff --git a/docs/docs/build/connectors/olap/motherduck.md b/docs/docs/build/connectors/olap/motherduck.md index 0b406a1507f..2b813646cc1 100644 --- a/docs/docs/build/connectors/olap/motherduck.md +++ b/docs/docs/build/connectors/olap/motherduck.md @@ -40,7 +40,7 @@ Your MotherDuck access token provides access to your data. Keep it secure and ne ## Configuring Rill Developer with MotherDuck -Connect to your OLAP engine via Add Data. This will automatically create the motherduck.yaml file in your connectors folder and populate the .env file with `.connector.motherduck.token`. +Connect to your OLAP engine via Add Data. This will automatically create the motherduck.yaml file in your connectors folder and populate the .env file with `MOTHERDUCK_TOKEN`. For more information on supported parameters, see our [MotherDuck connector YAML reference docs](/reference/project-files/connectors#motherduck). @@ -49,7 +49,7 @@ For more information on supported parameters, see our [MotherDuck connector YAML type: connector driver: duckdb -token: '{{ .env.connector.motherduck.token }}' +token: '{{ .env.MOTHERDUCK_TOKEN }}' path: "md:my_database" schema_name: "my_schema" ``` diff --git a/docs/docs/build/connectors/olap/pinot.md b/docs/docs/build/connectors/olap/pinot.md index 31017333631..81255cc42e2 100644 --- a/docs/docs/build/connectors/olap/pinot.md +++ b/docs/docs/build/connectors/olap/pinot.md @@ -21,7 +21,7 @@ When using Rill for local development, there are a few options to configure Rill type: connector driver: pinot - dsn: "{{ .env.connector.pinot.dsn }}" + dsn: "{{ .env.PINOT_DSN }}" ``` 1. You can set `connector.pinot.dsn` in your project's `.env` file or try pulling existing credentials locally using `rill env pull` if the project has already been deployed to Rill Cloud. 
diff --git a/docs/docs/build/connectors/olap/starrocks.md b/docs/docs/build/connectors/olap/starrocks.md index ee615af73d5..fc3cafec9f4 100644 --- a/docs/docs/build/connectors/olap/starrocks.md +++ b/docs/docs/build/connectors/olap/starrocks.md @@ -26,7 +26,7 @@ driver: starrocks host: port: 9030 username: -password: "{{ .env.connector.starrocks.password }}" +password: "{{ .env.STARROCKS_PASSWORD }}" catalog: default_catalog database: ssl: false @@ -40,7 +40,7 @@ Rill can also connect to StarRocks using a DSN connection string. StarRocks uses type: connector driver: starrocks -dsn: "{{ .env.connector.starrocks.dsn }}" +dsn: "{{ .env.STARROCKS_DSN }}" ``` The DSN format is: @@ -80,7 +80,7 @@ driver: starrocks host: starrocks-fe.example.com port: 9030 username: analyst -password: "{{ .env.connector.starrocks.password }}" +password: "{{ .env.STARROCKS_PASSWORD }}" catalog: iceberg_catalog database: my_database ``` diff --git a/docs/docs/build/connectors/templating.md b/docs/docs/build/connectors/templating.md index b154373f336..5b387241caa 100644 --- a/docs/docs/build/connectors/templating.md +++ b/docs/docs/build/connectors/templating.md @@ -21,6 +21,20 @@ Unless explicitly defined, Rill Developer will use a `dev` environment. If you w You can set up environmental variables in several locations in Rill. Please review our [configure local credentials documentation](/build/connectors/credentials#setting-credentials-for-rill-developer) for more information. +## Referencing Environment Variables + +Reference environment variables in your YAML files using the `{{ .env.VARIABLE_NAME }}` syntax: + +```yaml +password: "{{ .env.POSTGRES_PASSWORD }}" +google_application_credentials: "{{ .env.GOOGLE_APPLICATION_CREDENTIALS }}" +aws_access_key_id: "{{ .env.AWS_ACCESS_KEY_ID }}" +``` + +:::tip Case-Insensitive Lookups +The `{{ env "VAR_NAME" }}` function is also available and provides case-insensitive variable lookups. 
+::: + ## Environment-Specific Connectors The most common use case for connector templating is defining separate databases for your development and production operations. This approach gives you the freedom to experiment, test, and iterate on your models without the risk of accidentally modifying or corrupting your production data. @@ -38,13 +52,13 @@ dev: # Production environment configuration prod: - host: "{{ .env.connector.clickhouse.host }}" - port: "{{ .env.connector.clickhouse.port }}" - database: "{{ .env.connector.clickhouse.database }}" - username: "{{ .env.connector.clickhouse.username }}" - password: "{{ .env.connector.clickhouse.password }}" + host: "{{ .env.CLICKHOUSE_HOST }}" + port: "{{ .env.CLICKHOUSE_PORT }}" + database: "{{ .env.CLICKHOUSE_DATABASE }}" + username: "{{ .env.CLICKHOUSE_USERNAME }}" + password: "{{ .env.CLICKHOUSE_PASSWORD }}" ssl: true - cluster: "{{ .env.connector.clickhouse.cluster }}" + cluster: "{{ .env.CLICKHOUSE_CLUSTER }}" ``` In this example: @@ -55,10 +69,10 @@ In this example: Some connectors will reference two unique databases and require two unique credentials for development and production. In this case, you can either define a unique environmental variable for both and reference them separately in the connector. -IE: -``` -"{{ .env.connector.dev_clickhouse.username }}" -"{{ .env.connector.prod_clickhouse.username }}" +For example: +```yaml +"{{ .env.DEV_CLICKHOUSE_USERNAME }}" +"{{ .env.PROD_CLICKHOUSE_USERNAME }}" ``` Or, by creating a separate connector altogether. 
@@ -81,7 +95,7 @@ dev: project_id: rilldata_dev project_id: rilldata -google_application_credentials: '{{ .env.connector.bigquery.google_application_credentials}}' +google_application_credentials: "{{ .env.GOOGLE_APPLICATION_CREDENTIALS }}" ``` ```yaml diff --git a/docs/docs/build/debugging/index.md b/docs/docs/build/debugging/index.md index d9f4f98db8b..8fddcadceb9 100644 --- a/docs/docs/build/debugging/index.md +++ b/docs/docs/build/debugging/index.md @@ -80,7 +80,7 @@ When connecting to a data source via a connector, you'll see a "Connector" being ```bash INFO Reconciling resource {"name": "gcs", "type": "Connector"} INFO Reconciled resource {"name": "gcs", "type": "Connector", "elapsed": "39ms"} -WARN Reconcile failed {"name": "gcs", "type": "Connector", "elapsed": "1ms", "error": "failed to resolve templated property \"google_application_credentials\": template: :1:6: executing \"\" at <.env.connector.gcs.google_application_credentialsss>: map has no entry for key \"google_application_credentials\""} +WARN Reconcile failed {"name": "gcs", "type": "Connector", "elapsed": "1ms", "error": "failed to resolve templated property \"google_application_credentials\": template: :1:6: executing \"\" at <.env.GOOGLE_APPLICATION_CREDENTIALS>: map has no entry for key \"GOOGLE_APPLICATION_CREDENTIALS\""} ``` Once connected, you'll likely create a model and see this also reconciling in the logs. Similar to the above, if there are any issues, you'll see it both in the logs and UI. 
diff --git a/docs/docs/guides/performance.md b/docs/docs/guides/performance.md index b8184d6b2a3..5dd031aa104 100644 --- a/docs/docs/guides/performance.md +++ b/docs/docs/guides/performance.md @@ -107,13 +107,13 @@ dev: # Production environment configuration prod: - host: "{{ .env.connector.clickhouse.host }}" - port: "{{ .env.connector.clickhouse.port }}" - database: "{{ .env.connector.clickhouse.database }}" - username: "{{ .env.connector.clickhouse.username }}" - password: "{{ .env.connector.clickhouse.password }}" + host: "{{ .env.CLICKHOUSE_HOST }}" + port: "{{ .env.CLICKHOUSE_PORT }}" + database: "{{ .env.CLICKHOUSE_DATABASE }}" + username: "{{ .env.CLICKHOUSE_USERNAME }}" + password: "{{ .env.CLICKHOUSE_PASSWORD }}" ssl: true - cluster: "{{ .env.connector.clickhouse.cluster }}" + cluster: "{{ .env.CLICKHOUSE_CLUSTER }}" ``` #### Limiting the source data to a smaller time range diff --git a/docs/docs/reference/project-files/connectors.md b/docs/docs/reference/project-files/connectors.md index dd6992c5ffa..4abcff414e0 100644 --- a/docs/docs/reference/project-files/connectors.md +++ b/docs/docs/reference/project-files/connectors.md @@ -39,7 +39,7 @@ Connector YAML files define how Rill connects to external data sources and OLAP - [**Slack**](#slack) - Slack data :::warning Security Recommendation -For all credential parameters (passwords, tokens, keys), use environment variables with the syntax `{{.env.connector..}}`. This keeps sensitive data out of your YAML files and version control. See our [credentials documentation](/build/connectors/credentials/) for complete setup instructions. +For all credential parameters (passwords, tokens, keys), use environment variables with the syntax `{{ .env.VARIABLE_NAME }}`. This keeps sensitive data out of your YAML files and version control. See our [credentials documentation](/build/connectors/credentials/) for complete setup instructions. 
::: @@ -559,7 +559,7 @@ _[string, array]_ - List of connector names for which temporary secrets should b # Example: MotherDuck connector configuration type: connector # Must be `connector` (required) driver: duckdb # Must be `duckdb` _(required)_ -token: '{{ .env.connector.motherduck.token }}' # Set the MotherDuck token from your .env file _(required)_ +token: '{{ .env.MOTHERDUCK_TOKEN }}' # Set the MotherDuck token from your .env file _(required)_ path: "md:my_database" # Path to your MD database schema_name: "my_schema" # Define your schema if not main, uses main by default ``` From 21fac3d6cdf9b00f314dd807a849cce50159e4fa Mon Sep 17 00:00:00 2001 From: royendo <67675319+royendo@users.noreply.github.com> Date: Tue, 13 Jan 2026 16:50:09 -0500 Subject: [PATCH 6/6] revert code changes, keep docs only MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This branch is for documentation updates only. Code changes will be handled in the main PR. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 --- runtime/parser/template.go | 27 ----- runtime/parser/template_test.go | 111 ------------------ .../src/features/connectors/code-utils.ts | 92 ++------------- .../sources/modal/submitAddDataForm.ts | 41 ++----- 4 files changed, 15 insertions(+), 256 deletions(-) diff --git a/runtime/parser/template.go b/runtime/parser/template.go index eb89bffbdd5..8b47e2817c6 100644 --- a/runtime/parser/template.go +++ b/runtime/parser/template.go @@ -34,7 +34,6 @@ import ( // dependency [`kind`] `name`: register a dependency (parse time) // ref [`kind`] `name`: register a dependency at parse-time, resolve it to a name at resolve time (parse time and resolve time) // lookup [`kind`] `name`: lookup another resource (resolve time) -// env `name`: access a project "environment" variable (parse and resolve time) // .env.name: access a project "environment" variable (resolve time) // .user.attribute: access an 
attribute from auth claims (resolve time) // .meta: access the current resource's metadata (resolve time) @@ -129,13 +128,6 @@ func AnalyzeTemplate(tmpl string) (*TemplateMetadata, error) { refs[name] = true return map[string]any{}, nil } - funcMap["env"] = func(name string) (string, error) { - if name == "" { - return "", fmt.Errorf(`"env" requires a variable name argument`) - } - // At parse time, just return a placeholder - return "", nil - } // Parse template t, err := template.New("").Funcs(funcMap).Option("missingkey=default").Parse(tmpl) @@ -269,25 +261,6 @@ func ResolveTemplate(tmpl string, data TemplateData, errOnMissingTemplKeys bool) }, nil } - // Add func to access environment variables (case-insensitive) - funcMap["env"] = func(name string) (string, error) { - if name == "" { - return "", fmt.Errorf(`"env" requires a variable name argument`) - } - // Try exact match first - if value, ok := data.Variables[name]; ok { - return value, nil - } - // Try case-insensitive match - lowerName := strings.ToLower(name) - for key, value := range data.Variables { - if strings.ToLower(key) == lowerName { - return value, nil - } - } - return "", fmt.Errorf(`environment variable "%s" not found`, name) - } - // Parse template (error on missing keys) // TODO: missingkey=error may be problematic for claims. var opt string diff --git a/runtime/parser/template_test.go b/runtime/parser/template_test.go index daab4b9d419..d7caf551422 100644 --- a/runtime/parser/template_test.go +++ b/runtime/parser/template_test.go @@ -65,17 +65,6 @@ func TestAnalyze(t *testing.T) { ResolvedWithPlaceholders: `SELECT * FROM WITH SAMPLING .... 
`, }, }, - { - name: "env function", - template: `SELECT * FROM {{ env "partner_table_name" }}`, - want: &TemplateMetadata{ - Refs: []ResourceName{}, - Config: map[string]any{}, - Variables: []string{}, - UsesTemplating: true, - ResolvedWithPlaceholders: `SELECT * FROM`, - }, - }, } for _, tc := range tt { @@ -123,106 +112,6 @@ func TestVariables(t *testing.T) { require.Equal(t, "a=1 b.a=2 b.a=2", resolved) } -func TestEnvFunction(t *testing.T) { - tests := []struct { - name string - template string - data TemplateData - want string - wantErr bool - }{ - { - name: "basic env function", - template: `SELECT * FROM {{ env "table_name" }}`, - data: TemplateData{ - Variables: map[string]string{ - "table_name": "my_table", - }, - }, - want: "SELECT * FROM my_table", - wantErr: false, - }, - { - name: "env function with multiple vars", - template: `FROM {{ env "db" }}.{{ env "schema" }}.{{ env "table" }}`, - data: TemplateData{ - Variables: map[string]string{ - "db": "mydb", - "schema": "public", - "table": "users", - }, - }, - want: "FROM mydb.public.users", - wantErr: false, - }, - { - name: "env function missing variable", - template: `SELECT * FROM {{ env "missing_var" }}`, - data: TemplateData{ - Variables: map[string]string{ - "table_name": "my_table", - }, - }, - want: "", - wantErr: true, - }, - { - name: "env function with empty name", - template: `SELECT * FROM {{ env "" }}`, - data: TemplateData{ - Variables: map[string]string{}, - }, - want: "", - wantErr: true, - }, - { - name: "env function case insensitive - lowercase lookup", - template: `SELECT * FROM {{ env "table_name" }}`, - data: TemplateData{ - Variables: map[string]string{ - "TABLE_NAME": "my_table", - }, - }, - want: "SELECT * FROM my_table", - wantErr: false, - }, - { - name: "env function case insensitive - uppercase lookup", - template: `SELECT * FROM {{ env "TABLE_NAME" }}`, - data: TemplateData{ - Variables: map[string]string{ - "table_name": "my_table", - }, - }, - want: "SELECT * FROM 
my_table", - wantErr: false, - }, - { - name: "env function case insensitive - mixed case lookup", - template: `SELECT * FROM {{ env "TaBlE_NaMe" }}`, - data: TemplateData{ - Variables: map[string]string{ - "table_name": "my_table", - }, - }, - want: "SELECT * FROM my_table", - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - resolved, err := ResolveTemplate(tt.template, tt.data, false) - if tt.wantErr { - require.Error(t, err) - } else { - require.NoError(t, err) - require.Equal(t, tt.want, resolved) - } - }) - } -} - func TestAsSQLList(t *testing.T) { tests := []struct { name string diff --git a/web-common/src/features/connectors/code-utils.ts b/web-common/src/features/connectors/code-utils.ts index a88260fb79b..edb0bcb4e06 100644 --- a/web-common/src/features/connectors/code-utils.ts +++ b/web-common/src/features/connectors/code-utils.ts @@ -40,7 +40,6 @@ export function compileConnectorYAML( fieldFilter?: (property: ConnectorDriverProperty) => boolean; orderedProperties?: ConnectorDriverProperty[]; connectorInstanceName?: string; - existingEnvBlob?: string; }, ) { // Add instructions to the top of the file @@ -104,7 +103,7 @@ driver: ${getDriverNameForConnector(connector.name as string)}`; return `${key}: "{{ .env.${makeDotEnvConnectorKey( connector.name as string, key, - options?.existingEnvBlob, + options?.connectorInstanceName, )} }}"`; } @@ -132,19 +131,16 @@ export async function updateDotEnvWithSecrets( // Get the existing .env file let blob: string; - let originalBlob: string; try { const file = await queryClient.fetchQuery({ queryKey: getRuntimeServiceGetFileQueryKey(instanceId, { path: ".env" }), queryFn: () => runtimeServiceGetFile(instanceId, { path: ".env" }), }); blob = file.blob || ""; - originalBlob = blob; // Keep original for conflict detection } catch (error) { // Handle the case where the .env file does not exist if (error?.response?.data?.message?.includes("no such file")) { blob = ""; - 
originalBlob = ""; } else { throw error; } @@ -165,7 +161,6 @@ export async function updateDotEnvWithSecrets( } // Update the blob with the new secrets - // Use originalBlob for conflict detection so all secrets use consistent naming secretKeys.forEach((key) => { if (!key || !formValues[key]) { return; @@ -174,7 +169,7 @@ export async function updateDotEnvWithSecrets( const connectorSecretKey = makeDotEnvConnectorKey( connector.name as string, key, - originalBlob, + connectorInstanceName, ); blob = replaceOrAddEnvVariable( @@ -229,88 +224,15 @@ export function deleteEnvVariable( return newBlob; } -/** - * Get a generic ALL_CAPS environment variable name - * Generic properties (AWS, Google, etc.) use no prefix - * Driver-specific properties use DriverName_PropertyKey format - */ -function getGenericEnvVarName(driverName: string, propertyKey: string): string { - // Generic properties that don't need a driver prefix - const genericProperties = new Set([ - // Google Cloud credentials - "google_application_credentials", - "key_id", - "secret", - // AWS credentials (used by S3, Athena, Redshift, etc.) 
- "aws_access_key_id", - "aws_secret_access_key", - // Azure - "azure_storage_connection_string", - "azure_storage_key", - "azure_storage_sas_token", - "azure_storage_account", - // Snowflake - "privateKey", - ]); - - // Convert property key to SCREAMING_SNAKE_CASE - const propertyKeyUpper = propertyKey - .replace(/([a-z])([A-Z])/g, "$1_$2") - .replace(/[._-]+/g, "_") - .toUpperCase(); - - // If it's a generic property, return just the property name - if (genericProperties.has(propertyKey.toLowerCase())) { - return propertyKeyUpper; - } - - // Otherwise, use DriverName_PropertyKey format - const driverNameUpper = driverName - .replace(/([a-z])([A-Z])/g, "$1_$2") - .replace(/[._-]+/g, "_") - .toUpperCase(); - - return `${driverNameUpper}_${propertyKeyUpper}`; -} - -/** - * Check if an environment variable exists in the env blob - */ -function envVarExists(envBlob: string, varName: string): boolean { - const lines = envBlob.split("\n"); - return lines.some((line) => line.startsWith(`${varName}=`)); -} - -/** - * Find the next available environment variable name by appending _1, _2, etc. - */ -function findAvailableEnvVarName(envBlob: string, baseName: string): string { - let varName = baseName; - let counter = 1; - - while (envVarExists(envBlob, varName)) { - varName = `${baseName}_${counter}`; - counter++; - } - - return varName; -} - export function makeDotEnvConnectorKey( driverName: string, key: string, - existingEnvBlob?: string, + connectorInstanceName?: string, ) { - // Generate generic ALL_CAPS environment variable name - const baseGenericName = getGenericEnvVarName(driverName, key); - - // If no existing env blob is provided, just return the base generic name - if (!existingEnvBlob) { - return baseGenericName; - } - - // Check for conflicts and append _# if necessary - return findAvailableEnvVarName(existingEnvBlob, baseGenericName); + // Note: The connector instance name is used when provided, otherwise fall back to driver name. 
+ // This enables configuring multiple connectors that use the same driver with unique env keys. + const nameToUse = connectorInstanceName || driverName; + return `connector.${nameToUse}.${key}`; } export async function updateRillYAMLWithOlapConnector( diff --git a/web-common/src/features/sources/modal/submitAddDataForm.ts b/web-common/src/features/sources/modal/submitAddDataForm.ts index ebd7067b662..d9bdbd6f332 100644 --- a/web-common/src/features/sources/modal/submitAddDataForm.ts +++ b/web-common/src/features/sources/modal/submitAddDataForm.ts @@ -51,8 +51,6 @@ const connectorSubmissions = new Map< promise: Promise; connectorName: string; completed: boolean; - originalEnvBlob?: string; - newEnvBlob?: string; } >(); @@ -162,8 +160,6 @@ async function saveConnectorAnyway( formValues: AddDataFormValues, newConnectorName: string, instanceId?: string, - preComputedEnvBlob?: string, - originalEnvBlob?: string, ): Promise { const resolvedInstanceId = instanceId ?? get(runtime).instanceId; @@ -176,19 +172,14 @@ async function saveConnectorAnyway( // Mark to avoid rollback by concurrent submissions savedAnywayPaths.add(newConnectorFilePath); - // Use pre-computed env blob if provided to avoid re-computing and getting _1 suffix - // when the first attempt already added the variable - let newEnvBlob = preComputedEnvBlob; - if (!newEnvBlob) { - // Fallback to computing if not provided (for backwards compatibility) - newEnvBlob = await updateDotEnvWithSecrets( - queryClient, - connector, - formValues, - "connector", - newConnectorName, - ); - } + // Update .env file with secrets (keep ordering consistent with Test and Connect) + const newEnvBlob = await updateDotEnvWithSecrets( + queryClient, + connector, + formValues, + "connector", + newConnectorName, + ); await runtimeServicePutFile(resolvedInstanceId, { path: ".env", @@ -198,12 +189,10 @@ async function saveConnectorAnyway( }); // Always create/overwrite to ensure the connector file is created immediately - // Use 
originalEnvBlob (before modifications) to check conflicts, not the modified blob await runtimeServicePutFile(resolvedInstanceId, { path: newConnectorFilePath, blob: compileConnectorYAML(connector, formValues, { connectorInstanceName: newConnectorName, - existingEnvBlob: originalEnvBlob, }), create: true, createOnly: false, @@ -254,16 +243,12 @@ export async function submitAddConnectorForm( const newConnectorName = existingSubmission.connectorName; // Proceed immediately with Save Anyway logic - // Use the pre-computed env blobs from the concurrent Test and Connect operation - // to ensure consistent variable naming (e.g., GOOGLE_APPLICATION_CREDENTIALS not _2) await saveConnectorAnyway( queryClient, connector, formValues, newConnectorName, instanceId, - existingSubmission.newEnvBlob, - existingSubmission.originalEnvBlob, ); return; } else if (!existingSubmission.completed) { @@ -305,13 +290,6 @@ export async function submitAddConnectorForm( newConnectorName, ); - // Store the computed blobs in the submission so concurrent "Save Anyway" can reuse them - const submission = connectorSubmissions.get(uniqueConnectorSubmissionKey); - if (submission) { - submission.originalEnvBlob = originalEnvBlob; - submission.newEnvBlob = newEnvBlob; - } - if (saveAnyway) { // Save Anyway: bypass reconciliation entirely via centralized helper await saveConnectorAnyway( @@ -320,8 +298,6 @@ export async function submitAddConnectorForm( formValues, newConnectorName, instanceId, - newEnvBlob, - originalEnvBlob, ); return; } @@ -346,7 +322,6 @@ export async function submitAddConnectorForm( path: newConnectorFilePath, blob: compileConnectorYAML(connector, formValues, { connectorInstanceName: newConnectorName, - existingEnvBlob: originalEnvBlob, }), create: true, createOnly: false,