import { useFieldArray, useForm } from "react-hook-form"; import { useState } from "react"; import { zodResolver } from "@hookform/resolvers/zod"; import { type BedrockConfig, type BedrockCredential, type VertexAIConfig, LLMAdapter, type LlmApiKeys, BEDROCK_USE_DEFAULT_CREDENTIALS, VERTEXAI_USE_DEFAULT_CREDENTIALS, } from "@langfuse/shared"; import { ChevronDown, PlusIcon, TrashIcon } from "lucide-react"; import { z } from "zod/v4"; import { Button } from "@/src/components/ui/button"; import { Form, FormControl, FormDescription, FormField, FormItem, FormLabel, FormMessage, } from "@/src/components/ui/form"; import { Input } from "@/src/components/ui/input"; import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue, } from "@/src/components/ui/select"; import { Switch } from "@/src/components/ui/switch"; import { api } from "@/src/utils/api"; import { cn } from "@/src/utils/tailwind"; import { usePostHogClientCapture } from "@/src/features/posthog-analytics/usePostHogClientCapture"; import { type useUiCustomization } from "@/src/ee/features/ui-customization/useUiCustomization"; import { DialogFooter } from "@/src/components/ui/dialog"; import { DialogBody } from "@/src/components/ui/dialog"; import { env } from "@/src/env.mjs"; const isLangfuseCloud = Boolean(env.NEXT_PUBLIC_LANGFUSE_CLOUD_REGION); const isCustomModelsRequired = (adapter: LLMAdapter) => adapter === LLMAdapter.Azure || adapter === LLMAdapter.Bedrock; const createFormSchema = (mode: "create" | "update") => z .object({ secretKey: z.string().optional(), provider: z .string() .min(1, "Please add a provider name that identifies this connection.") .regex( /^[^:]+$/, "Provider name cannot contain colons. 
Use a format like 'OpenRouter_Mistral' instead.", ), adapter: z.nativeEnum(LLMAdapter), baseURL: z.union([z.literal(""), z.url()]), withDefaultModels: z.boolean(), customModels: z.array(z.object({ value: z.string().min(1) })), awsAccessKeyId: z.string().optional(), awsSecretAccessKey: z.string().optional(), awsRegion: z.string().optional(), vertexAILocation: z.string().optional(), extraHeaders: z.array( z.object({ key: z.string().min(1), value: mode === "create" ? z.string().min(1) : z.string().optional(), }), ), }) // 1) Bedrock validation - credentials required in create mode .refine( (data) => { if (data.adapter !== LLMAdapter.Bedrock) return true; // In update mode, credentials are optional (existing ones are preserved) if (mode === "update") { // Only validate region is present return data.awsRegion; } // In create mode, validate credentials // For cloud deployments, AWS credentials are required if (isLangfuseCloud) { return ( data.awsAccessKeyId && data.awsSecretAccessKey && data.awsRegion ); } // For self-hosted deployments, only region is required return data.awsRegion; }, { message: mode === "update" ? "AWS region is required." : isLangfuseCloud ? 
"AWS credentials are required for Bedrock"
              : "AWS region is required.",
        path: ["adapter"],
      },
    )
    // At least one custom model is mandatory for Azure/Bedrock adapters.
    .refine(
      (data) => {
        if (isCustomModelsRequired(data.adapter)) {
          return data.customModels.length > 0;
        }
        return true;
      },
      {
        message: "At least one custom model is required for this adapter.",
        path: ["customModels"],
      },
    )
    // 2) For adapters that support defaults, require default models or at least one custom model
    .refine(
      (data) => {
        if (isCustomModelsRequired(data.adapter)) {
          return true;
        }
        return data.withDefaultModels || data.customModels.length > 0;
      },
      {
        message:
          "At least one custom model name is required when default models are disabled.",
        path: ["withDefaultModels"],
      },
    )
    // Vertex AI validation - service account key or ADC sentinel value required
    .refine(
      (data) => {
        if (data.adapter !== LLMAdapter.VertexAI) return true;

        // In update mode, credentials are optional (existing ones are preserved)
        if (mode === "update") return true;

        // secretKey is required (either JSON key or VERTEXAI_USE_DEFAULT_CREDENTIALS sentinel)
        return !!data.secretKey;
      },
      {
        message: isLangfuseCloud
          ? "GCP service account JSON key is required for Vertex AI"
          : "GCP service account JSON key or Application Default Credentials is required.",
        path: ["secretKey"],
      },
    )
    // Generic secret key requirement for all remaining adapters (create mode only;
    // Bedrock/VertexAI handle their own credential validation above).
    .refine(
      (data) =>
        data.adapter === LLMAdapter.Bedrock ||
        data.adapter === LLMAdapter.VertexAI ||
        mode === "update" ||
        data.secretKey,
      {
        message: "Secret key is required.",
        path: ["secretKey"],
      },
    )
    // Azure has no default base URL, so one must always be provided.
    .refine(
      (data) => {
        if (data.adapter !== LLMAdapter.Azure) return true;
        return data.baseURL && data.baseURL.trim() !== "";
      },
      {
        message: "API Base URL is required for Azure connections.",
        path: ["baseURL"],
      },
    );

interface CreateLLMApiKeyFormProps {
  projectId?: string;
  onSuccess: () => void;
  // NOTE(review): the generic argument was stripped from the mangled source
  // (bare `ReturnType` is invalid — it requires a type argument); restored to
  // the return type of the imported `useUiCustomization` hook — TODO confirm.
  customization: ReturnType<typeof useUiCustomization>;
  mode?: "create" | "update";
  existingKey?: LlmApiKeys;
}

/**
 * Form for creating or updating an LLM API key / provider connection.
 * In "update" mode, empty secret/credential fields preserve the existing
 * stored credentials server-side.
 */
export function CreateLLMApiKeyForm({
  projectId,
  onSuccess,
  customization,
  mode = "create",
  existingKey,
}: CreateLLMApiKeyFormProps) {
  const [showAdvancedSettings, setShowAdvancedSettings] = useState(false);
  const utils = api.useUtils();
  const capture = usePostHogClientCapture();

  // Existing keys are used to reject duplicate provider names on create.
  const existingKeys = api.llmApiKey.all.useQuery(
    {
      projectId: projectId as string,
    },
    { enabled: Boolean(projectId) },
  );

  const mutCreateLlmApiKey = api.llmApiKey.create.useMutation({
    onSuccess: () => utils.llmApiKey.invalidate(),
  });
  const mutUpdateLlmApiKey = api.llmApiKey.update.useMutation({
    onSuccess: () => utils.llmApiKey.invalidate(),
  });
  const mutTestLLMApiKey = api.llmApiKey.test.useMutation();
  const mutTestUpdateLLMApiKey = api.llmApiKey.testUpdate.useMutation();

  // Adapter preselected via UI customization (EE), falling back to OpenAI.
  const defaultAdapter: LLMAdapter = customization?.defaultModelAdapter
    ? LLMAdapter[customization.defaultModelAdapter]
    : LLMAdapter.OpenAI;

  // Per-adapter default base URL from UI customization (EE feature).
  const getCustomizedBaseURL = (adapter: LLMAdapter) => {
    switch (adapter) {
      case LLMAdapter.OpenAI:
        return customization?.defaultBaseUrlOpenAI ?? "";
      case LLMAdapter.Azure:
        return customization?.defaultBaseUrlAzure ?? "";
      case LLMAdapter.Anthropic:
        return customization?.defaultBaseUrlAnthropic ??
// NOTE(review): from `renderCustomModelsField` onward, this span had all
// angle-bracket content (JSX tags and generic type arguments, e.g. the
// useForm<...> type parameter) stripped during extraction; the original
// markup cannot be reconstructed from this view and is left byte-identical.
// The form-state setup (formSchema, useForm defaultValues, useFieldArray
// for customModels/extraHeaders) up to `renderCustomModelsField` is intact.
""; default: return ""; } }; const formSchema = createFormSchema(mode); const form = useForm({ resolver: zodResolver(formSchema), defaultValues: mode === "update" && existingKey ? { adapter: existingKey.adapter as LLMAdapter, provider: existingKey.provider, secretKey: existingKey.adapter === LLMAdapter.VertexAI && existingKey.displaySecretKey === "Default GCP credentials (ADC)" ? VERTEXAI_USE_DEFAULT_CREDENTIALS : "", baseURL: existingKey.baseURL ?? getCustomizedBaseURL(existingKey.adapter as LLMAdapter), withDefaultModels: existingKey.withDefaultModels, customModels: existingKey.customModels.map((value) => ({ value })), extraHeaders: existingKey.extraHeaderKeys?.map((key) => ({ key, value: "" })) ?? [], vertexAILocation: existingKey.adapter === LLMAdapter.VertexAI && existingKey.config ? ((existingKey.config as VertexAIConfig).location ?? "") : "", awsRegion: existingKey.adapter === LLMAdapter.Bedrock && existingKey.config ? ((existingKey.config as BedrockConfig).region ?? "") : "", awsAccessKeyId: "", awsSecretAccessKey: "", } : { adapter: defaultAdapter, provider: "", secretKey: "", baseURL: getCustomizedBaseURL(defaultAdapter), withDefaultModels: true, customModels: [], extraHeaders: [], vertexAILocation: "global", awsRegion: "", awsAccessKeyId: "", awsSecretAccessKey: "", }, }); const currentAdapter = form.watch("adapter"); const hasAdvancedSettings = (adapter: LLMAdapter) => adapter === LLMAdapter.OpenAI || adapter === LLMAdapter.Anthropic || adapter === LLMAdapter.VertexAI || adapter === LLMAdapter.GoogleAIStudio; const { fields, append, remove } = useFieldArray({ control: form.control, name: "customModels", }); const { fields: headerFields, append: appendHeader, remove: removeHeader, } = useFieldArray({ control: form.control, name: "extraHeaders", }); const renderCustomModelsField = () => ( ( Custom models Custom model names accepted by given endpoint. 
{currentAdapter === LLMAdapter.Azure && ( { "For Azure, the model name should be the same as the deployment name in Azure. For evals, choose a model with function calling capabilities." } )} {currentAdapter === LLMAdapter.Bedrock && ( { "For Bedrock, the model name is the Bedrock Inference Profile ID, e.g. 'eu.anthropic.claude-3-5-sonnet-20240620-v1:0'" } )} {fields.map((customModel, index) => ( ))} )} /> ); const renderExtraHeadersField = () => ( ( Extra Headers Optional additional HTTP headers to include with requests towards LLM provider. All header values stored encrypted{" "} {isLangfuseCloud ? "on our servers" : "in your database"}. {headerFields.map((header, index) => (
))}
)} /> ); // Disable provider and adapter fields in update mode const isFieldDisabled = (fieldName: string) => { if (mode !== "update") return false; return ["provider", "adapter"].includes(fieldName); }; async function onSubmit(values: z.infer) { if (!projectId) return console.error("No project ID found."); if (mode === "create") { if ( existingKeys?.data?.data .map((k) => k.provider) .includes(values.provider) ) { form.setError("provider", { type: "manual", message: "There already exists an API key for this provider.", }); return; } capture("project_settings:llm_api_key_create", { provider: values.provider, }); } else { capture("project_settings:llm_api_key_update", { provider: values.provider, }); } let secretKey = values.secretKey; let config: BedrockConfig | VertexAIConfig | undefined; if (currentAdapter === LLMAdapter.Bedrock) { // In update mode, only update credentials if provided if (mode === "update") { // Only update secretKey if both credentials are provided if (values.awsAccessKeyId && values.awsSecretAccessKey) { const credentials: BedrockCredential = { accessKeyId: values.awsAccessKeyId, secretAccessKey: values.awsSecretAccessKey, }; secretKey = JSON.stringify(credentials); } else { // Keep existing credentials by not setting secretKey secretKey = undefined; } } else { // In create mode, handle as before if ( !isLangfuseCloud && (!values.awsAccessKeyId || !values.awsSecretAccessKey) ) { secretKey = BEDROCK_USE_DEFAULT_CREDENTIALS; } else { const credentials: BedrockCredential = { accessKeyId: values.awsAccessKeyId ?? "", secretAccessKey: values.awsSecretAccessKey ?? "", }; secretKey = JSON.stringify(credentials); } } config = { region: values.awsRegion ?? 
"", }; } else if (currentAdapter === LLMAdapter.VertexAI) { // Handle Vertex AI credentials // secretKey already contains either JSON key or VERTEXAI_USE_DEFAULT_CREDENTIALS sentinel if (mode === "update") { // In update mode, only update secretKey if a new one is provided if (values.secretKey) { secretKey = values.secretKey; } else { // Keep existing credentials by not setting secretKey secretKey = undefined; } } // In create mode, secretKey is already set from values.secretKey // Build config with location only (projectId removed for security - ADC auto-detects) config = {}; if (values.vertexAILocation?.trim()) { config.location = values.vertexAILocation.trim(); } // If config is empty, set to undefined if (Object.keys(config).length === 0) { config = undefined; } } const extraHeaders = values.extraHeaders.length > 0 ? values.extraHeaders.reduce( (acc, header) => { acc[header.key] = header.value ?? ""; return acc; }, {} as Record, ) : undefined; const newLlmApiKey = { id: existingKey?.id ?? "", projectId, secretKey: secretKey ?? "", provider: values.provider, adapter: values.adapter, baseURL: values.baseURL || undefined, withDefaultModels: isCustomModelsRequired(currentAdapter) ? false : values.withDefaultModels, config, customModels: values.customModels .map((m) => m.value.trim()) .filter(Boolean), extraHeaders, }; try { const testResult = mode === "create" ? await mutTestLLMApiKey.mutateAsync(newLlmApiKey) : await mutTestUpdateLLMApiKey.mutateAsync(newLlmApiKey); if (!testResult.success) throw new Error(testResult.error); } catch (error) { form.setError("root", { type: "manual", message: error instanceof Error ? error.message : "Could not verify the API key.", }); return; } return (mode === "create" ? mutCreateLlmApiKey : mutUpdateLlmApiKey) .mutateAsync(newLlmApiKey) .then(() => { form.reset(); onSuccess(); }) .catch((error) => { console.error(error); }); } return (
// NOTE(review): the entire JSX return of this component had its element
// tags stripped during extraction (only expression children, attributes'
// arrow bodies, and text content remain); the original markup cannot be
// reconstructed from this view and is left byte-identical below.
{ e.stopPropagation(); // Prevent event bubbling to parent forms form.handleSubmit(onSubmit)(e); }} > {/* LLM adapter */} ( LLM adapter Schema that is accepted at that provider endpoint. )} /> {/* Provider name */} ( Provider name Key to identify the connection within Langfuse. Cannot contain colons. )} /> {/* API Key or AWS Credentials or Vertex AI Credentials */} {currentAdapter === LLMAdapter.Bedrock ? ( <> ( AWS Region {mode === "update" && existingKey?.config && (existingKey.config as BedrockConfig).region && ( Current:{" "} {(existingKey.config as BedrockConfig).region} )} )} /> ( AWS Access Key ID {!isLangfuseCloud && ( {" "} (optional) )} {mode === "update" ? "Leave empty to keep existing credentials. To update, provide both Access Key ID and Secret Access Key." : isLangfuseCloud ? "These should be long-lived credentials for an AWS user with `bedrock:InvokeModel` permission." : "For self-hosted deployments, AWS credentials are optional. When omitted, authentication will use the AWS SDK default credential provider chain."} )} /> ( AWS Secret Access Key {!isLangfuseCloud && ( {" "} (optional) )} )} /> {!isLangfuseCloud && (

Default credential provider chain: When AWS credentials are omitted, the system will automatically check for credentials in this order:

  • Environment variables (AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
  • AWS credentials file (~/.aws/credentials)
  • IAM roles for EC2 instances
  • IAM roles for ECS tasks

Learn more about AWS credential providers →

)} ) : currentAdapter === LLMAdapter.VertexAI ? ( <> {/* Vertex AI ADC option for self-hosted only, create mode only */} {!isLangfuseCloud && mode === "create" && ( Use Application Default Credentials (ADC) When enabled, authentication uses the GCP environment's default credentials instead of a service account key. { if (checked) { form.setValue( "secretKey", VERTEXAI_USE_DEFAULT_CREDENTIALS, ); } else { form.setValue("secretKey", ""); } }} /> )} {/* Service Account Key - hidden when ADC is enabled */} {(isLangfuseCloud || form.watch("secretKey") !== VERTEXAI_USE_DEFAULT_CREDENTIALS) && ( ( GCP Service Account Key (JSON) {isLangfuseCloud ? "Your API keys are stored encrypted on our servers." : "Your API keys are stored encrypted in your database."} Paste your GCP service account JSON key here. The service account must have `Vertex AI User` role permissions. Example JSON:
                          {`{
  "type": "service_account",
  "project_id": "",
  "private_key_id": "",
  "private_key": "",
  "client_email": "",
  "client_id": "",
  "auth_uri": "",
  "token_uri": "",
  "auth_provider_x509_cert_url": "",
  "client_x509_cert_url": "",
}`}
                        
)} /> )} {/* ADC info box for self-hosted */} {!isLangfuseCloud && form.watch("secretKey") === VERTEXAI_USE_DEFAULT_CREDENTIALS && (

Application Default Credentials (ADC):{" "} When enabled, the system will automatically check for credentials in this order:

  • Environment variable (GOOGLE_APPLICATION_CREDENTIALS)
  • gcloud CLI credentials (gcloud auth application-default login)
  • GKE Workload Identity
  • Cloud Run service account
  • GCE instance service account (metadata service)

Learn more about GCP Application Default Credentials →

)} ) : ( ( API Key {isLangfuseCloud ? "Your API keys are stored encrypted on our servers." : "Your API keys are stored encrypted in your database."} )} /> )} {/* Azure Base URL - Always required for Azure */} {currentAdapter === LLMAdapter.Azure && ( ( API Base URL Please add the base URL in the following format (or compatible API): https://{instanceName}.openai.azure.com/openai/deployments )} /> )} {/* Custom models: top-level for Azure/Bedrock */} {isCustomModelsRequired(currentAdapter) && renderCustomModelsField()} {/* Extra headers - show for Azure in main section (Azure has no advanced settings) */} {currentAdapter === LLMAdapter.Azure && renderExtraHeadersField()} {hasAdvancedSettings(currentAdapter) && (
)} {hasAdvancedSettings(currentAdapter) && showAdvancedSettings && (
{/* baseURL */} ( API Base URL Leave blank to use the default base URL for the given LLM adapter.{" "} {currentAdapter === LLMAdapter.OpenAI && ( OpenAI default: https://api.openai.com/v1 )} {currentAdapter === LLMAdapter.Anthropic && ( Anthropic default: https://api.anthropic.com (excluding /v1/messages) )} )} /> {/* VertexAI Location */} {currentAdapter === LLMAdapter.VertexAI && ( ( Location (Optional) Google Cloud region (e.g., global, us-central1, europe-west4). Defaults to{" "} global as required for Gemini 3 models. )} /> )} {/* Extra Headers */} {currentAdapter === LLMAdapter.OpenAI && renderExtraHeadersField()} {/* With default models */} ( Enable default models Default models for the selected adapter will be available in Langfuse features. )} /> {/* Custom model names */} {!isCustomModelsRequired(currentAdapter) && renderCustomModelsField()}
)}
{form.formState.errors.root && ( {form.formState.errors.root.message} )}
); }