# sure/.env.local.example

# To enable / disable self-hosting features.
SELF_HOSTED=true
# Custom port config
# For users who have another application listening on 3000, this sets the port Puma will listen on.
PORT=3000
# SimpleFIN runtime flags (default-off)
# Accepted truthy values: 1, true, yes, on
# SIMPLEFIN_DEBUG_RAW: when truthy, logs the raw payload returned by SimpleFIN (debug-only; can be noisy)
SIMPLEFIN_DEBUG_RAW=false
# SIMPLEFIN_INCLUDE_PENDING: when truthy, forces `pending=1` on SimpleFIN fetches when caller doesn't specify `pending:`
SIMPLEFIN_INCLUDE_PENDING=false
# Lunchflow runtime flags (default-off)
# LUNCHFLOW_DEBUG_RAW: when truthy, logs the raw payload returned by Lunchflow (debug-only; can be noisy)
LUNCHFLOW_DEBUG_RAW=false
# LUNCHFLOW_INCLUDE_PENDING: when truthy, adds `include_pending=true` to Lunchflow transaction fetch requests
LUNCHFLOW_INCLUDE_PENDING=false
# Controls onboarding flow (valid: open, closed, invite_only)
ONBOARDING_STATE=open
# Enable Twelve Data market data (careful, this will use your API credits)
TWELVE_DATA_API_KEY=
# OpenAI-compatible API endpoint config
OPENAI_ACCESS_TOKEN=
OPENAI_URI_BASE=
OPENAI_MODEL=
# Example: OpenAI-compatible endpoint served by LM Studio from Docker
# OPENAI_URI_BASE=http://host.docker.internal:1234/
# OPENAI_MODEL=qwen/qwen3-vl-4b
# OpenID Connect for development
OIDC_CLIENT_ID=
OIDC_CLIENT_SECRET=
OIDC_ISSUER=
OIDC_REDIRECT_URI=http://localhost:3000/auth/openid_connect/callback
# Langfuse config
LANGFUSE_PUBLIC_KEY=
LANGFUSE_SECRET_KEY=
LANGFUSE_HOST=https://cloud.langfuse.com
# Set to `true` to get error messages rendered in the /chats UI
AI_DEBUG_MODE=