Document fields: _id, config, updatedAt, version, yaml, updated_at

_id: librechat_config

config:
{
"version": "1.3.0",
"cache": true,
"memory": {
"disabled": false,
"recursionLimit": 15,
"validKeys": [
"daily_tracking",
"clients",
"ai_projects",
"seo_content",
"billing_payments",
"technical_infrastructure",
"meeting_notes",
"personal_family",
"prefrences",
"Tasks"
],
"tokenLimit": 1000000,
"charLimit": 50000,
"personalize": true,
"messageWindowSize": 10,
"agent": {
"provider": "openAI",
"model": "gpt-4o-mini",
"instructions": "You are Quinn's AI Companion Memory Assistant operating in Comprehensive Knowledge (All-Data + Lifelog) mode. Core principles: NO PARAPHRASING (store full context), AUTO-RETRIEVE (recall on every question), COMPREHENSIVE CAPTURE (all details with timestamps). Store: daily work activities, May Marketing clients (Cowboy Property, Flyers Edge, Woods Roofing, Pave Worx), AI projects with Notion DB IDs, SEO strategies, billing/payments, technical infrastructure (Railway, Docker, errors, file paths), meeting notes, personal/family (Raven), preferences, and tasks. Auto-store: online searches (Google, Reddit, YouTube), code work (file paths, errors, fixes), session tracking, Q&A logs. Format: Use 'MMM Dth YYYY' dates, include timestamps, preserve full paragraphs/specs without summarizing. Always use user ID 'quinn' (lowercase only). Prioritize accuracy and completeness.",
"model_parameters": {
"temperature": 0.1
}
}
},
"interface": {
"thinkingIndicatorText": "I'm Thinking ...",
"privacyPolicy": {
"externalUrl": "https://librechat.ai/privacy-policy",
"openNewTab": true
},
"termsOfService": {
"externalUrl": "https://librechat.ai/tos",
"openNewTab": true
}
},
"registration": {
"socialLogins": [
"discord",
"facebook",
"github",
"google",
"openid"
]
},
"speech": {
"tts": {
"elevenlabs": {
"apiKey": "${ELEVENLABS_API_KEY}",
"model": "eleven_multilingual_v2",
"voices": [
"2EiwWnXFnvU5JabPnv8n",
"CwhRBWXzGAHq8TQ4Fs17",
"EXAVITQu4vr4xnSDxMaL",
"FGY2WhTYpPnrIDTdsKH5",
"IKne3meq5aSn9XLyUdCD",
"JBFqnCBsd6RMkjVDRZzb",
"N2lVS1w4EtoT3dr4eOWO",
"SAz9YHcvj6GT2YYXdXww",
"SOYHLrjzK2X1ezoPC6cr",
"TX3LPaxmHKxFdv7VOQHJ",
"MnUw1cSnpiLoLhpd3Hqp",
"7YaUDeaStRuoYg3FKsmU"
]
}
},
"stt": {
"openai": {
"url": "https://api.groq.com/openai/v1",
"apiKey": "${GROQ_API_KEY}",
"model": "whisper-large-v3"
}
},
"speechTab": {
"textToSpeech": {
"engineTTS": "elevenlabs",
"voice": "EXAVITQu4vr4xnSDxMaL",
"automaticPlayback": false,
"playbackRate": 1,
"cacheTTS": true
},
"speechToText": {
"engineSTT": "openai",
"model": "whisper-large-v3"
}
}
},
"endpoints": {
"openAI": {
"baseURL": "https://api.supermemory.ai/v3/https://api.openai.com/v1",
"headers": {
"x-supermemory-api-key": "${SUPERMEMORY_API_KEY}",
"x-sm-user-id": "quinn",
"x-sm-conversation-id": "quinn_unified_context"
},
"addParams": {
"store": true
},
"titleModel": "gpt-4o-mini",
"summaryModel": "gpt-4o-mini"
},
"google": false,
"anthropic": false,
"plugins": false,
"assistants": {
"disableBuilder": false,
"pollIntervalMs": 750,
"timeoutMs": 180000,
"capabilities": [
"tools",
"actions"
],
"disableCodeInterpreter": true
},
"agents": {
"disableBuilder": false,
"recursionLimit": 25,
"maxRecursionLimit": 50,
"allowedProviders": [
"Vercel AI",
"groq",
"openAI",
"google"
],
"capabilities": [
"execute_code",
"file_search",
"actions",
"tools",
"artifacts",
"context",
"web_search"
],
"maxCitations": 20,
"maxCitationsPerFile": 5,
"minRelevanceScore": 0.45
},
"custom": [
{
"name": "Quinn AI",
"iconURL": "https://chat.combinedmemory.com/logo.png",
"apiKey": "${OPENAI_API_KEY}",
"baseURL": "https://api.supermemory.ai/v3/https://api.openai.com/v1",
"headers": {
"x-supermemory-api-key": "${SUPERMEMORY_API_KEY}",
"x-sm-user-id": "quinn",
"x-sm-conversation-id": "quinn_unified_context"
},
"models": {
"default": [
"ft:gpt-4o-2024-08-06:may-marketing-seo::CVi7ogNH"
],
"fetch": false
},
"maxContextTokens": 128000,
"titleConvo": true,
"titleModel": "ft:gpt-4o-2024-08-06:may-marketing-seo::CVi7ogNH",
"modelDisplayLabel": "Quinn AI",
"addParams": {
"store": true
},
"modelParameters": {
"temperature": 0.7,
"top_p": 0.9,
"frequency_penalty": 0.3,
"presence_penalty": 0.1
},
"systemMessage": "You're a friendly conversational agent roleplaying as Quinn May. You have access to memory tools to remember past conversations and context. Keep the conversation natural and engaging.",
"disableCodeInterpreter": true,
"tools": [
"codebase_search",
"e2b_code_execution",
"e2b_workspace",
"airtop_browser",
"search_memory",
"add_memory",
"fetch_recent_memories",
"markupgo_image",
"markupgo_pdf",
"markupgo_office"
]
},
{
"name": "groq",
"apiKey": "${GROQ_API_KEY}",
"baseURL": "https://api.supermemory.ai/v3/https://api.groq.com/openai/v1/",
"headers": {
"x-supermemory-api-key": "${SUPERMEMORY_API_KEY}",
"x-sm-user-id": "quinn",
"x-sm-conversation-id": "quinn_unified_context"
},
"models": {
"default": [
"llama-3.3-70b-versatile",
"llama-3.1-8b-instant",
"openai/gpt-oss-120b",
"openai/gpt-oss-20b",
"llama3-70b-8192",
"llama3-8b-8192",
"llama-guard-3-8b",
"meta-llama/llama-4-maverick-17b-128e-instruct",
"meta-llama/llama-4-scout-17b-16e-instruct",
"meta-llama/llama-guard-4-12b",
"deepseek-r1-distill-llama-70b",
"qwen/qwen3-32b",
"moonshotai/kimi-k2-instruct-0905",
"gemma2-9b-it",
"whisper-large-v3",
"whisper-large-v3-turbo",
"distil-whisper-large-v3-en"
],
"fetch": false
},
"maxContextTokens": 50000,
"titleConvo": true,
"titleModel": "llama-3.3-70b-versatile",
"modelDisplayLabel": "groq",
"disableCodeInterpreter": true,
"tools": [
"codebase_search",
"e2b_code_execution",
"e2b_workspace",
"airtop_browser",
"search_memory",
"add_memory",
"fetch_recent_memories",
"markupgo_image",
"markupgo_pdf",
"markupgo_office"
]
},
{
"name": "Mistral",
"apiKey": "${MISTRAL_API_KEY}",
"baseURL": "https://api.supermemory.ai/v3/https://api.mistral.ai/v1",
"headers": {
"x-supermemory-api-key": "${SUPERMEMORY_API_KEY}",
"x-sm-user-id": "quinn",
"x-sm-conversation-id": "quinn_unified_context"
},
"models": {
"default": [
"mistral-large-latest",
"mistral-small-latest",
"codestral-latest",
"pixtral-large-latest"
],
"fetch": true
},
"maxContextTokens": 50000,
"titleConvo": true,
"titleMethod": "completion",
"titleModel": "mistral-small-latest",
"summarize": false,
"summaryModel": "mistral-small-latest",
"forcePrompt": false,
"modelDisplayLabel": "Mistral",
"dropParams": [
"stop",
"user",
"frequency_penalty",
"presence_penalty"
],
"disableCodeInterpreter": true,
"tools": [
"codebase_search",
"e2b_code_execution",
"e2b_workspace",
"airtop_browser",
"search_memory",
"add_memory",
"fetch_recent_memories",
"markupgo_image",
"markupgo_pdf",
"markupgo_office"
]
},
{
"name": "OpenRouter",
"apiKey": "${OPENROUTER_KEY}",
"baseURL": "https://api.supermemory.ai/v3/https://openrouter.ai/api/v1",
"headers": {
"x-supermemory-api-key": "${SUPERMEMORY_API_KEY}",
"x-sm-user-id": "quinn",
"x-sm-conversation-id": "quinn_unified_context"
},
"models": {
"default": [
"openrouter/auto",
"anthropic/claude-sonnet-4.5",
"anthropic/claude-opus-4.1",
"anthropic/claude-opus-4",
"anthropic/claude-haiku-4.5",
"anthropic/claude-3.7-sonnet",
"anthropic/claude-3.5-sonnet",
"anthropic/claude-3.5-haiku",
"x-ai/grok-4-fast",
"openai/gpt-5-pro",
"openai/gpt-4o",
"openai/gpt-4o-mini",
"openai/o3-mini",
"deepseek/deepseek-r1",
"deepseek/deepseek-chat",
"google/gemini-pro-1.5",
"google/gemini-flash-1.5",
"google/gemini-2.0-flash-thinking-exp",
"meta-llama/llama-3.3-70b-instruct",
"meta-llama/llama-3.1-405b-instruct",
"qwen/qwen-2.5-72b-instruct",
"qwen/qwq-32b-preview",
"mistralai/mistral-large",
"mistralai/mistral-small",
"cohere/command-r-plus",
"cohere/command-r"
],
"fetch": false
},
"maxContextTokens": 50000,
"dropParams": [
"stop"
],
"titleConvo": true,
"titleModel": "openai/gpt-4o-mini",
"summarize": false,
"summaryModel": "openai/gpt-4o-mini",
"forcePrompt": false,
"modelDisplayLabel": "OpenRouter",
"disableCodeInterpreter": true,
"tools": [
"codebase_search",
"e2b_code_execution",
"e2b_workspace",
"airtop_browser",
"search_memory",
"add_memory",
"fetch_recent_memories",
"markupgo_image",
"markupgo_pdf",
"markupgo_office"
]
},
{
"name": "Perplexity",
"apiKey": "${PERPLEXITY_API_KEY}",
"baseURL": "https://api.supermemory.ai/v3/https://api.perplexity.ai/",
"headers": {
"x-supermemory-api-key": "${SUPERMEMORY_API_KEY}",
"x-sm-user-id": "quinn",
"x-sm-conversation-id": "quinn_unified_context"
},
"models": {
"default": [
"sonar-reasoning",
"sonar-pro",
"sonar"
],
"fetch": true
},
"maxContextTokens": 50000,
"titleConvo": true,
"titleModel": "sonar",
"summarize": false,
"summaryModel": "sonar",
"forcePrompt": false,
"dropParams": [
"stop",
"frequency_penalty"
],
"modelDisplayLabel": "Perplexity",
"disableCodeInterpreter": true,
"tools": [
"codebase_search",
"e2b_code_execution",
"e2b_workspace",
"airtop_browser",
"search_memory",
"add_memory",
"fetch_recent_memories",
"markupgo_image",
"markupgo_pdf",
"markupgo_office"
]
},
{
"name": "Vercel AI",
"apiKey": "${VERCELAI_API_KEY}",
"baseURL": "https://api.supermemory.ai/v3/https://ai-gateway.vercel.sh/v1",
"headers": {
"x-supermemory-api-key": "${SUPERMEMORY_API_KEY}",
"x-sm-user-id": "quinn",
"x-sm-conversation-id": "quinn_unified_context"
},
"models": {
"default": [
"openai/gpt-4o",
"openai/gpt-4o-mini",
"meta-llama/llama-3.1-70b-instruct"
],
"fetch": true
},
"maxContextTokens": 50000,
"titleConvo": true,
"titleModel": "openai/gpt-4o-mini",
"modelDisplayLabel": "Vercel AI",
"iconURL": "https://vercel.com/favicon.ico",
"disableCodeInterpreter": true,
"tools": [
"codebase_search",
"e2b_code_execution",
"e2b_workspace",
"airtop_browser",
"search_memory",
"add_memory",
"fetch_recent_memories",
"markupgo_image",
"markupgo_pdf",
"markupgo_office"
]
},
{
"name": "Jina DeepSearch",
"apiKey": "${JINA_API_KEY}",
"baseURL": "https://api.supermemory.ai/v3/https://deepsearch.jina.ai/v1",
"headers": {
"x-supermemory-api-key": "${SUPERMEMORY_API_KEY}",
"x-sm-user-id": "quinn",
"x-sm-conversation-id": "quinn_unified_context"
},
"models": {
"default": [
"jina-deepsearch-v1"
],
"fetch": true
},
"maxContextTokens": 50000,
"titleConvo": true,
"titleModel": "jina-deepsearch-v1",
"modelDisplayLabel": "Jina DeepSearch",
"iconURL": "https://jina.ai/favicon.ico",
"disableCodeInterpreter": true,
"tools": [
"codebase_search",
"e2b_code_execution",
"e2b_workspace",
"airtop_browser",
"search_memory",
"add_memory",
"fetch_recent_memories",
"markupgo_image",
"markupgo_pdf",
"markupgo_office"
]
},
{
"name": "Combined Memory",
"apiKey": "${OPENAI_API_KEY}",
"baseURL": "https://api.supermemory.ai/v3/https://gateway.ai.cloudflare.com/v1/4b339d9b82347caea48bbe42d4896bbd/combined-memory/compat",
"headers": {
"x-supermemory-api-key": "${SUPERMEMORY_API_KEY}",
"x-sm-user-id": "quinn",
"x-sm-conversation-id": "quinn_unified_context"
},
"models": {
"default": [
"openai/gpt-4o",
"openai/gpt-4o-mini",
"openai/gpt-4-turbo",
"openai/gpt-5-pro",
"openai/gpt-5-codex",
"openai/o3-mini-2025-01-31",
"openai/gpt-4o-search-preview-2025-03-11",
"mistral/mistral-medium-2508",
"mistral/mistral-small-3.2-24b-instruct-2506",
"mistral/devstral-medium-2507",
"groq/moonshotai/kimi-k2-instruct-0905",
"groq/deepseek-r1-distill-llama-70b"
],
"fetch": true
},
"maxContextTokens": 50000,
"titleConvo": true,
"titleModel": "openai/gpt-4o-mini",
"modelDisplayLabel": "Combined Memory",
"iconURL": "https://chat.combinedmemory.com/logo.png",
"disableCodeInterpreter": true,
"tools": [
"codebase_search",
"e2b_code_execution",
"e2b_workspace",
"airtop_browser",
"search_memory",
"add_memory",
"fetch_recent_memories",
"markupgo_image",
"markupgo_pdf",
"markupgo_office"
]
},
{
"name": "Research Assistant",
"apiKey": "${OPENAI_API_KEY}",
"baseURL": "https://api.supermemory.ai/v3/https://api.openai.com/v1",
"headers": {
"x-supermemory-api-key": "${SUPERMEMORY_API_KEY}",
"x-sm-user-id": "quinn",
"x-sm-conversation-id": "quinn_research_context"
},
"models": {
"default": [
"gpt-4o",
"gpt-4o-mini",
"o3-mini",
"gpt-5-pro"
],
"fetch": false
},
"maxContextTokens": 128000,
"titleConvo": true,
"titleModel": "gpt-4o-mini",
"modelDisplayLabel": "Research Assistant",
"iconURL": "https://cdn-icons-png.flaticon.com/512/3976/3976625.png",
"addParams": {
"store": true
},
"modelParameters": {
"temperature": 0.3,
"top_p": 0.9,
"presence_penalty": 0.1
},
"systemMessage": "You are a research assistant with access to unlimited context memory. Your capabilities include:\n\n1. **Document Analysis**: Search through uploaded research papers, PDFs, and documents using file_search\n2. **Memory Management**: Remember key findings, insights, and discoveries from conversations using add_memory and search_memory\n3. **Information Synthesis**: Help synthesize information across multiple sources and conversations\n4. **Research Tracking**: Track research progress, important discoveries, and follow-up tasks\n5. **Web Research**: Use web_search to find academic papers, articles, and supplementary information\n6. **Code Execution**: Run analysis scripts, data processing, or calculations using e2b_code_execution\n\nKey principles:\n- Always cite sources when referencing uploaded documents\n- Store important findings to memory automatically\n- Retrieve relevant past research when answering questions\n- Be precise and academic in your responses\n- Ask clarifying questions when research needs are unclear\n- Organize information hierarchically and logically\n\nWhen a user uploads a file, automatically:\n1. Add it to memory with a descriptive title\n2. Extract and remember key findings\n3. Note the document's main topics and conclusions\n",
"disableCodeInterpreter": false,
"tools": [
"file_search",
"web_search",
"e2b_code_execution",
"e2b_workspace",
"airtop_browser",
"search_memory",
"add_memory",
"fetch_recent_memories",
"codebase_search",
"markupgo_image",
"markupgo_pdf",
"markupgo_office"
]
}
]
},
"mcpServers": {
"imessage": {
"type": "stdio",
"command": "node",
"args": [
"imessage-mcp-server.js"
],
"env": {
"MEILI_HOST": "${MEILI_HOST}",
"MEILI_MASTER_KEY": "${MEILI_MASTER_KEY}"
},
"timeout": 30000,
"iconPath": "/assets/logo.png",
"startup": false
},
"railway": {
"type": "http",
"url": "https://server.smithery.ai/@jason-tan-swe/railway-mcp/mcp?api_key=c0696122-7da9-4c99-84e2-1798ce74a954&profile=given-narwhal-PvFXc4",
"timeout": 120000,
"iconPath": "https://railway.app/favicon.ico"
},
"rube": {
"type": "http",
"url": "https://rube.app/mcp",
"headers": {
"Authorization": "Bearer ${RUBE_API_KEY}"
},
"timeout": 120000,
"iconPath": "https://rube.app/favicon.ico"
},
"n8n": {
"type": "stdio",
"command": "npx",
"args": [
"-y",
"mcp-remote",
"https://czlonkowskin8n-mcp-railwaylatest-production-23d6.up.railway.app/mcp",
"--header",
"Authorization: Bearer ${N8N_API_KEY}"
],
"timeout": 120000,
"iconPath": "https://n8n.io/favicon.ico",
"startup": false
},
"mobile-mcp": {
"type": "http",
"url": "http://100.75.150.72:3001",
"timeout": 120000,
"iconPath": "https://mobilenext.com/favicon.ico",
"startup": false
},
"memory": {
"type": "http",
"url": "https://api.supermemory.ai/mcp",
"headers": {
"x-api-key": "${SUPERMEMORY_API_KEY}"
},
"timeout": 600000,
"iconPath": "https://chat.combinedmemory.com/logo.png",
"instructions": "Use this server to access Quinn's comprehensive memory system with 2000+ stored conversations and information. Tools available: search (find past context), addMemory (store important details), getProjects (list memory categories). Always search memory when Quinn references past work, asks about preferences, or mentions previous conversations. Store important client names, API keys, technical decisions, and project details."
},
"jina": {
"type": "stdio",
"command": "npx",
"args": [
"-y",
"mcp-remote",
"https://mcp.jina.ai/sse",
"--header",
"Authorization: Bearer ${JINA_API_KEY}"
],
"timeout": 120000,
"iconPath": "https://jina.ai/favicon.ico",
"startup": false
},
"github": {
"type": "http",
"url": "https://api.githubcopilot.com/mcp/",
"timeout": 120000,
"iconPath": "https://github.com/favicon.ico",
"headers": {
"Authorization": "Bearer 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"
}
},
"meta-ads": {
"type": "http",
"url": "https://apollo.composio.dev/v3/mcp/439e4933-7c69-4b0d-b2c0-7172e8c6e668/mcp?user_id=meta",
"timeout": 120000,
"iconPath": "https://composio.dev/favicon.ico"
},
"browserbase": {
"type": "http",
"url": "https://server.smithery.ai/@browserbasehq/mcp-browserbase/mcp?api_key=c0696122-7da9-4c99-84e2-1798ce74a954&profile=given-narwhal-PvFXc4",
"timeout": 120000,
"iconPath": "https://www.browserbase.com/favicon.ico",
"startup": false
},
"webflow-designer": {
"type": "sse",
"url": "https://mcp.webflow.com/sse",
"requiresOAuth": true,
"clientId": "d433513d554b8e74bf833fa8728a837d08c227d045e708c5e99d610fab7e2b71",
"clientSecret": "${WEBFLOW_CLIENT_SECRET}",
"timeout": 120000,
"iconPath": "https://webflow.com/favicon.ico"
},
"company-research": {
"type": "stdio",
"command": "node",
"args": [
"/Users/quinnmay/developer/CombinedMemory/mcp-servers/company-research/index.js"
],
"env": {
"AIRTOP_API_KEY": "${AIRTOP_API_KEY}",
"JINA_API_KEY": "${JINA_API_KEY}",
"MONGO_URI": "${MONGO_URI}"
},
"timeout": 30000,
"iconPath": "/assets/logo.png",
"startup": false,
"instructions": "Research company profiles in Lebanon, OH service area (30 miles) using Airtop browser automation and Jina AI. Extract contacts, validate locations, store in MongoDB. Tools: search_company, extract_contact_info, geocode_address, validate_service_area, create_company_profile, search_nearby_companies, get_lebanon_ohio_companies, format_phone_number."
}
}
}
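Every endpoint entry above routes traffic through the Supermemory proxy by prefixing the provider's base URL with `https://api.supermemory.ai/v3/` and attaching the `x-supermemory-api-key`, `x-sm-user-id`, and `x-sm-conversation-id` headers. A minimal sketch of what such a request looks like outside LibreChat (Node 18+, using the values from the `openAI` endpoint; the exact proxy behavior is an assumption inferred from this config, and the model, prompt, and error handling are illustrative only):

```typescript
// Minimal sketch: a chat completion sent through the Supermemory proxy,
// mirroring the baseURL + header pattern used by the endpoint entries above.
const baseURL = "https://api.supermemory.ai/v3/https://api.openai.com/v1";

async function chatThroughProxy(prompt: string): Promise<string> {
  const res = await fetch(`${baseURL}/chat/completions`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${process.env.OPENAI_API_KEY ?? ""}`,
      "x-supermemory-api-key": process.env.SUPERMEMORY_API_KEY ?? "",
      "x-sm-user-id": "quinn",
      "x-sm-conversation-id": "quinn_unified_context",
    },
    body: JSON.stringify({
      model: "gpt-4o-mini",
      messages: [{ role: "user", content: prompt }],
      store: true, // mirrors addParams.store in the config above
    }),
  });
  if (!res.ok) throw new Error(`proxy request failed: ${res.status}`);
  const data = await res.json();
  return data.choices[0].message.content;
}
```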
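In the `mcpServers` block, entries with `type: stdio` describe a child process the MCP client launches and talks to over stdin/stdout, while `http`/`sse` entries are reached over the network with the listed headers. A rough sketch of what the stdio `n8n` entry resolves to at runtime (illustrative only; this is not LibreChat's actual launcher code):

```typescript
// Illustrative sketch: launching the "n8n" stdio MCP entry exactly as configured
// above (npx -y mcp-remote <url> --header "Authorization: Bearer $N8N_API_KEY").
// The MCP client then exchanges JSON-RPC messages over stdin/stdout, applying
// the configured 120000 ms timeout per request.
import { spawn } from "node:child_process";

const child = spawn(
  "npx",
  [
    "-y",
    "mcp-remote",
    "https://czlonkowskin8n-mcp-railwaylatest-production-23d6.up.railway.app/mcp",
    "--header",
    `Authorization: Bearer ${process.env.N8N_API_KEY ?? ""}`,
  ],
  { stdio: ["pipe", "pipe", "inherit"] },
);

// Requests are written to child.stdin; responses arrive on child.stdout.
child.stdout.on("data", (chunk) => process.stdout.write(chunk));
```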
updatedAt: Thu Oct 30 2025 05:02:49 GMT+0000 (Coordinated Universal Time)

version: v1

yaml (truncated preview):
version: 1.3.0
cache: true
# Memory configurati…

updated_at: Wed Oct 29 2025 16:30:58 GMT+0000 (Coordinated Universal Time)
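Because the `yaml` field is only a truncated preview, a full `librechat.yaml` is best regenerated from the `config` field itself. A hedged sketch using the Node MongoDB driver and `js-yaml` (the database name, connection string, and output path are assumptions; only the collection and field names come from the record above):

```typescript
// Hypothetical sketch: pull the config document shown above out of MongoDB
// and re-serialize its `config` field as librechat.yaml.
import { writeFileSync } from "node:fs";
import { MongoClient } from "mongodb";
import { dump } from "js-yaml";

interface ConfigDoc {
  _id: string;
  config: Record<string, unknown>;
  yaml?: string; // truncated preview, not used here
}

async function exportLibreChatConfig(): Promise<void> {
  const client = new MongoClient(process.env.MONGO_URI ?? "mongodb://localhost:27017");
  try {
    await client.connect();
    const doc = await client
      .db("LibreChat") // assumed database name
      .collection<ConfigDoc>("librechat_config")
      .findOne({ _id: "librechat_config" });
    if (!doc) throw new Error("config document not found");
    writeFileSync("librechat.yaml", dump(doc.config), "utf8");
  } finally {
    await client.close();
  }
}

exportLibreChatConfig().catch(console.error);
```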
Collection statistics:

| Statistic | Value |
|---|---|
| Documents | 1 |
| Total doc size | 39.61 KB |
| Average doc size | 39.61 KB |
| Pre-allocated size | 60 KB |
| Indexes | 1 |
| Total index size | 20 KB |
| Padding factor | |
| Extents | |
Indexes:

| Name | Columns | Size | Attributes |
|---|---|---|---|
| _id_ | _id ASC | 20 KB | |
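The two tables above (document count/sizes and the single default `_id_` index) can be re-read programmatically. A minimal sketch with the same driver, assuming the collection is named `librechat_config` and sits in a database called `LibreChat`:

```typescript
// Minimal sketch: fetch the collection statistics summarized above.
// collStats is a standard MongoDB command; output field names
// (count, size, nindexes, totalIndexSize, ...) can vary by server version.
import { MongoClient } from "mongodb";

async function printStats(): Promise<void> {
  const client = new MongoClient(process.env.MONGO_URI ?? "mongodb://localhost:27017");
  try {
    await client.connect();
    const stats = await client.db("LibreChat").command({ collStats: "librechat_config" });
    console.log({
      documents: stats.count,               // 1 in the table above
      totalDocSizeBytes: stats.size,        // ~39.61 KB
      indexes: stats.nindexes,              // 1 (the default _id_ index)
      totalIndexSizeBytes: stats.totalIndexSize, // ~20 KB
    });
  } finally {
    await client.close();
  }
}

printStats().catch(console.error);
```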