Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 18 additions & 2 deletions extras/openmemory-mcp/.env.template
Original file line number Diff line number Diff line change
@@ -1,11 +1,27 @@
# OpenMemory MCP Configuration
# Copy this file to .env and fill in your values

# Required: OpenAI API Key for memory processing
# Required: OpenAI-compatible API key used by OpenMemory defaults
OPENAI_API_KEY=

# Optional: OpenAI-compatible base URL (for local providers)
# Example: http://host.docker.internal:11434/v1
OPENAI_BASE_URL=

# Optional: Embedding model metadata (for local embedding setups)
OPENAI_EMBEDDING_MODEL=
OPENAI_EMBEDDING_DIMENSIONS=

# Wizard metadata for embedding provider selection
# Supported values: openai, local
OPENMEMORY_EMBEDDINGS_PROVIDER=openai
OPENMEMORY_EMBEDDINGS_BASE_URL=
OPENMEMORY_EMBEDDINGS_MODEL=
OPENMEMORY_EMBEDDINGS_API_KEY=
OPENMEMORY_EMBEDDINGS_DIMENSIONS=

# Optional: User identifier (defaults to system username)
USER=openmemory

# Optional: Frontend URL (if using UI)
NEXT_PUBLIC_API_URL=http://localhost:8765
NEXT_PUBLIC_API_URL=http://localhost:8765
8 changes: 5 additions & 3 deletions extras/openmemory-mcp/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,9 @@ OpenMemory MCP is a memory service from mem0.ai that provides:

```bash
cp .env.template .env
# Edit .env and add your OPENAI_API_KEY
# Edit .env and add your embedding provider settings
# - OpenAI: OPENAI_API_KEY
# - Local OpenAI-compatible: OPENAI_BASE_URL, OPENAI_API_KEY, OPENAI_EMBEDDING_MODEL, OPENAI_EMBEDDING_DIMENSIONS
```

### 2. Start Services
Expand Down Expand Up @@ -64,7 +66,7 @@ The deployment includes:
- **MCP Server**: http://localhost:8765
- REST API: `/api/v1/memories`
- MCP SSE: `/mcp/{client_name}/sse/{user_id}`

- **Qdrant Dashboard**: http://localhost:6334/dashboard

- **UI** (if enabled): http://localhost:3001
Expand Down Expand Up @@ -184,4 +186,4 @@ OpenMemory uses OpenAI by default. To use different models, you would need to mo

- [OpenMemory Documentation](https://docs.mem0.ai/open-memory/introduction)
- [MCP Protocol Spec](https://github.com/mem0ai/mem0/tree/main/openmemory)
- [Chronicle Memory Docs](../../backends/advanced/MEMORY_PROVIDERS.md)
- [Chronicle Memory Docs](../../backends/advanced/MEMORY_PROVIDERS.md)
185 changes: 164 additions & 21 deletions extras/openmemory-mcp/setup.sh
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,38 @@ set -euo pipefail

# Parse command line arguments
OPENAI_API_KEY=""
EMBEDDINGS_PROVIDER=""
LOCAL_EMBEDDINGS_BASE_URL=""
LOCAL_EMBEDDINGS_MODEL=""
LOCAL_EMBEDDINGS_API_KEY=""
LOCAL_EMBEDDINGS_DIMENSIONS=""

while [[ $# -gt 0 ]]; do
case $1 in
--openai-api-key)
OPENAI_API_KEY="$2"
shift 2
;;
--embeddings-provider)
EMBEDDINGS_PROVIDER="$2"
shift 2
;;
--embeddings-base-url)
LOCAL_EMBEDDINGS_BASE_URL="$2"
shift 2
;;
--embeddings-model)
LOCAL_EMBEDDINGS_MODEL="$2"
shift 2
;;
--embeddings-api-key)
LOCAL_EMBEDDINGS_API_KEY="$2"
shift 2
;;
--embeddings-dimensions)
LOCAL_EMBEDDINGS_DIMENSIONS="$2"
shift 2
;;
*)
echo "Unknown argument: $1"
exit 1
Expand Down Expand Up @@ -43,36 +68,154 @@ fi
# Set restrictive permissions (owner read/write only)
chmod 600 .env

# Get OpenAI API Key (prompt only if not provided via command line)
if [ -z "$OPENAI_API_KEY" ]; then
# Utility: upsert_env_key KEY VALUE
# Replaces an existing "KEY=..." line in ./.env, or appends the line if the
# key is not present. Writes through a temp file + mv so .env is never left
# half-written if awk fails mid-stream.
upsert_env_key() {
  local key="$1"
  local value="$2"
  local temp_file

  temp_file=$(mktemp)
  # Pass key/value through the environment instead of awk -v: -v assignments
  # interpret backslash escapes (e.g. "\n"), which would corrupt values that
  # contain literal backslashes (API keys, Windows-style paths). ENVIRON[]
  # delivers the bytes untouched. index(...) == 1 is an exact prefix match,
  # avoiding regex-metacharacter surprises from building a pattern out of key.
  KEY="$key" VALUE="$value" awk '
    BEGIN { key = ENVIRON["KEY"]; value = ENVIRON["VALUE"]; found = 0 }
    index($0, key "=") == 1 { print key "=" value; found = 1; next }
    { print }
    END { if (!found) print key "=" value }
  ' .env > "$temp_file"
  mv "$temp_file" .env
}

if [ -z "$EMBEDDINGS_PROVIDER" ]; then
echo ""
echo "🔑 OpenAI API Key (required for memory extraction)"
echo "Get yours from: https://platform.openai.com/api-keys"
echo "🧩 Embedding provider"
echo "1) OpenAI embeddings"
echo "2) Local OpenAI-compatible embeddings"
while true; do
read -s -r -p "OpenAI API Key: " OPENAI_API_KEY
echo # Print newline after silent input
if [ -n "$OPENAI_API_KEY" ]; then
break
fi
echo "Error: OpenAI API Key cannot be empty. Please try again."
read -r -p "Choose provider [1/2]: " provider_choice
case "$provider_choice" in
1)
EMBEDDINGS_PROVIDER="openai"
break
;;
2)
EMBEDDINGS_PROVIDER="local"
break
;;
*)
echo "Error: Please enter 1 or 2."
;;
esac
done
else
echo "✅ OpenAI API key configured from command line"
fi

# Update .env file safely using awk - replace existing line or append if missing
temp_file=$(mktemp)
awk -v key="$OPENAI_API_KEY" '
/^OPENAI_API_KEY=/ { print "OPENAI_API_KEY=" key; found=1; next }
{ print }
END { if (!found) print "OPENAI_API_KEY=" key }
' .env > "$temp_file"
mv "$temp_file" .env
# Reject anything other than the two supported providers before touching .env.
if [ "$EMBEDDINGS_PROVIDER" != "openai" ] && [ "$EMBEDDINGS_PROVIDER" != "local" ]; then
echo "Error: --embeddings-provider must be 'openai' or 'local'" >&2
exit 1
fi

if [ "$EMBEDDINGS_PROVIDER" = "openai" ]; then
# Get OpenAI API Key (prompt only if not provided via command line)
if [ -z "$OPENAI_API_KEY" ]; then
echo ""
echo "🔑 OpenAI API Key (required for memory extraction + embeddings)"
echo "Get yours from: https://platform.openai.com/api-keys"
# Re-prompt until a non-empty key is entered; -s keeps the key off the screen.
while true; do
read -s -r -p "OpenAI API Key: " OPENAI_API_KEY
echo # Print newline after silent input
if [ -n "$OPENAI_API_KEY" ]; then
break
fi
echo "Error: OpenAI API Key cannot be empty. Please try again."
done
else
echo "✅ OpenAI API key configured from command line"
fi

# Record the chosen provider and the key in .env.
upsert_env_key "OPENMEMORY_EMBEDDINGS_PROVIDER" "openai"
upsert_env_key "OPENAI_API_KEY" "$OPENAI_API_KEY"

# Clear local embedding overrides for pure OpenAI mode
upsert_env_key "OPENAI_BASE_URL" ""
upsert_env_key "OPENAI_EMBEDDING_MODEL" ""
upsert_env_key "OPENAI_EMBEDDING_DIMENSIONS" ""
upsert_env_key "OPENMEMORY_EMBEDDINGS_BASE_URL" ""
upsert_env_key "OPENMEMORY_EMBEDDINGS_MODEL" ""
upsert_env_key "OPENMEMORY_EMBEDDINGS_API_KEY" ""
upsert_env_key "OPENMEMORY_EMBEDDINGS_DIMENSIONS" ""
else
echo ""
echo "🏠 Local embeddings configuration (OpenAI-compatible endpoint)"

# Each local setting may arrive via a CLI flag; prompt only for missing ones.
if [ -z "$LOCAL_EMBEDDINGS_BASE_URL" ]; then
while true; do
read -r -p "Embeddings base URL (e.g. http://host.docker.internal:11434/v1): " LOCAL_EMBEDDINGS_BASE_URL
if [ -n "$LOCAL_EMBEDDINGS_BASE_URL" ]; then
break
fi
echo "Error: Base URL cannot be empty. Please try again."
done
fi

if [ -z "$LOCAL_EMBEDDINGS_MODEL" ]; then
while true; do
read -r -p "Embeddings model name: " LOCAL_EMBEDDINGS_MODEL
if [ -n "$LOCAL_EMBEDDINGS_MODEL" ]; then
break
fi
echo "Error: Model name cannot be empty. Please try again."
done
fi

if [ -z "$LOCAL_EMBEDDINGS_API_KEY" ]; then
while true; do
# -s: don't echo the secret while it is typed.
read -s -r -p "Embeddings API key: " LOCAL_EMBEDDINGS_API_KEY
echo
if [ -n "$LOCAL_EMBEDDINGS_API_KEY" ]; then
break
fi
echo "Error: API key cannot be empty. Please try again."
done
fi

if [ -z "$LOCAL_EMBEDDINGS_DIMENSIONS" ]; then
while true; do
# Dimensions must be a positive integer (the embedding model's vector size).
read -r -p "Embedding dimensions (e.g. 768): " LOCAL_EMBEDDINGS_DIMENSIONS
if [[ "$LOCAL_EMBEDDINGS_DIMENSIONS" =~ ^[0-9]+$ ]] && [ "$LOCAL_EMBEDDINGS_DIMENSIONS" -gt 0 ]; then
break
fi
echo "Error: Dimensions must be a positive integer."
done
fi

upsert_env_key "OPENMEMORY_EMBEDDINGS_PROVIDER" "local"

# Keep OpenAI-compatible defaults pointed at the local embeddings endpoint.
# OpenMemory reads OPENAI_API_KEY by default, and OPENAI_BASE_URL can redirect
# OpenAI client calls to local-compatible servers.
upsert_env_key "OPENAI_API_KEY" "$LOCAL_EMBEDDINGS_API_KEY"
upsert_env_key "OPENAI_BASE_URL" "$LOCAL_EMBEDDINGS_BASE_URL"
upsert_env_key "OPENAI_EMBEDDING_MODEL" "$LOCAL_EMBEDDINGS_MODEL"
upsert_env_key "OPENAI_EMBEDDING_DIMENSIONS" "$LOCAL_EMBEDDINGS_DIMENSIONS"

# Also store explicit OpenMemory-local embedding fields for future tooling.
upsert_env_key "OPENMEMORY_EMBEDDINGS_BASE_URL" "$LOCAL_EMBEDDINGS_BASE_URL"
upsert_env_key "OPENMEMORY_EMBEDDINGS_MODEL" "$LOCAL_EMBEDDINGS_MODEL"
upsert_env_key "OPENMEMORY_EMBEDDINGS_API_KEY" "$LOCAL_EMBEDDINGS_API_KEY"
upsert_env_key "OPENMEMORY_EMBEDDINGS_DIMENSIONS" "$LOCAL_EMBEDDINGS_DIMENSIONS"
fi

echo ""
echo "✅ OpenMemory MCP configured!"
echo "📁 Configuration saved to .env"
echo ""
if [ "$EMBEDDINGS_PROVIDER" = "local" ]; then
echo "ℹ️ Local embeddings mode enabled"
echo " Base URL: $LOCAL_EMBEDDINGS_BASE_URL"
echo " Model: $LOCAL_EMBEDDINGS_MODEL"
echo " Dimensions: $LOCAL_EMBEDDINGS_DIMENSIONS"
else
echo "ℹ️ OpenAI embeddings mode enabled"
fi
echo ""
echo "🚀 To start: docker compose up -d"
echo "🌐 MCP Server: http://localhost:8765"
echo "📱 Web UI: http://localhost:3001"
echo "📱 Web UI: http://localhost:3001"
112 changes: 112 additions & 0 deletions tests/unit/test_openmemory_setup_script.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
import shutil
import stat
import subprocess
from pathlib import Path

REPO_ROOT = Path(__file__).resolve().parents[2]
OPENMEMORY_DIR = REPO_ROOT / "extras" / "openmemory-mcp"


def _prepare_tmp_setup(tmp_path: Path) -> Path:
    """Copy setup.sh and .env.template into tmp_path; return the script path.

    The copied setup.sh is made owner-executable so it can be invoked directly.
    """
    dest_script = tmp_path / "setup.sh"
    dest_template = tmp_path / ".env.template"

    shutil.copy2(OPENMEMORY_DIR / "setup.sh", dest_script)
    shutil.copy2(OPENMEMORY_DIR / ".env.template", dest_template)

    # Preserve existing mode bits, just add the owner-execute bit.
    dest_script.chmod(dest_script.stat().st_mode | stat.S_IXUSR)
    return dest_script


def _read_env_map(env_path: Path) -> dict[str, str]:
data = {}
for raw_line in env_path.read_text(encoding="utf-8").splitlines():
line = raw_line.strip()
if not line or line.startswith("#") or "=" not in line:
continue
key, value = line.split("=", 1)
data[key] = value
return data


def test_setup_openai_embeddings_mode_writes_expected_env(tmp_path):
    """OpenAI mode records the provider/key and blanks local overrides."""
    script = _prepare_tmp_setup(tmp_path)

    cmd = [
        "bash",
        str(script),
        "--embeddings-provider",
        "openai",
        "--openai-api-key",
        "sk-test-openai",
    ]
    subprocess.run(cmd, cwd=tmp_path, check=True, capture_output=True, text=True)

    env = _read_env_map(tmp_path / ".env")
    assert env["OPENMEMORY_EMBEDDINGS_PROVIDER"] == "openai"
    assert env["OPENAI_API_KEY"] == "sk-test-openai"
    assert env["OPENAI_BASE_URL"] == ""
    assert env["OPENAI_EMBEDDING_MODEL"] == ""
    assert env["OPENAI_EMBEDDING_DIMENSIONS"] == ""


def test_setup_local_embeddings_mode_writes_expected_env(tmp_path):
    """Local mode mirrors every flag into both the OPENAI_* defaults and the
    explicit OPENMEMORY_EMBEDDINGS_* keys."""
    script = _prepare_tmp_setup(tmp_path)

    base_url = "http://host.docker.internal:11434/v1"
    model = "nomic-embed-text"
    api_key = "local-key"
    dims = "768"
    cmd = [
        "bash",
        str(script),
        "--embeddings-provider",
        "local",
        "--embeddings-base-url",
        base_url,
        "--embeddings-model",
        model,
        "--embeddings-api-key",
        api_key,
        "--embeddings-dimensions",
        dims,
    ]
    subprocess.run(cmd, cwd=tmp_path, check=True, capture_output=True, text=True)

    env = _read_env_map(tmp_path / ".env")
    assert env["OPENMEMORY_EMBEDDINGS_PROVIDER"] == "local"
    assert env["OPENAI_API_KEY"] == api_key
    assert env["OPENAI_BASE_URL"] == base_url
    assert env["OPENAI_EMBEDDING_MODEL"] == model
    assert env["OPENAI_EMBEDDING_DIMENSIONS"] == dims
    assert env["OPENMEMORY_EMBEDDINGS_BASE_URL"] == base_url
    assert env["OPENMEMORY_EMBEDDINGS_MODEL"] == model
    assert env["OPENMEMORY_EMBEDDINGS_API_KEY"] == api_key
    assert env["OPENMEMORY_EMBEDDINGS_DIMENSIONS"] == dims


def test_setup_rejects_invalid_embeddings_provider(tmp_path):
    """An unknown provider value must exit non-zero with a clear stderr message."""
    script = _prepare_tmp_setup(tmp_path)

    proc = subprocess.run(
        ["bash", str(script), "--embeddings-provider", "invalid-provider"],
        cwd=tmp_path,
        check=False,
        capture_output=True,
        text=True,
    )

    assert proc.returncode != 0
    assert "--embeddings-provider must be 'openai' or 'local'" in proc.stderr
Loading
Loading