Add generic memory gateway v1
This commit is contained in:
115
memory_gateway/api_v1.py
Normal file
115
memory_gateway/api_v1.py
Normal file
@ -0,0 +1,115 @@
|
||||
"""Generic Memory Gateway v1 HTTP API."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
|
||||
from .schemas import (
|
||||
AccessContext,
|
||||
CommitSessionRequest,
|
||||
CreateUserRequest,
|
||||
EpisodeAppendRequest,
|
||||
MemoryFeedbackRequest,
|
||||
MemoryPatchRequest,
|
||||
MemorySearchRequest,
|
||||
MemoryUpsertRequest,
|
||||
)
|
||||
from .server_auth import verify_api_key_compat
|
||||
from .services import service
|
||||
|
||||
# Shared router: every /v1 route below carries the /v1 prefix and requires a
# valid API key via the verify_api_key_compat dependency.
router = APIRouter(prefix="/v1", tags=["memory-v1"], dependencies=[Depends(verify_api_key_compat)])
|
||||
|
||||
|
||||
@router.post("/users")
async def create_user(request: CreateUserRequest):
    """Register a new user record via the service layer."""
    created = service.create_user(request)
    return created
|
||||
|
||||
|
||||
@router.get("/users/{user_id}")
async def get_user(user_id: str):
    """Fetch a single user record by id."""
    record = service.get_user(user_id)
    return record
|
||||
|
||||
|
||||
@router.post("/memory/search")
async def search_memory(request: MemorySearchRequest):
    """Search memories through the OpenViking-backed service path."""
    results = await service.search_memory_with_openviking(request)
    return results
|
||||
|
||||
|
||||
@router.post("/memory")
async def upsert_memory(request: MemoryUpsertRequest):
    """Create or update a memory record."""
    stored = service.upsert_memory(request)
    return stored
|
||||
|
||||
|
||||
@router.get("/memory/{memory_id}")
async def get_memory(
    memory_id: str,
    user_id: str = Query(...),
    agent_id: Optional[str] = Query(default=None),
    workspace_id: Optional[str] = Query(default=None),
    session_id: Optional[str] = Query(default=None),
):
    """Fetch one memory, scoped by the caller's access context."""
    ctx = AccessContext(
        user_id=user_id,
        agent_id=agent_id,
        workspace_id=workspace_id,
        session_id=session_id,
    )
    return service.get_memory(memory_id, ctx)
|
||||
|
||||
|
||||
@router.patch("/memory/{memory_id}")
async def patch_memory(
    memory_id: str,
    patch: MemoryPatchRequest,
    user_id: str = Query(...),
    agent_id: Optional[str] = Query(default=None),
    workspace_id: Optional[str] = Query(default=None),
    session_id: Optional[str] = Query(default=None),
):
    """Apply a partial update to a memory the caller may access."""
    ctx = AccessContext(
        user_id=user_id,
        agent_id=agent_id,
        workspace_id=workspace_id,
        session_id=session_id,
    )
    return service.patch_memory(memory_id, ctx, patch)
|
||||
|
||||
|
||||
@router.delete("/memory/{memory_id}")
async def delete_memory(
    memory_id: str,
    user_id: str = Query(...),
    agent_id: Optional[str] = Query(default=None),
    workspace_id: Optional[str] = Query(default=None),
    session_id: Optional[str] = Query(default=None),
):
    """Delete a memory the caller may access."""
    ctx = AccessContext(
        user_id=user_id,
        agent_id=agent_id,
        workspace_id=workspace_id,
        session_id=session_id,
    )
    return service.delete_memory(memory_id, ctx)
|
||||
|
||||
|
||||
@router.post("/episodes")
async def append_episode(request: EpisodeAppendRequest):
    """Append a temporary episode/session memory."""
    episode = service.append_episode(request)
    return episode
|
||||
|
||||
|
||||
@router.post("/sessions/{session_id}/commit")
async def commit_session(session_id: str, request: CommitSessionRequest):
    """Promote a session's episodic memories into long-term storage."""
    outcome = service.commit_session(session_id, request)
    return outcome
|
||||
|
||||
|
||||
@router.get("/users/{user_id}/profile")
async def get_profile(user_id: str):
    """Return the effective profile memory for a user."""
    profile = service.get_profile(user_id)
    return profile
|
||||
|
||||
|
||||
@router.post("/memory/{memory_id}/feedback")
async def memory_feedback(memory_id: str, request: MemoryFeedbackRequest):
    """Attach quality feedback to an existing memory."""
    ack = service.add_feedback(memory_id, request)
    return ack
|
||||
|
||||
|
||||
@router.get("/namespaces")
async def list_namespaces(
    user_id: str = Query(...),
    agent_id: Optional[str] = Query(default=None),
    workspace_id: Optional[str] = Query(default=None),
    session_id: Optional[str] = Query(default=None),
):
    """List namespaces visible to the caller's context."""
    ctx = AccessContext(
        user_id=user_id,
        agent_id=agent_id,
        workspace_id=workspace_id,
        session_id=session_id,
    )
    return service.list_namespaces(ctx)
|
||||
|
||||
|
||||
@router.get("/audit")
async def list_audit(limit: int = Query(default=100, ge=1, le=1000)):
    """Return up to `limit` (1..1000) recent audit entries."""
    entries = service.list_audit(limit)
    return entries
|
||||
|
||||
|
||||
@router.get("/evermemos/health")
async def evermemos_health():
    """Proxy a health check of the external EverMemOS service."""
    status = service.evermemos_health()
    return status
|
||||
@ -6,7 +6,7 @@ from typing import Optional
|
||||
import yaml
|
||||
from pydantic import ValidationError
|
||||
|
||||
from .types import Config, ServerConfig, OpenVikingConfig, MemoryConfig, LoggingConfig, LLMConfig, ObsidianConfig
|
||||
from .types import Config, ServerConfig, OpenVikingConfig, EverMemOSConfig, MemoryConfig, LoggingConfig, LLMConfig, ObsidianConfig, StorageConfig
|
||||
|
||||
|
||||
def load_config(config_path: Optional[str] = None) -> Config:
|
||||
@ -30,10 +30,12 @@ def load_config(config_path: Optional[str] = None) -> Config:
|
||||
return Config(
|
||||
server=ServerConfig(**data.get("server", {})),
|
||||
openviking=OpenVikingConfig(**data.get("openviking", {})),
|
||||
evermemos=EverMemOSConfig(**data.get("evermemos", {})),
|
||||
memory=MemoryConfig(**data.get("memory", {})),
|
||||
logging=LoggingConfig(**data.get("logging", {})),
|
||||
llm=LLMConfig(**data.get("llm", {})),
|
||||
obsidian=ObsidianConfig(**data.get("obsidian", {})),
|
||||
storage=StorageConfig(**data.get("storage", {})),
|
||||
)
|
||||
except (ValidationError, yaml.YAMLError) as e:
|
||||
print(f"配置文件解析错误: {e}")
|
||||
|
||||
113
memory_gateway/evermemos_client.py
Normal file
113
memory_gateway/evermemos_client.py
Normal file
@ -0,0 +1,113 @@
|
||||
"""Client for the external EverMemOS consolidation service."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
|
||||
from .config import get_config
|
||||
from .schemas import AccessContext, EpisodeRecord, MemoryRecord
|
||||
|
||||
|
||||
class EverMemOSError(RuntimeError):
    """Raised when the external EverMemOS service cannot consolidate a session."""
||||
|
||||
|
||||
class EverMemOSClient:
    """Small HTTP client with a tolerant response normalizer.

    The deployed EverMemOS API may evolve independently from Memory Gateway.
    Gateway sends a stable payload and accepts several common response shapes:
    `result`, `data`, or the raw top-level object with `candidates/promoted`.
    """

    def __init__(
        self,
        base_url: str | None = None,
        api_key: str | None = None,
        timeout: int | None = None,
        health_path: str | None = None,
        consolidate_path: str | None = None,
    ) -> None:
        # Explicit arguments win; anything left unset falls back to the
        # `evermemos` section of the loaded gateway config.
        config = get_config().evermemos
        self.base_url = (base_url or config.url).rstrip("/")
        # `is not None` (not truthiness) so an explicit empty string can
        # disable auth even when the config carries a key.
        self.api_key = api_key if api_key is not None else config.api_key
        self.timeout = timeout or config.timeout
        self.health_path = health_path or config.health_path
        self.consolidate_path = consolidate_path or config.consolidate_path

    def _headers(self) -> dict[str, str]:
        """Build request headers; both auth styles (X-API-Key and Bearer)
        are sent so either server convention accepts the call."""
        headers = {"Content-Type": "application/json"}
        if self.api_key:
            headers["X-API-Key"] = self.api_key
            headers["Authorization"] = f"Bearer {self.api_key}"
        return headers

    def health(self) -> dict[str, Any]:
        """Probe the remote health endpoint.

        Never raises: transport/HTTP errors are reported as a
        `{"status": "error", ...}` payload instead.
        """
        url = self.base_url + self.health_path
        try:
            with httpx.Client(timeout=self.timeout, headers=self._headers()) as client:
                response = client.get(url)
                response.raise_for_status()
                return {"status": "ok", "url": self.base_url, "response": response.json()}
        except Exception as exc:  # noqa: BLE001
            return {"status": "error", "url": self.base_url, "error": str(exc)}

    def consolidate_session(
        self,
        session_id: str,
        ctx: AccessContext,
        episodes: list[EpisodeRecord],
        existing_memories: list[MemoryRecord],
        min_importance: float,
        target_namespace: str | None,
    ) -> dict[str, Any]:
        """POST the session payload, trying several known endpoint paths.

        The configured path is tried first, then common fallbacks. A 404
        moves on to the next path; a connection-level failure aborts the
        scan. Raises EverMemOSError when no path succeeds.
        """
        payload = {
            "schema_version": "memory-gateway.evermemos.consolidate.v1",
            "session_id": session_id,
            "context": ctx.model_dump(mode="json"),
            "min_importance": min_importance,
            "target_namespace": target_namespace,
            "episodes": [episode.model_dump(mode="json") for episode in episodes],
            "existing_memories": [memory.model_dump(mode="json") for memory in existing_memories],
        }
        # Fallback paths observed across deployments; configured path first.
        paths = [
            self.consolidate_path,
            "/v1/sessions/consolidate",
            "/v1/memory/consolidate",
            "/api/v1/sessions/consolidate",
            "/api/consolidate",
            "/consolidate",
        ]
        errors: list[str] = []
        # dict.fromkeys de-duplicates while preserving order (the configured
        # path may equal one of the fallbacks).
        for path in dict.fromkeys(paths):
            try:
                with httpx.Client(timeout=self.timeout, headers=self._headers()) as client:
                    response = client.post(self.base_url + path, json=payload)
                    if response.status_code == 404:
                        errors.append(f"{path}: 404")
                        continue
                    response.raise_for_status()
                    return self._normalize_response(response.json(), path)
            except Exception as exc:  # noqa: BLE001
                errors.append(f"{path}: {exc}")
                # NOTE(review): matching on exception text is fragile — it
                # assumes httpx's wording for refusals/timeouts stays stable.
                # The intent is: if the host itself is unreachable, trying
                # more paths on the same host is pointless.
                if "Connection refused" in str(exc) or "timed out" in str(exc):
                    break
        raise EverMemOSError("; ".join(errors) or "EverMemOS consolidation failed")

    def _normalize_response(self, payload: dict[str, Any], path: str) -> dict[str, Any]:
        """Map the several observed response shapes onto one stable dict."""
        # Prefer an envelope (`result`/`data`); fall back to the raw object.
        # Note a present-but-empty envelope also falls through — tolerated by
        # design for this POC.
        data = payload.get("result") or payload.get("data") or payload
        return {
            "backend": "external",
            "service_url": self.base_url,
            "endpoint": path,
            "raw": payload,
            "session_id": data.get("session_id"),
            "episodes": data.get("episodes"),
            "candidates": data.get("candidates") or data.get("candidate_memories") or [],
            "promoted": data.get("promoted") or data.get("promoted_memories") or data.get("memories") or [],
            "duplicates": data.get("duplicates") or [],
            "conflicts": data.get("conflicts") or [],
            "review_drafts": data.get("review_drafts") or [],
        }
|
||||
|
||||
108
memory_gateway/evermemos_service.py
Normal file
108
memory_gateway/evermemos_service.py
Normal file
@ -0,0 +1,108 @@
|
||||
"""Standalone EverMemOS-compatible consolidation service.
|
||||
|
||||
This is a lightweight local service for POC use. It intentionally exposes the
|
||||
same HTTP contract that Memory Gateway calls:
|
||||
|
||||
POST /v1/sessions/consolidate
|
||||
|
||||
The service does not own Memory Gateway's metadata database. It receives
|
||||
episodes and existing memories in the request, returns candidate/promoted
|
||||
MemoryRecord payloads, and creates Obsidian review drafts for high-value or
|
||||
conflicting candidates.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from fastapi import FastAPI
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from .config import load_config, set_config
|
||||
from .repositories import InMemoryRepository
|
||||
from .schemas import AccessContext, EpisodeRecord, MemoryRecord
|
||||
from .workers.evermemos_worker import EverMemOSWorker
|
||||
|
||||
# Module-level logger; invalid-record skips during consolidation go here.
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConsolidateRequest(BaseModel):
    """Request body for POST /v1/sessions/consolidate (gateway wire contract)."""

    # Wire-contract version; bump when the payload shape changes.
    schema_version: str = "memory-gateway.evermemos.consolidate.v1"
    # Session whose episodes are being consolidated.
    session_id: str
    # Serialized AccessContext (user/agent/workspace/session ids).
    context: dict[str, Any]
    # Candidates below this importance are not promoted.
    min_importance: float = 0.6
    # Optional override for where promoted memories land.
    target_namespace: str | None = None
    # Raw EpisodeRecord payloads; invalid entries are skipped with a warning.
    episodes: list[dict[str, Any]] = Field(default_factory=list)
    # Raw MemoryRecord payloads used for duplicate/conflict detection.
    existing_memories: list[dict[str, Any]] = Field(default_factory=list)
|
||||
|
||||
|
||||
# Standalone FastAPI app — intentionally separate from the gateway's own server.
app = FastAPI(title="Local EverMemOS POC Service", version="0.1.0")
|
||||
|
||||
|
||||
@app.get("/health")
async def health() -> dict[str, Any]:
    """Liveness probe describing this service and its wire contract."""
    info: dict[str, Any] = {
        "status": "ok",
        "service": "evermemos-local",
        "version": "0.1.0",
        "contract": "memory-gateway.evermemos.consolidate.v1",
    }
    return info
|
||||
|
||||
|
||||
@app.post("/v1/sessions/consolidate")
async def consolidate_session(request: ConsolidateRequest) -> dict[str, Any]:
    """Hydrate a throwaway repository from the request, run the worker, and
    return the consolidation result as JSON-serializable payloads."""
    repo = InMemoryRepository()
    ctx = AccessContext.model_validate(request.context)

    # Hydrate existing memories first (duplicate/conflict detection input),
    # skipping anything that fails validation.
    for raw in request.existing_memories:
        try:
            repo.upsert_memory(MemoryRecord.model_validate(raw))
        except Exception as exc:  # noqa: BLE001
            logger.warning("Skipping invalid existing memory: %s", exc)

    for raw in request.episodes:
        try:
            repo.append_episode(EpisodeRecord.model_validate(raw))
        except Exception as exc:  # noqa: BLE001
            logger.warning("Skipping invalid episode: %s", exc)

    worker = EverMemOSWorker(repo)
    outcome = worker.consolidate_session(
        session_id=request.session_id,
        ctx=ctx,
        min_importance=request.min_importance,
        target_namespace=request.target_namespace,
    )
    body = {
        "session_id": outcome.session_id,
        "episodes": outcome.episodes,
        "candidates": [memory.model_dump(mode="json") for memory in outcome.candidates],
        "promoted": [memory.model_dump(mode="json") for memory in outcome.promoted],
        "duplicates": outcome.duplicates,
        "conflicts": outcome.conflicts,
        "review_drafts": outcome.review_drafts,
    }
    return {
        "status": "ok",
        "backend": "evermemos-local",
        "result": body,
    }
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: load config, install it globally, serve with uvicorn."""
    import uvicorn

    cli = argparse.ArgumentParser(description="Run the local EverMemOS POC service.")
    cli.add_argument("--config", default="config.yaml")
    cli.add_argument("--host", default="127.0.0.1")
    cli.add_argument("--port", type=int, default=1995)
    options = cli.parse_args()

    cfg = load_config(options.config)
    set_config(cfg)
    uvicorn.run(
        app,
        host=options.host,
        port=options.port,
        log_level=cfg.logging.level.lower(),
    )
|
||||
|
||||
|
||||
# Allow direct execution (python -m memory_gateway.evermemos_service).
if __name__ == "__main__":
    main()
|
||||
|
||||
135
memory_gateway/mcp_tools_v1.py
Normal file
135
memory_gateway/mcp_tools_v1.py
Normal file
@ -0,0 +1,135 @@
|
||||
"""MCP tool definitions for the generic Memory Gateway contract.
|
||||
|
||||
The legacy MCP endpoint in server.py remains available. These definitions are
|
||||
the target v1 tool contract for Nanobot, Hermes Agent, OpenClaw, and other
|
||||
agent frameworks.
|
||||
"""
|
||||
|
||||
# JSON-Schema tool definitions, one dict per MCP tool. Every tool takes the
# isolation ids (user/agent/workspace/session); only user_id is universally
# required. Shapes mirror the HTTP v1 contract in api_v1.py.
MEMORY_GATEWAY_MCP_TOOLS = [
    # Search accessible memories (maps to POST /v1/memory/search).
    {
        "name": "memory_search",
        "description": "Search accessible memories with user/agent/workspace/session isolation.",
        "inputSchema": {
            "type": "object",
            "properties": {
                "user_id": {"type": "string"},
                "agent_id": {"type": "string"},
                "workspace_id": {"type": "string"},
                "session_id": {"type": "string"},
                "query": {"type": "string"},
                "namespaces": {"type": "array", "items": {"type": "string"}},
                "limit": {"type": "integer", "default": 10},
            },
            "required": ["user_id", "query"],
        },
    },
    # Create/update a memory record (maps to POST /v1/memory).
    {
        "name": "memory_upsert",
        "description": "Create or update a memory record after ACL and namespace routing.",
        "inputSchema": {
            "type": "object",
            "properties": {
                "user_id": {"type": "string"},
                "agent_id": {"type": "string"},
                "workspace_id": {"type": "string"},
                "session_id": {"type": "string"},
                "namespace": {"type": "string"},
                "memory_type": {"type": "string"},
                "content": {"type": "string"},
                "summary": {"type": "string"},
                "tags": {"type": "array", "items": {"type": "string"}},
                "importance": {"type": "number"},
                "confidence": {"type": "number"},
                "visibility": {"type": "string"},
            },
            "required": ["user_id", "content"],
        },
    },
    # Append episodic/session memory (maps to POST /v1/episodes).
    {
        "name": "memory_append_episode",
        "description": "Append temporary episode/session memory without automatically promoting it.",
        "inputSchema": {
            "type": "object",
            "properties": {
                "user_id": {"type": "string"},
                "agent_id": {"type": "string"},
                "workspace_id": {"type": "string"},
                "session_id": {"type": "string"},
                "content": {"type": "string"},
                "events": {"type": "array", "items": {"type": "object"}},
                "tags": {"type": "array", "items": {"type": "string"}},
            },
            "required": ["user_id", "session_id", "content"],
        },
    },
    # Consolidate a session (maps to POST /v1/sessions/{id}/commit).
    {
        "name": "memory_commit_session",
        "description": "Promote selected session memories into long-term memory via consolidation.",
        "inputSchema": {
            "type": "object",
            "properties": {
                "user_id": {"type": "string"},
                "agent_id": {"type": "string"},
                "workspace_id": {"type": "string"},
                "session_id": {"type": "string"},
                "promote": {"type": "boolean", "default": True},
                "min_importance": {"type": "number", "default": 0.6},
            },
            "required": ["user_id", "session_id"],
        },
    },
    # Fetch a user's profile memory (maps to GET /v1/users/{id}/profile).
    {
        "name": "memory_get_profile",
        "description": "Get the effective user profile memory.",
        "inputSchema": {
            "type": "object",
            "properties": {"user_id": {"type": "string"}},
            "required": ["user_id"],
        },
    },
    # List visible namespaces (maps to GET /v1/namespaces).
    {
        "name": "memory_list_namespaces",
        "description": "List namespaces visible to the current user/agent/workspace context.",
        "inputSchema": {
            "type": "object",
            "properties": {
                "user_id": {"type": "string"},
                "agent_id": {"type": "string"},
                "workspace_id": {"type": "string"},
                "session_id": {"type": "string"},
            },
            "required": ["user_id"],
        },
    },
    # Delete/archive a memory (maps to DELETE /v1/memory/{id}).
    {
        "name": "memory_delete",
        "description": "Delete or archive a memory record if the caller has access.",
        "inputSchema": {
            "type": "object",
            "properties": {
                "user_id": {"type": "string"},
                "agent_id": {"type": "string"},
                "workspace_id": {"type": "string"},
                "memory_id": {"type": "string"},
            },
            "required": ["user_id", "memory_id"],
        },
    },
    # Attach feedback (maps to POST /v1/memory/{id}/feedback).
    {
        "name": "memory_feedback",
        "description": "Attach quality feedback to a memory record for pruning/merge decisions.",
        "inputSchema": {
            "type": "object",
            "properties": {
                "user_id": {"type": "string"},
                "agent_id": {"type": "string"},
                "workspace_id": {"type": "string"},
                "memory_id": {"type": "string"},
                "feedback": {"type": "string"},
                "comment": {"type": "string"},
            },
            "required": ["user_id", "memory_id", "feedback"],
        },
    },
]
|
||||
|
||||
115
memory_gateway/namespace.py
Normal file
115
memory_gateway/namespace.py
Normal file
@ -0,0 +1,115 @@
|
||||
"""Namespace construction and access checks for Memory Gateway."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from .schemas import AccessContext, MemoryRecord, NamespaceInfo, Visibility
|
||||
|
||||
|
||||
def user_profile_namespace(user_id: str) -> str:
    """Namespace holding a user's profile and stable preferences."""
    return "/".join(("user", user_id, "profile"))
|
||||
|
||||
|
||||
def user_preferences_namespace(user_id: str) -> str:
    """Namespace holding a user's explicit preferences."""
    return "/".join(("user", user_id, "preferences"))
|
||||
|
||||
|
||||
def user_long_term_namespace(user_id: str) -> str:
    """Namespace holding a user's durable long-term memories."""
    return "/".join(("user", user_id, "long_term"))
|
||||
|
||||
|
||||
def agent_memory_namespace(agent_id: str) -> str:
    """Namespace private to one agent's accumulated experience."""
    return "/".join(("agent", agent_id, "memory"))
|
||||
|
||||
|
||||
def workspace_shared_namespace(workspace_id: str) -> str:
    """Namespace shared by all members of a workspace/project."""
    return "/".join(("workspace", workspace_id, "shared"))
|
||||
|
||||
|
||||
def session_episodic_namespace(session_id: str) -> str:
    """Namespace for a session's temporary episodic memory."""
    return "/".join(("session", session_id, "episodic"))
|
||||
|
||||
|
||||
def global_public_namespace() -> str:
    """Namespace for knowledge readable by every caller."""
    return "global/public"
|
||||
|
||||
|
||||
def default_namespace_for_context(ctx: AccessContext, visibility: Visibility) -> str:
    """Pick where a new memory lands when no namespace is given.

    Priority: agent-only → agent namespace, workspace-shared → workspace
    namespace, active session → episodic namespace, else the user's
    long-term namespace.
    """
    if ctx.agent_id and visibility == Visibility.AGENT_ONLY:
        return agent_memory_namespace(ctx.agent_id)
    if ctx.workspace_id and visibility == Visibility.WORKSPACE_SHARED:
        return workspace_shared_namespace(ctx.workspace_id)
    if ctx.session_id:
        return session_episodic_namespace(ctx.session_id)
    return user_long_term_namespace(ctx.user_id)
|
||||
|
||||
|
||||
def can_access_memory(ctx: AccessContext, memory: MemoryRecord) -> bool:
    """Decide whether *ctx* may read *memory*.

    Expired records are never accessible; otherwise access is decided by
    the record's visibility level. Unknown visibilities deny by default.
    """
    expiry = memory.expires_at
    if expiry and expiry <= datetime.now(timezone.utc):
        return False

    vis = memory.visibility
    if vis == Visibility.GLOBAL:
        return True
    if vis == Visibility.PRIVATE:
        return ctx.user_id == memory.user_id
    if vis == Visibility.AGENT_ONLY:
        return ctx.user_id == memory.user_id and ctx.agent_id == memory.agent_id
    if vis == Visibility.WORKSPACE_SHARED:
        return memory.workspace_id is not None and ctx.workspace_id == memory.workspace_id
    return False
|
||||
|
||||
|
||||
def visible_namespaces(ctx: AccessContext) -> list[NamespaceInfo]:
    """List every namespace the given context may read.

    Always includes the user's private namespaces and the global public one;
    agent/workspace/session namespaces are appended only when the matching
    id is present on the context.
    """
    # Baseline set available to every authenticated user.
    namespaces = [
        NamespaceInfo(
            namespace=user_profile_namespace(ctx.user_id),
            owner_user_id=ctx.user_id,
            visibility=Visibility.PRIVATE,
            description="用户 profile 与稳定偏好",
        ),
        NamespaceInfo(
            namespace=user_preferences_namespace(ctx.user_id),
            owner_user_id=ctx.user_id,
            visibility=Visibility.PRIVATE,
            description="用户显式偏好",
        ),
        NamespaceInfo(
            namespace=user_long_term_namespace(ctx.user_id),
            owner_user_id=ctx.user_id,
            visibility=Visibility.PRIVATE,
            description="用户长期记忆",
        ),
        # Global namespace has no owner.
        NamespaceInfo(
            namespace=global_public_namespace(),
            visibility=Visibility.GLOBAL,
            description="全局公开知识",
        ),
    ]
    # Optional scopes, driven by which ids the caller supplied.
    if ctx.agent_id:
        namespaces.append(
            NamespaceInfo(
                namespace=agent_memory_namespace(ctx.agent_id),
                owner_user_id=ctx.user_id,
                visibility=Visibility.AGENT_ONLY,
                description="指定 agent 私有经验",
            )
        )
    if ctx.workspace_id:
        namespaces.append(
            NamespaceInfo(
                namespace=workspace_shared_namespace(ctx.workspace_id),
                owner_user_id=ctx.user_id,
                visibility=Visibility.WORKSPACE_SHARED,
                description="workspace / project 共享记忆",
            )
        )
    if ctx.session_id:
        namespaces.append(
            NamespaceInfo(
                namespace=session_episodic_namespace(ctx.session_id),
                owner_user_id=ctx.user_id,
                visibility=Visibility.PRIVATE,
                description="session 临时 episodic memory",
            )
        )
    return namespaces
|
||||
|
||||
77
memory_gateway/obsidian_review.py
Normal file
77
memory_gateway/obsidian_review.py
Normal file
@ -0,0 +1,77 @@
|
||||
"""Obsidian review draft writer."""
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
from .config import get_config
|
||||
from .schemas import MemoryRecord
|
||||
|
||||
|
||||
def _slugify(value: str, fallback: str) -> str:
|
||||
slug = re.sub(r"[^a-zA-Z0-9\u4e00-\u9fff_-]+", "-", value.lower()).strip("-")
|
||||
slug = re.sub(r"-+", "-", slug)[:80].strip("-")
|
||||
return slug or fallback
|
||||
|
||||
|
||||
def write_review_draft(memory: MemoryRecord, reason: str, conflict_ids: list[str] | None = None) -> Path:
    """Write a Markdown review note for *memory* into the Obsidian vault.

    Returns the path of the created file; an existing file with the same
    name is overwritten (the memory id in the filename makes collisions
    unlikely).
    """
    config = get_config()
    # getattr fallback keeps older configs without `review_dir` working.
    review_dir = getattr(config.obsidian, "review_dir", "Reviews/Queue")
    vault_path = Path(config.obsidian.vault_path)
    target_dir = vault_path / review_dir
    target_dir.mkdir(parents=True, exist_ok=True)

    # Prefer a human-readable title; fall back to a content prefix, then id.
    title = memory.summary or memory.content[:80] or memory.id
    filename = f"{_slugify(title, memory.id)}-{memory.id}.md"
    path = target_dir / filename
    conflict_ids = conflict_ids or []

    # YAML frontmatter followed by a checklist the reviewer ticks in Obsidian.
    # NOTE(review): `reason` and other fields are interpolated unquoted into
    # YAML — confirm upstream values cannot contain newlines/colons that
    # would break the frontmatter.
    content = "\n".join(
        [
            "---",
            "type: memory_review",
            f"memory_id: {memory.id}",
            f"user_id: {memory.user_id}",
            f"agent_id: {memory.agent_id or ''}",
            f"workspace_id: {memory.workspace_id or ''}",
            f"namespace: {memory.namespace}",
            f"visibility: {memory.visibility.value}",
            f"importance: {memory.importance}",
            f"confidence: {memory.confidence}",
            f"reason: {reason}",
            f"created_at: {datetime.now(timezone.utc).isoformat()}",
            "tags:",
            "  - memory/review",
            "  - source/evermemos",
            "---",
            "",
            f"# Memory Review - {title}",
            "",
            "## Candidate",
            "",
            memory.content,
            "",
            "## Summary",
            "",
            memory.summary or "",
            "",
            "## Proposed Action",
            "",
            "- [ ] Accept",
            "- [ ] Edit",
            "- [ ] Reject",
            "- [ ] Merge",
            "- [ ] Archive",
            "",
            "## Conflict IDs",
            "",
            # Bulleted list of conflicting memory ids, or a "none" placeholder.
            "\n".join(f"- {memory_id}" for memory_id in conflict_ids) if conflict_ids else "- none",
            "",
            "## Notes",
            "",
        ]
    )
    path.write_text(content, encoding="utf-8")
    return path
|
||||
|
||||
328
memory_gateway/repositories.py
Normal file
328
memory_gateway/repositories.py
Normal file
@ -0,0 +1,328 @@
|
||||
"""Metadata repositories for Memory Gateway.
|
||||
|
||||
SQLite is the default POC store. The in-memory implementation is retained for
|
||||
small isolated tests and for cases where persistence is explicitly disabled.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sqlite3
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Iterable, Optional, Protocol
|
||||
|
||||
from .config import get_config
|
||||
from .schemas import AuditLog, EpisodeRecord, MemoryRecord, ProfileRecord, UserRecord
|
||||
|
||||
|
||||
class MetadataRepository(Protocol):
    """Structural interface every metadata store must satisfy.

    Satisfied by InMemoryRepository and SQLiteRepository below; services
    should depend on this Protocol rather than a concrete store.
    """

    # --- users ---
    def create_user(self, user: UserRecord) -> UserRecord: ...
    def get_user(self, user_id: str) -> Optional[UserRecord]: ...
    # --- long-term memories ---
    def upsert_memory(self, memory: MemoryRecord) -> MemoryRecord: ...
    def get_memory(self, memory_id: str) -> Optional[MemoryRecord]: ...
    def delete_memory(self, memory_id: str) -> bool: ...
    def list_memories(self) -> Iterable[MemoryRecord]: ...
    # --- episodic/session memories ---
    def append_episode(self, episode: EpisodeRecord) -> EpisodeRecord: ...
    def list_session_episodes(self, session_id: str) -> list[EpisodeRecord]: ...
    # --- user profiles ---
    def get_profile(self, user_id: str) -> Optional[ProfileRecord]: ...
    def upsert_profile(self, profile: ProfileRecord) -> ProfileRecord: ...
    # --- audit trail ---
    def add_audit(self, audit: AuditLog) -> AuditLog: ...
    def list_audit(self, limit: int = 100) -> list[AuditLog]: ...
|
||||
|
||||
|
||||
def _json_dump_model(model) -> str:
|
||||
return json.dumps(model.model_dump(mode="json"), ensure_ascii=False)
|
||||
|
||||
|
||||
def _json_load_model(model_cls, payload: str):
|
||||
return model_cls.model_validate(json.loads(payload))
|
||||
|
||||
|
||||
class InMemoryRepository:
    """Dict-backed MetadataRepository for tests and non-persistent runs.

    All state lives in plain dicts keyed by record id; nothing survives
    process restart.
    """

    def __init__(self) -> None:
        self.users: dict[str, UserRecord] = {}
        self.memories: dict[str, MemoryRecord] = {}
        self.episodes: dict[str, EpisodeRecord] = {}
        self.profiles: dict[str, ProfileRecord] = {}
        self.audit_logs: list[AuditLog] = []

    def create_user(self, user: UserRecord) -> UserRecord:
        """Store *user*, stamping timestamps and seeding a default profile."""
        stamp = datetime.now(timezone.utc)
        user.created_at = stamp
        user.updated_at = stamp
        self.users[user.id] = user
        default_ns = user.profile_namespace or f"user/{user.id}/profile"
        self.profiles.setdefault(user.id, ProfileRecord(user_id=user.id, namespace=default_ns))
        return user

    def get_user(self, user_id: str) -> Optional[UserRecord]:
        """Return the stored user, or None."""
        return self.users.get(user_id)

    def upsert_memory(self, memory: MemoryRecord) -> MemoryRecord:
        """Insert or replace by id; updates bump version and keep created_at."""
        previous = self.memories.get(memory.id)
        if previous:
            memory.version = previous.version + 1
            memory.created_at = previous.created_at
        memory.updated_at = datetime.now(timezone.utc)
        self.memories[memory.id] = memory
        return memory

    def get_memory(self, memory_id: str) -> Optional[MemoryRecord]:
        """Return the stored memory, or None."""
        return self.memories.get(memory_id)

    def delete_memory(self, memory_id: str) -> bool:
        """Remove a memory; True when something was actually deleted."""
        return self.memories.pop(memory_id, None) is not None

    def list_memories(self) -> Iterable[MemoryRecord]:
        """Snapshot of all stored memories."""
        return list(self.memories.values())

    def append_episode(self, episode: EpisodeRecord) -> EpisodeRecord:
        """Store an episode keyed by its id."""
        self.episodes[episode.id] = episode
        return episode

    def list_session_episodes(self, session_id: str) -> list[EpisodeRecord]:
        """All episodes belonging to one session."""
        return [ep for ep in self.episodes.values() if ep.session_id == session_id]

    def get_profile(self, user_id: str) -> Optional[ProfileRecord]:
        """Return the user's profile, or None."""
        return self.profiles.get(user_id)

    def upsert_profile(self, profile: ProfileRecord) -> ProfileRecord:
        """Replace the user's profile, bumping version and updated_at."""
        profile.updated_at = datetime.now(timezone.utc)
        profile.version += 1
        self.profiles[profile.user_id] = profile
        return profile

    def add_audit(self, audit: AuditLog) -> AuditLog:
        """Append an audit entry."""
        self.audit_logs.append(audit)
        return audit

    def list_audit(self, limit: int = 100) -> list[AuditLog]:
        """The most recent *limit* audit entries, oldest first."""
        return self.audit_logs[-limit:]
|
||||
|
||||
|
||||
class SQLiteRepository:
|
||||
def __init__(self, db_path: str | Path) -> None:
|
||||
self.db_path = Path(db_path)
|
||||
self.db_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
self._init_schema()
|
||||
|
||||
def _connect(self) -> sqlite3.Connection:
|
||||
conn = sqlite3.connect(self.db_path)
|
||||
conn.row_factory = sqlite3.Row
|
||||
return conn
|
||||
|
||||
def _init_schema(self) -> None:
|
||||
with self._connect() as conn:
|
||||
conn.executescript(
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id TEXT PRIMARY KEY,
|
||||
payload TEXT NOT NULL,
|
||||
updated_at TEXT NOT NULL
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS profiles (
|
||||
user_id TEXT PRIMARY KEY,
|
||||
payload TEXT NOT NULL,
|
||||
updated_at TEXT NOT NULL
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS memories (
|
||||
id TEXT PRIMARY KEY,
|
||||
user_id TEXT NOT NULL,
|
||||
agent_id TEXT,
|
||||
workspace_id TEXT,
|
||||
session_id TEXT,
|
||||
namespace TEXT NOT NULL,
|
||||
memory_type TEXT NOT NULL,
|
||||
visibility TEXT NOT NULL,
|
||||
importance REAL NOT NULL,
|
||||
confidence REAL NOT NULL,
|
||||
expires_at TEXT,
|
||||
archived_at TEXT,
|
||||
payload TEXT NOT NULL,
|
||||
updated_at TEXT NOT NULL
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS idx_memories_user ON memories(user_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_memories_namespace ON memories(namespace);
|
||||
CREATE INDEX IF NOT EXISTS idx_memories_workspace ON memories(workspace_id);
|
||||
CREATE TABLE IF NOT EXISTS episodes (
|
||||
id TEXT PRIMARY KEY,
|
||||
user_id TEXT NOT NULL,
|
||||
agent_id TEXT,
|
||||
workspace_id TEXT,
|
||||
session_id TEXT NOT NULL,
|
||||
namespace TEXT NOT NULL,
|
||||
payload TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS idx_episodes_session ON episodes(session_id);
|
||||
CREATE TABLE IF NOT EXISTS audit_logs (
|
||||
id TEXT PRIMARY KEY,
|
||||
actor_user_id TEXT,
|
||||
actor_agent_id TEXT,
|
||||
action TEXT NOT NULL,
|
||||
target_type TEXT NOT NULL,
|
||||
target_id TEXT,
|
||||
namespace TEXT,
|
||||
decision TEXT NOT NULL,
|
||||
payload TEXT NOT NULL,
|
||||
created_at TEXT NOT NULL
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS idx_audit_created ON audit_logs(created_at);
|
||||
"""
|
||||
)
|
||||
|
||||
def create_user(self, user: UserRecord) -> UserRecord:
    """Insert (or overwrite) a user row and guarantee a profile row exists.

    Stamps ``updated_at`` (and ``created_at`` when missing) before writing.
    NOTE(review): INSERT OR REPLACE means re-creating an existing user id also
    resets their profile via ``upsert_profile`` below — confirm intended.
    """
    now = datetime.now(timezone.utc)
    user.created_at = user.created_at or now
    user.updated_at = now
    with self._connect() as conn:
        conn.execute(
            "INSERT OR REPLACE INTO users(id, payload, updated_at) VALUES (?, ?, ?)",
            (user.id, _json_dump_model(user), user.updated_at.isoformat()),
        )
    # Every user gets a profile rooted at their personal namespace.
    self.upsert_profile(ProfileRecord(user_id=user.id, namespace=user.profile_namespace or f"user/{user.id}/profile"))
    return user
|
||||
|
||||
def get_user(self, user_id: str) -> Optional[UserRecord]:
    """Load a single user by id, or return None when no such row exists."""
    query = "SELECT payload FROM users WHERE id = ?"
    with self._connect() as conn:
        row = conn.execute(query, (user_id,)).fetchone()
    if row is None:
        return None
    return _json_load_model(UserRecord, row["payload"])
|
||||
|
||||
def upsert_memory(self, memory: MemoryRecord) -> MemoryRecord:
    """Insert a memory or replace an existing one, bumping its version.

    When a row with the same id already exists, the incoming record inherits
    the stored ``created_at`` and gets ``version = existing.version + 1``;
    ``updated_at`` is always refreshed to now (UTC).
    """
    existing = self.get_memory(memory.id)
    now = datetime.now(timezone.utc)
    if existing:
        memory.version = existing.version + 1
        memory.created_at = existing.created_at
    memory.updated_at = now
    with self._connect() as conn:
        conn.execute(
            """
            INSERT OR REPLACE INTO memories(
                id, user_id, agent_id, workspace_id, session_id, namespace,
                memory_type, visibility, importance, confidence, expires_at,
                archived_at, payload, updated_at
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                memory.id,
                memory.user_id,
                memory.agent_id,
                memory.workspace_id,
                memory.session_id,
                memory.namespace,
                memory.memory_type.value,
                memory.visibility.value,
                memory.importance,
                memory.confidence,
                # Datetimes are stored as ISO-8601 text (or NULL).
                memory.expires_at.isoformat() if memory.expires_at else None,
                memory.archived_at.isoformat() if memory.archived_at else None,
                _json_dump_model(memory),
                memory.updated_at.isoformat(),
            ),
        )
    return memory
|
||||
|
||||
def get_memory(self, memory_id: str) -> Optional[MemoryRecord]:
    """Load one memory record by id, or None when it does not exist."""
    query = "SELECT payload FROM memories WHERE id = ?"
    with self._connect() as conn:
        row = conn.execute(query, (memory_id,)).fetchone()
    if row is None:
        return None
    return _json_load_model(MemoryRecord, row["payload"])
|
||||
|
||||
def delete_memory(self, memory_id: str) -> bool:
    """Delete one memory row; True when a row was actually removed."""
    with self._connect() as conn:
        result = conn.execute("DELETE FROM memories WHERE id = ?", (memory_id,))
        removed = result.rowcount > 0
    return removed
|
||||
|
||||
def list_memories(self) -> Iterable[MemoryRecord]:
    """Materialise every stored memory record (full table scan)."""
    with self._connect() as conn:
        payloads = [row["payload"] for row in conn.execute("SELECT payload FROM memories")]
    return [_json_load_model(MemoryRecord, payload) for payload in payloads]
|
||||
|
||||
def append_episode(self, episode: EpisodeRecord) -> EpisodeRecord:
    """Persist a single episode row (INSERT OR REPLACE keyed by id).

    Unlike memories, episodes carry no version bookkeeping: the record is
    written as-is with its JSON payload and ISO-8601 ``created_at``.
    """
    with self._connect() as conn:
        conn.execute(
            """
            INSERT OR REPLACE INTO episodes(
                id, user_id, agent_id, workspace_id, session_id, namespace, payload, created_at
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                episode.id,
                episode.user_id,
                episode.agent_id,
                episode.workspace_id,
                episode.session_id,
                episode.namespace,
                _json_dump_model(episode),
                episode.created_at.isoformat(),
            ),
        )
    return episode
|
||||
|
||||
def list_session_episodes(self, session_id: str) -> list[EpisodeRecord]:
    """Return a session's episodes ordered oldest-first."""
    sql = "SELECT payload FROM episodes WHERE session_id = ? ORDER BY created_at ASC"
    with self._connect() as conn:
        rows = conn.execute(sql, (session_id,)).fetchall()
    return [_json_load_model(EpisodeRecord, entry["payload"]) for entry in rows]
|
||||
|
||||
def get_profile(self, user_id: str) -> Optional[ProfileRecord]:
    """Load the profile row for a user, or None when absent."""
    query = "SELECT payload FROM profiles WHERE user_id = ?"
    with self._connect() as conn:
        row = conn.execute(query, (user_id,)).fetchone()
    if row is None:
        return None
    return _json_load_model(ProfileRecord, row["payload"])
|
||||
|
||||
def upsert_profile(self, profile: ProfileRecord) -> ProfileRecord:
    """Write (or overwrite) a user's profile row, refreshing ``updated_at``.

    Keyed by ``user_id`` (the table's primary key), so each user has at most
    one profile row.
    """
    profile.updated_at = datetime.now(timezone.utc)
    with self._connect() as conn:
        conn.execute(
            "INSERT OR REPLACE INTO profiles(user_id, payload, updated_at) VALUES (?, ?, ?)",
            (profile.user_id, _json_dump_model(profile), profile.updated_at.isoformat()),
        )
    return profile
|
||||
|
||||
def add_audit(self, audit: AuditLog) -> AuditLog:
    """Persist one audit-log entry (INSERT OR REPLACE keyed by id).

    Stores the searchable columns alongside the full JSON payload; the
    caller is responsible for populating ``created_at``.
    """
    with self._connect() as conn:
        conn.execute(
            """
            INSERT OR REPLACE INTO audit_logs(
                id, actor_user_id, actor_agent_id, action, target_type, target_id,
                namespace, decision, payload, created_at
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                audit.id,
                audit.actor_user_id,
                audit.actor_agent_id,
                audit.action,
                audit.target_type,
                audit.target_id,
                audit.namespace,
                audit.decision,
                _json_dump_model(audit),
                audit.created_at.isoformat(),
            ),
        )
    return audit
|
||||
|
||||
def list_audit(self, limit: int = 100) -> list[AuditLog]:
    """Return the most recent audit entries, newest first."""
    sql = "SELECT payload FROM audit_logs ORDER BY created_at DESC LIMIT ?"
    with self._connect() as conn:
        rows = conn.execute(sql, (limit,)).fetchall()
    return [_json_load_model(AuditLog, entry["payload"]) for entry in rows]
|
||||
|
||||
|
||||
def build_repository() -> MetadataRepository:
    """Select the metadata backend from configuration.

    The "memory" backend yields the in-process store; any other value falls
    back to the SQLite-backed repository at the configured path.
    """
    storage = get_config().storage
    if storage.backend == "memory":
        return InMemoryRepository()
    return SQLiteRepository(storage.sqlite_path)
|
||||
|
||||
|
||||
repository = build_repository()
|
||||
|
||||
227
memory_gateway/schemas.py
Normal file
227
memory_gateway/schemas.py
Normal file
@ -0,0 +1,227 @@
|
||||
"""Core schemas for the generic Memory Gateway v1 API."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from typing import Any, Literal, Optional
|
||||
from uuid import uuid4
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
def utc_now() -> datetime:
    """Return the current moment as a timezone-aware UTC datetime."""
    return datetime.now(tz=timezone.utc)
|
||||
|
||||
|
||||
class Visibility(str, Enum):
    """Access scope attached to a memory record and its ACL."""

    PRIVATE = "private"
    AGENT_ONLY = "agent-only"
    WORKSPACE_SHARED = "workspace-shared"
    GLOBAL = "global"
|
||||
|
||||
|
||||
class MemoryType(str, Enum):
    """Category of a stored memory; used for search filtering."""

    PROFILE = "profile"
    PREFERENCE = "preference"
    FACT = "fact"
    DECISION = "decision"
    SUMMARY = "summary"
    EPISODIC = "episodic"
    PROCEDURE = "procedure"
    EXPERIENCE = "experience"
    KNOWLEDGE = "knowledge"
|
||||
|
||||
|
||||
class SourceType(str, Enum):
    """Where a memory or episode originated."""

    CONVERSATION = "conversation"
    TASK = "task"
    AGENT = "agent"
    OBSIDIAN = "obsidian"
    OPENVIKING = "openviking"
    EVERMEMOS = "evermemos"
    MANUAL = "manual"
|
||||
|
||||
|
||||
class UserRecord(BaseModel):
    """Persistent user identity stored by the gateway."""

    # Auto-generated "user_<12 hex>" id unless supplied by the caller.
    id: str = Field(default_factory=lambda: f"user_{uuid4().hex[:12]}")
    display_name: str
    status: Literal["active", "disabled"] = "active"
    # Namespace where the user's profile lives; the service fills this on creation.
    profile_namespace: Optional[str] = None
    preferences: dict[str, Any] = Field(default_factory=dict)
    created_at: datetime = Field(default_factory=utc_now)
    updated_at: datetime = Field(default_factory=utc_now)
|
||||
|
||||
|
||||
class AgentRecord(BaseModel):
    """An agent registered with the gateway, optionally owned by a user."""

    id: str
    name: str
    # Free-form label of the agent framework (caller-supplied).
    framework: str
    owner_user_id: Optional[str] = None
    created_at: datetime = Field(default_factory=utc_now)
|
||||
|
||||
|
||||
class WorkspaceRecord(BaseModel):
    """A shared workspace: an owner plus member users and allowed agents."""

    id: str
    name: str
    owner_user_id: str
    member_user_ids: list[str] = Field(default_factory=list)
    allowed_agent_ids: list[str] = Field(default_factory=list)
    created_at: datetime = Field(default_factory=utc_now)
|
||||
|
||||
|
||||
class SessionRecord(BaseModel):
    """A working session that accumulates episodes until commit/expiry."""

    # Auto-generated "sess_<12 hex>" id unless supplied.
    id: str = Field(default_factory=lambda: f"sess_{uuid4().hex[:12]}")
    user_id: str
    agent_id: Optional[str] = None
    workspace_id: Optional[str] = None
    status: Literal["open", "committed", "expired"] = "open"
    expires_at: Optional[datetime] = None
    created_at: datetime = Field(default_factory=utc_now)
    updated_at: datetime = Field(default_factory=utc_now)
|
||||
|
||||
|
||||
class ACLRule(BaseModel):
    """Fine-grained allow-lists layered on top of a memory's visibility."""

    visibility: Visibility = Visibility.PRIVATE
    allowed_user_ids: list[str] = Field(default_factory=list)
    allowed_agent_ids: list[str] = Field(default_factory=list)
    allowed_workspace_ids: list[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class MemoryRecord(BaseModel):
    """A single long-term memory entry and its access/lifecycle metadata."""

    # Auto-generated "mem_<16 hex>" id unless supplied.
    id: str = Field(default_factory=lambda: f"mem_{uuid4().hex[:16]}")
    user_id: str
    agent_id: Optional[str] = None
    workspace_id: Optional[str] = None
    session_id: Optional[str] = None
    namespace: str
    memory_type: MemoryType = MemoryType.FACT
    content: str
    summary: Optional[str] = None
    tags: list[str] = Field(default_factory=list)
    # Ranking signals, both clamped to [0, 1].
    importance: float = Field(default=0.5, ge=0, le=1)
    confidence: float = Field(default=0.8, ge=0, le=1)
    visibility: Visibility = Visibility.PRIVATE
    acl: ACLRule = Field(default_factory=ACLRule)
    source: SourceType = SourceType.MANUAL
    source_ref: Optional[str] = None
    # Reference into the external vector store, if any.
    embedding_ref: Optional[str] = None
    created_at: datetime = Field(default_factory=utc_now)
    updated_at: datetime = Field(default_factory=utc_now)
    expires_at: Optional[datetime] = None
    archived_at: Optional[datetime] = None
    # Incremented by the repository/service on each update.
    version: int = 1
|
||||
|
||||
|
||||
class EpisodeRecord(BaseModel):
    """One short-term episode tied to a session, later consolidated."""

    # Auto-generated "epi_<16 hex>" id unless supplied.
    id: str = Field(default_factory=lambda: f"epi_{uuid4().hex[:16]}")
    user_id: str
    agent_id: Optional[str] = None
    workspace_id: Optional[str] = None
    session_id: str
    namespace: str
    content: str
    summary: Optional[str] = None
    # Structured sub-events captured within the episode (free-form dicts).
    events: list[dict[str, Any]] = Field(default_factory=list)
    tags: list[str] = Field(default_factory=list)
    source: SourceType = SourceType.CONVERSATION
    created_at: datetime = Field(default_factory=utc_now)
    expires_at: Optional[datetime] = None
|
||||
|
||||
|
||||
class ProfileRecord(BaseModel):
    """Aggregated per-user profile derived from promoted memories."""

    # Auto-generated "profile_<12 hex>" id unless supplied.
    id: str = Field(default_factory=lambda: f"profile_{uuid4().hex[:12]}")
    user_id: str
    namespace: str
    display_name: Optional[str] = None
    stable_facts: list[str] = Field(default_factory=list)
    preferences: dict[str, Any] = Field(default_factory=dict)
    working_style: list[str] = Field(default_factory=list)
    # Provenance: ids of memories that last contributed to this profile.
    updated_from_memory_ids: list[str] = Field(default_factory=list)
    version: int = 1
    updated_at: datetime = Field(default_factory=utc_now)
|
||||
|
||||
|
||||
class AuditLog(BaseModel):
    """One access/mutation audit entry recorded by the service layer."""

    # Auto-generated "audit_<16 hex>" id unless supplied.
    id: str = Field(default_factory=lambda: f"audit_{uuid4().hex[:16]}")
    actor_user_id: Optional[str] = None
    actor_agent_id: Optional[str] = None
    action: str
    target_type: str
    target_id: Optional[str] = None
    namespace: Optional[str] = None
    # "deny" marks refused access attempts; "allow" is the default.
    decision: Literal["allow", "deny"] = "allow"
    reason: Optional[str] = None
    metadata: dict[str, Any] = Field(default_factory=dict)
    created_at: datetime = Field(default_factory=utc_now)
|
||||
|
||||
|
||||
class AccessContext(BaseModel):
    """Identity/context of the caller, used for all access-control checks."""

    user_id: str
    agent_id: Optional[str] = None
    workspace_id: Optional[str] = None
    session_id: Optional[str] = None
|
||||
|
||||
|
||||
class CreateUserRequest(BaseModel):
    """Payload for POST /v1/users; user_id is optional (auto-generated)."""

    display_name: str
    user_id: Optional[str] = None
    preferences: dict[str, Any] = Field(default_factory=dict)
|
||||
|
||||
|
||||
class MemorySearchRequest(AccessContext):
    """Search parameters plus caller context (inherits AccessContext ids)."""

    query: str
    # Empty filter lists mean "no restriction" in the service layer.
    namespaces: list[str] = Field(default_factory=list)
    memory_types: list[MemoryType] = Field(default_factory=list)
    tags: list[str] = Field(default_factory=list)
    limit: int = Field(default=10, ge=1, le=100)
|
||||
|
||||
|
||||
class MemoryUpsertRequest(AccessContext):
    """Payload for creating a memory; namespace defaults from the context."""

    # When omitted, the service derives a namespace from the caller context.
    namespace: Optional[str] = None
    memory_type: MemoryType = MemoryType.FACT
    content: str
    summary: Optional[str] = None
    tags: list[str] = Field(default_factory=list)
    importance: float = Field(default=0.5, ge=0, le=1)
    confidence: float = Field(default=0.8, ge=0, le=1)
    visibility: Visibility = Visibility.PRIVATE
    source: SourceType = SourceType.MANUAL
    expires_at: Optional[datetime] = None
|
||||
|
||||
|
||||
class MemoryPatchRequest(BaseModel):
    """Partial update; only fields explicitly set are applied (exclude_unset)."""

    content: Optional[str] = None
    summary: Optional[str] = None
    tags: Optional[list[str]] = None
    importance: Optional[float] = Field(default=None, ge=0, le=1)
    confidence: Optional[float] = Field(default=None, ge=0, le=1)
    visibility: Optional[Visibility] = None
    expires_at: Optional[datetime] = None
|
||||
|
||||
|
||||
class EpisodeAppendRequest(AccessContext):
    """Payload for appending a short-term episode to the caller's session."""

    content: str
    # When omitted, the service derives a private namespace from the context.
    namespace: Optional[str] = None
    events: list[dict[str, Any]] = Field(default_factory=list)
    tags: list[str] = Field(default_factory=list)
    source: SourceType = SourceType.CONVERSATION
    expires_at: Optional[datetime] = None
|
||||
|
||||
|
||||
class CommitSessionRequest(AccessContext):
    """Options for consolidating a session's episodes into memories."""

    # When False, the commit only reports episode counts without promotion.
    promote: bool = True
    min_importance: float = Field(default=0.6, ge=0, le=1)
    target_namespace: Optional[str] = None
|
||||
|
||||
|
||||
class MemoryFeedbackRequest(AccessContext):
    """Caller feedback on a single memory's usefulness/accuracy."""

    feedback: Literal["useful", "not_useful", "incorrect", "duplicate", "outdated"]
    comment: Optional[str] = None
|
||||
|
||||
|
||||
class NamespaceInfo(BaseModel):
    """Descriptor of one namespace visible to a caller."""

    namespace: str
    owner_user_id: Optional[str] = None
    visibility: Visibility
    description: str
|
||||
|
||||
@ -24,6 +24,16 @@ from .config import get_config, set_config, Config
|
||||
from .openviking_client import get_openviking_client, close_openviking_client
|
||||
from .document_ingest import convert_file_to_markdown, save_markdown_to_obsidian, slugify
|
||||
from .llm import LLMConfigurationError, LLMSummaryError, summarize_with_llm
|
||||
from .mcp_tools_v1 import MEMORY_GATEWAY_MCP_TOOLS
|
||||
from .schemas import (
|
||||
AccessContext,
|
||||
CommitSessionRequest,
|
||||
EpisodeAppendRequest,
|
||||
MemoryFeedbackRequest,
|
||||
MemorySearchRequest,
|
||||
MemoryUpsertRequest,
|
||||
)
|
||||
from .services import service as v1_service
|
||||
from .types import SearchRequest, AddMemoryRequest, AddResourceRequest, CommitSummaryRequest
|
||||
|
||||
# 配置日志
|
||||
@ -41,7 +51,7 @@ mcp_server = Server("memory-gateway")
|
||||
@mcp_server.list_tools()
|
||||
async def list_tools() -> list[Tool]:
|
||||
"""列出可用的 MCP 工具"""
|
||||
return [
|
||||
legacy_tools = [
|
||||
Tool(
|
||||
name="search",
|
||||
description="语义搜索记忆和资源",
|
||||
@ -135,12 +145,25 @@ async def list_tools() -> list[Tool]:
|
||||
},
|
||||
),
|
||||
]
|
||||
v1_tools = [
|
||||
Tool(
|
||||
name=definition["name"],
|
||||
description=definition["description"],
|
||||
inputSchema=definition["inputSchema"],
|
||||
)
|
||||
for definition in MEMORY_GATEWAY_MCP_TOOLS
|
||||
]
|
||||
return legacy_tools + v1_tools
|
||||
|
||||
|
||||
@mcp_server.call_tool()
|
||||
async def call_tool(name: str, arguments: Any) -> list[TextContent]:
|
||||
"""调用 MCP 工具"""
|
||||
try:
|
||||
if name.startswith("memory_"):
|
||||
result = await call_v1_memory_tool(name, arguments or {})
|
||||
return [TextContent(type="text", text=json.dumps(result, ensure_ascii=False, default=str))]
|
||||
|
||||
ov_client = await get_openviking_client()
|
||||
|
||||
if name == "search":
|
||||
@ -200,6 +223,60 @@ async def call_tool(name: str, arguments: Any) -> list[TextContent]:
|
||||
return [TextContent(type="text", text=f"Error: {str(e)}")]
|
||||
|
||||
|
||||
async def call_v1_memory_tool(name: str, arguments: dict[str, Any]) -> dict[str, Any]:
    """Dispatch v1 Memory Gateway MCP tools to the same service used by /v1.

    Each branch validates *arguments* into the matching request model, calls
    the shared service singleton, and returns a JSON-serialisable dict.
    Raises ValueError for unknown tool names or a missing session_id.
    """
    if name == "memory_search":
        # Only async tool: search fans out to OpenViking.
        return _jsonable(await v1_service.search_memory_with_openviking(MemorySearchRequest(**arguments)))
    if name == "memory_upsert":
        return v1_service.upsert_memory(MemoryUpsertRequest(**arguments)).model_dump(mode="json")
    if name == "memory_append_episode":
        return v1_service.append_episode(EpisodeAppendRequest(**arguments)).model_dump(mode="json")
    if name == "memory_commit_session":
        session_id = arguments.get("session_id")
        if not session_id:
            raise ValueError("session_id is required")
        return _jsonable(v1_service.commit_session(session_id, CommitSessionRequest(**arguments)))
    if name == "memory_get_profile":
        return v1_service.get_profile(arguments["user_id"]).model_dump(mode="json")
    if name == "memory_list_namespaces":
        return {
            "namespaces": [
                item.model_dump(mode="json")
                for item in v1_service.list_namespaces(
                    AccessContext(
                        user_id=arguments["user_id"],
                        agent_id=arguments.get("agent_id"),
                        workspace_id=arguments.get("workspace_id"),
                        session_id=arguments.get("session_id"),
                    )
                )
            ]
        }
    if name == "memory_delete":
        return v1_service.delete_memory(
            arguments["memory_id"],
            AccessContext(
                user_id=arguments["user_id"],
                agent_id=arguments.get("agent_id"),
                workspace_id=arguments.get("workspace_id"),
                session_id=arguments.get("session_id"),
            ),
        )
    if name == "memory_feedback":
        return v1_service.add_feedback(arguments["memory_id"], MemoryFeedbackRequest(**arguments))
    raise ValueError(f"Unknown v1 memory tool: {name}")
|
||||
|
||||
|
||||
def _jsonable(value: Any) -> Any:
|
||||
if hasattr(value, "model_dump"):
|
||||
return value.model_dump(mode="json")
|
||||
if isinstance(value, list):
|
||||
return [_jsonable(item) for item in value]
|
||||
if isinstance(value, dict):
|
||||
return {key: _jsonable(item) for key, item in value.items()}
|
||||
return value
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
"""应用生命周期管理"""
|
||||
@ -401,10 +478,12 @@ async def health_check():
|
||||
try:
|
||||
ov_client = await get_openviking_client()
|
||||
ov_status = await ov_client.health_check()
|
||||
evermemos_status = v1_service.evermemos_health()
|
||||
return {
|
||||
"status": "ok",
|
||||
"gateway": "memory-gateway",
|
||||
"openviking": ov_status,
|
||||
"evermemos": evermemos_status,
|
||||
}
|
||||
except Exception as e:
|
||||
return {
|
||||
@ -490,6 +569,12 @@ mcp_router.add_api_route("/rpc", mcp_rpc, methods=["POST"])
|
||||
# 注册 MCP 路由
|
||||
app.include_router(mcp_router, prefix="/mcp", tags=["mcp"])
|
||||
|
||||
# Generic Memory Gateway v1 routes are imported lazily here to avoid changing
|
||||
# the existing legacy /api and /mcp startup path.
|
||||
from .api_v1 import router as api_v1_router # noqa: E402
|
||||
|
||||
app.include_router(api_v1_router)
|
||||
|
||||
|
||||
@app.post("/api/search", dependencies=[Depends(verify_api_key)])
|
||||
async def api_search(request: SearchRequest):
|
||||
|
||||
15
memory_gateway/server_auth.py
Normal file
15
memory_gateway/server_auth.py
Normal file
@ -0,0 +1,15 @@
|
||||
"""Small auth bridge used by the modular v1 router."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import Header, HTTPException, status
|
||||
|
||||
from .config import get_config
|
||||
|
||||
|
||||
def verify_api_key_compat(x_api_key: Optional[str] = Header(default=None)) -> None:
    """FastAPI dependency: reject requests whose X-API-Key header is wrong.

    When no API key is configured, every request passes through unchanged.
    """
    configured = get_config().server.api_key
    if not configured:
        return
    if x_api_key != configured:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid or missing API key")
|
||||
|
||||
369
memory_gateway/services.py
Normal file
369
memory_gateway/services.py
Normal file
@ -0,0 +1,369 @@
|
||||
"""Application services for the generic Memory Gateway v1 API."""
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
|
||||
from .config import get_config
|
||||
from .evermemos_client import EverMemOSError, EverMemOSClient
|
||||
from .namespace import can_access_memory, default_namespace_for_context, user_long_term_namespace, visible_namespaces
|
||||
from .openviking_client import get_openviking_client
|
||||
from .repositories import MetadataRepository, repository
|
||||
from .schemas import (
|
||||
AccessContext,
|
||||
AuditLog,
|
||||
CommitSessionRequest,
|
||||
CreateUserRequest,
|
||||
EpisodeAppendRequest,
|
||||
EpisodeRecord,
|
||||
MemoryFeedbackRequest,
|
||||
MemoryPatchRequest,
|
||||
MemoryRecord,
|
||||
MemorySearchRequest,
|
||||
MemoryType,
|
||||
MemoryUpsertRequest,
|
||||
NamespaceInfo,
|
||||
ProfileRecord,
|
||||
SourceType,
|
||||
UserRecord,
|
||||
Visibility,
|
||||
)
|
||||
from .workers.evermemos_worker import EverMemOSWorker
|
||||
|
||||
|
||||
class MemoryGatewayService:
|
||||
def __init__(self, repo: MetadataRepository = repository, evermemos_client: EverMemOSClient | None = None) -> None:
    """Wire the service to a metadata repository and optional EverMemOS client.

    Defaults to the module-level singleton repository. When evermemos_client
    is None, callers that need it construct a fresh EverMemOSClient on demand
    (see commit_session / evermemos_health).
    """
    self.repo = repo
    self.evermemos_client = evermemos_client
|
||||
|
||||
def create_user(self, request: CreateUserRequest) -> UserRecord:
    """Create (or overwrite) a user, assign their profile namespace, audit.

    Fix: the original built a throwaway ``UserRecord`` solely to harvest a
    generated id (``request.user_id or UserRecord(...).id``), validating the
    model twice; now the id default factory is used directly when no user_id
    is supplied. Behaviour is otherwise unchanged.
    """
    fields: dict = {
        "display_name": request.display_name,
        "preferences": request.preferences,
    }
    if request.user_id:
        fields["id"] = request.user_id
    user = UserRecord(**fields)
    # Every user's profile lives under their personal namespace.
    user.profile_namespace = f"user/{user.id}/profile"
    self.repo.create_user(user)
    self._audit("create_user", "user", user.id, namespace=user.profile_namespace, actor_user_id=user.id)
    return user
|
||||
|
||||
def get_user(self, user_id: str) -> UserRecord:
    """Fetch a user by id, raising HTTP 404 when unknown."""
    record = self.repo.get_user(user_id)
    if record is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    return record
|
||||
|
||||
def search_memory(self, request: MemorySearchRequest) -> dict:
    """Keyword search over locally stored memories.

    Scans every repository memory, applies ACL then namespace/type/tag
    filters, and finally requires the lowercased query to appear as a
    substring of the memory's content/summary/tags. Results are ranked by
    self._score and truncated to request.limit; "total" counts all matches
    before truncation.
    """
    ctx = AccessContext(**request.model_dump(include={"user_id", "agent_id", "workspace_id", "session_id"}))
    query = request.query.lower().strip()
    results = []
    for memory in self.repo.list_memories():
        # Access control first: never rank records the caller cannot see.
        if not can_access_memory(ctx, memory):
            continue
        if request.namespaces and memory.namespace not in request.namespaces:
            continue
        if request.memory_types and memory.memory_type not in request.memory_types:
            continue
        if request.tags and not set(request.tags).intersection(memory.tags):
            continue
        haystack = " ".join([memory.content, memory.summary or "", " ".join(memory.tags)]).lower()
        # An empty query matches everything (pure filter mode).
        if query and query not in haystack:
            continue
        score = self._score(memory, query)
        results.append({"memory": memory, "score": score})
    results.sort(key=lambda item: item["score"], reverse=True)
    return {"results": results[: request.limit], "total": len(results)}
|
||||
|
||||
async def search_memory_with_openviking(self, request: MemorySearchRequest) -> dict:
    """Search local metadata first, then fan out to OpenViking for visible namespaces.

    Only namespaces both requested and visible to the caller are queried
    remotely. OpenViking failures are swallowed (audited as
    "openviking_search_failed") so local results still come back. The merged
    response reports local and remote totals separately.
    """
    ctx = AccessContext(**request.model_dump(include={"user_id", "agent_id", "workspace_id", "session_id"}))
    local = self.search_memory(request)
    visible = {namespace.namespace for namespace in visible_namespaces(ctx)}
    # No explicit namespace filter means "everything the caller can see".
    requested = set(request.namespaces) if request.namespaces else visible
    allowed_namespaces = sorted(requested.intersection(visible))

    openviking_results = []
    if allowed_namespaces and request.query.strip():
        try:
            ov_client = await get_openviking_client()
            # Cap per-namespace fan-out at 10 so wide scopes stay cheap.
            per_namespace_limit = max(1, min(request.limit, 10))
            for namespace in allowed_namespaces:
                result = await ov_client.search(
                    query=request.query,
                    namespace=namespace,
                    limit=per_namespace_limit,
                )
                for item in result.results:
                    # Copy before tagging so the client's payload stays untouched.
                    item = dict(item)
                    item["namespace"] = namespace
                    item["source"] = "openviking"
                    openviking_results.append(item)
        except Exception as exc:  # noqa: BLE001
            # Best-effort remote search: record the failure, keep local hits.
            self._audit(
                "openviking_search_failed",
                "search",
                None,
                actor_user_id=request.user_id,
                actor_agent_id=request.agent_id,
                metadata={"error": str(exc)},
            )

    self._audit(
        "memory_search",
        "memory",
        None,
        actor_user_id=request.user_id,
        actor_agent_id=request.agent_id,
        metadata={"query": request.query, "namespaces": allowed_namespaces, "openviking_results": len(openviking_results)},
    )
    return {
        "results": local["results"] + [{"openviking": item, "score": item.get("score", 0)} for item in openviking_results],
        "total": local["total"] + len(openviking_results),
        "local_total": local["total"],
        "openviking_total": len(openviking_results),
        "searched_namespaces": allowed_namespaces,
    }
|
||||
|
||||
def upsert_memory(self, request: MemoryUpsertRequest) -> MemoryRecord:
    """Create a memory from the request, persist it, and audit the write.

    When the request omits a namespace, one is derived from the caller
    context and the requested visibility.
    """
    ctx = AccessContext(**request.model_dump(include={"user_id", "agent_id", "workspace_id", "session_id"}))
    namespace = request.namespace or default_namespace_for_context(ctx, request.visibility)
    memory = MemoryRecord(
        user_id=request.user_id,
        agent_id=request.agent_id,
        workspace_id=request.workspace_id,
        session_id=request.session_id,
        namespace=namespace,
        memory_type=request.memory_type,
        content=request.content,
        summary=request.summary,
        tags=request.tags,
        importance=request.importance,
        confidence=request.confidence,
        visibility=request.visibility,
        source=request.source,
        expires_at=request.expires_at,
    )
    self.repo.upsert_memory(memory)
    self._audit("upsert_memory", "memory", memory.id, namespace=memory.namespace, actor_user_id=request.user_id, actor_agent_id=request.agent_id)
    return memory
|
||||
|
||||
def get_memory(self, memory_id: str, ctx: AccessContext) -> MemoryRecord:
    """Fetch a memory with access control: 404 when missing, 403 when denied.

    Denied attempts are written to the audit log (decision="deny") before
    raising, so refusals remain traceable.
    """
    memory = self.repo.get_memory(memory_id)
    if not memory:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Memory not found")
    if not can_access_memory(ctx, memory):
        self._audit("get_memory", "memory", memory_id, namespace=memory.namespace, actor_user_id=ctx.user_id, actor_agent_id=ctx.agent_id, decision="deny")
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Memory access denied")
    return memory
|
||||
|
||||
def patch_memory(self, memory_id: str, ctx: AccessContext, patch: MemoryPatchRequest) -> MemoryRecord:
    """Apply a partial update to a memory the caller may access.

    Only fields explicitly set on the patch are copied over (exclude_unset),
    so omitting a field leaves it untouched while sending null clears it.
    The access check (404/403) is delegated to get_memory.
    """
    memory = self.get_memory(memory_id, ctx)
    updates = patch.model_dump(exclude_unset=True)
    for key, value in updates.items():
        setattr(memory, key, value)
    memory.updated_at = datetime.now(timezone.utc)
    memory.version += 1
    self.repo.upsert_memory(memory)
    self._audit("patch_memory", "memory", memory.id, namespace=memory.namespace, actor_user_id=ctx.user_id, actor_agent_id=ctx.agent_id)
    return memory
|
||||
|
||||
def delete_memory(self, memory_id: str, ctx: AccessContext) -> dict:
    """Delete a memory after an access check; audited; returns a status dict."""
    record = self.get_memory(memory_id, ctx)  # raises 404/403 when not allowed
    removed = self.repo.delete_memory(memory_id)
    self._audit(
        "delete_memory",
        "memory",
        memory_id,
        namespace=record.namespace,
        actor_user_id=ctx.user_id,
        actor_agent_id=ctx.agent_id,
    )
    return {"deleted": removed, "id": memory_id}
|
||||
|
||||
def append_episode(self, request: EpisodeAppendRequest) -> EpisodeRecord:
    """Store one short-term episode for the caller's session, with audit.

    Falls back to session_id "default" when none is provided, and derives a
    private namespace from the caller context when the request omits one.
    """
    ctx = AccessContext(**request.model_dump(include={"user_id", "agent_id", "workspace_id", "session_id"}))
    episode = EpisodeRecord(
        user_id=request.user_id,
        agent_id=request.agent_id,
        workspace_id=request.workspace_id,
        session_id=request.session_id or "default",
        namespace=request.namespace or default_namespace_for_context(ctx, Visibility.PRIVATE),
        content=request.content,
        events=request.events,
        tags=request.tags,
        source=request.source,
        expires_at=request.expires_at,
    )
    self.repo.append_episode(episode)
    self._audit("append_episode", "episode", episode.id, namespace=episode.namespace, actor_user_id=request.user_id, actor_agent_id=request.agent_id)
    return episode
|
||||
|
||||
def commit_session(self, session_id: str, request: CommitSessionRequest) -> dict:
    """Consolidate a session's episodes into long-term memories.

    Flow when request.promote is true:
      * EverMemOS enabled  -> delegate consolidation to the external service
        and persist its output; on EverMemOSError either raise HTTP 502, or,
        when config.fallback_to_local is set, consolidate locally instead
        (the error string is still surfaced in the response).
      * EverMemOS disabled -> consolidate locally via EverMemOSWorker.
    When promote is false, only the episode count is reported. Every call is
    audited; external failures are additionally audited with decision="deny".
    """
    episodes = self.repo.list_session_episodes(session_id)
    backend = "disabled"
    error: str | None = None
    if request.promote:
        ctx = AccessContext(
            user_id=request.user_id,
            agent_id=request.agent_id,
            workspace_id=request.workspace_id,
            session_id=session_id,
        )
        target_namespace = request.target_namespace or user_long_term_namespace(request.user_id)
        config = get_config().evermemos
        if config.enabled:
            try:
                external_result = (self.evermemos_client or EverMemOSClient()).consolidate_session(
                    session_id=session_id,
                    ctx=ctx,
                    episodes=episodes,
                    existing_memories=list(self.repo.list_memories()),
                    min_importance=request.min_importance,
                    target_namespace=target_namespace,
                )
                result = self._persist_external_consolidation(external_result, ctx, session_id)
                backend = "external"
            except EverMemOSError as exc:
                error = str(exc)
                if not config.fallback_to_local:
                    self._audit(
                        "evermemos_commit_failed",
                        "session",
                        session_id,
                        actor_user_id=request.user_id,
                        actor_agent_id=request.agent_id,
                        decision="deny",
                        metadata={"error": error},
                    )
                    raise HTTPException(status_code=status.HTTP_502_BAD_GATEWAY, detail=f"EverMemOS failed: {error}") from exc
                # Fall back to the in-process worker; keep the error for the response.
                result = self._commit_session_locally(session_id, ctx, request)
                backend = "local-fallback"
        else:
            result = self._commit_session_locally(session_id, ctx, request)
            backend = "local-disabled"
    else:
        result = None
    self._audit("commit_session", "session", session_id, actor_user_id=request.user_id, actor_agent_id=request.agent_id)
    if not result:
        return {"session_id": session_id, "episodes": len(episodes), "promoted": [], "evermemos_backend": backend}
    return {
        "evermemos_backend": backend,
        "evermemos_error": error,
        "session_id": session_id,
        "episodes": result.episodes,
        "candidates": result.candidates,
        "promoted": result.promoted,
        "duplicates": result.duplicates,
        "conflicts": result.conflicts,
        "review_drafts": result.review_drafts,
    }
|
||||
|
||||
def evermemos_health(self) -> dict:
    """Report EverMemOS status; short-circuits when the integration is off."""
    cfg = get_config().evermemos
    if not cfg.enabled:
        return {"status": "disabled", "url": cfg.url}
    client = self.evermemos_client or EverMemOSClient()
    return client.health()
|
||||
|
||||
def _commit_session_locally(self, session_id: str, ctx: AccessContext, request: CommitSessionRequest):
    """Run consolidation in-process via the local EverMemOSWorker."""
    namespace = request.target_namespace or user_long_term_namespace(request.user_id)
    return EverMemOSWorker(self.repo).consolidate_session(
        session_id=session_id,
        ctx=ctx,
        min_importance=request.min_importance,
        target_namespace=namespace,
    )
|
||||
|
||||
def _persist_external_consolidation(self, external_result: dict, ctx: AccessContext, session_id: str):
    """Turn an external EverMemOS response into a local ConsolidationResult.

    Candidates are collected without persisting; promoted items are upserted
    into the repository and also mirrored into the candidate list when not
    already present (matched by id). Items that cannot be converted to a
    MemoryRecord are silently dropped (see _memory_from_external).
    """
    from .workers.evermemos_worker import ConsolidationResult

    result = ConsolidationResult(
        session_id=session_id,
        # Prefer the external episode count; fall back to re-counting locally.
        episodes=external_result.get("episodes") or len(self.repo.list_session_episodes(session_id)),
        duplicates=external_result.get("duplicates", []),
        conflicts=external_result.get("conflicts", []),
        review_drafts=external_result.get("review_drafts", []),
    )
    for item in external_result.get("candidates", []):
        memory = self._memory_from_external(item, ctx, session_id)
        if memory:
            result.candidates.append(memory)
    for item in external_result.get("promoted", []):
        memory = self._memory_from_external(item, ctx, session_id)
        if memory:
            self.repo.upsert_memory(memory)
            result.promoted.append(memory)
            # Keep candidates a superset of promoted, without duplicating ids.
            if all(candidate.id != memory.id for candidate in result.candidates):
                result.candidates.append(memory)
    return result
|
||||
|
||||
def _memory_from_external(self, item: dict, ctx: AccessContext, session_id: str) -> MemoryRecord | None:
    """Build a MemoryRecord from a raw external payload, or None if unusable.

    Missing fields are filled with defaults derived from the access context;
    payload-provided values always win (setdefault semantics).
    """
    if not isinstance(item, dict):
        return None
    data = dict(item)
    # Scope defaults come from the caller's access context.
    scope_defaults = {
        "user_id": ctx.user_id,
        "agent_id": ctx.agent_id,
        "workspace_id": ctx.workspace_id,
        "session_id": session_id,
        "namespace": default_namespace_for_context(ctx, Visibility.PRIVATE),
        "memory_type": MemoryType.SUMMARY.value,
    }
    for key, value in scope_defaults.items():
        data.setdefault(key, value)
    # Content must be resolved before the summary default can truncate it.
    data.setdefault("content", data.get("text") or data.get("summary") or "")
    data.setdefault("summary", data.get("content", "")[:180])
    record_defaults = {
        "tags": ["evermemos-external"],
        "importance": 0.7,
        "confidence": 0.65,
        "visibility": Visibility.PRIVATE.value,
        "source": SourceType.EVERMEMOS.value,
    }
    for key, value in record_defaults.items():
        data.setdefault(key, value)
    if not data["content"]:
        # No usable text at all — drop the payload rather than store an empty memory.
        return None
    return MemoryRecord.model_validate(data)
|
||||
|
||||
def get_profile(self, user_id: str) -> ProfileRecord:
    """Return the stored profile for *user_id*; raise 404 when absent."""
    record = self.repo.get_profile(user_id)
    if record:
        return record
    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Profile not found")
|
||||
|
||||
def add_feedback(self, memory_id: str, request: MemoryFeedbackRequest) -> dict:
    """Record user feedback about a memory, after verifying access to it."""
    scope = AccessContext(**request.model_dump(include={"user_id", "agent_id", "workspace_id", "session_id"}))
    # get_memory enforces access control before any audit entry is written.
    record = self.get_memory(memory_id, scope)
    action = f"feedback:{request.feedback}"
    self._audit(
        action,
        "memory",
        record.id,
        namespace=record.namespace,
        actor_user_id=request.user_id,
        actor_agent_id=request.agent_id,
        metadata={"comment": request.comment},
    )
    return {"status": "ok", "memory_id": memory_id, "feedback": request.feedback}
|
||||
|
||||
def list_namespaces(self, ctx: AccessContext) -> list[NamespaceInfo]:
    """Return every namespace visible to the supplied access context."""
    return visible_namespaces(ctx)
|
||||
|
||||
def list_audit(self, limit: int = 100) -> list[AuditLog]:
    """Return up to *limit* most-recent audit log entries from the repository."""
    return self.repo.list_audit(limit)
|
||||
|
||||
def _score(self, memory: MemoryRecord, query: str) -> float:
|
||||
lexical = 1.0 if query and query in memory.content.lower() else 0.2
|
||||
return lexical + memory.importance + memory.confidence
|
||||
|
||||
def _audit(
    self,
    action: str,
    target_type: str,
    target_id: str | None,
    namespace: str | None = None,
    actor_user_id: str | None = None,
    actor_agent_id: str | None = None,
    decision: str = "allow",
    metadata: dict | None = None,
) -> None:
    """Append a structured audit entry describing *action* to the repository."""
    entry = AuditLog(
        actor_user_id=actor_user_id,
        actor_agent_id=actor_agent_id,
        action=action,
        target_type=target_type,
        target_id=target_id,
        namespace=namespace,
        decision=decision,  # type: ignore[arg-type]
        metadata=metadata or {},
    )
    self.repo.add_audit(entry)
|
||||
|
||||
# Module-level singleton consumed by the HTTP routers (imported as
# `from .services import service` in api_v1.py).
service = MemoryGatewayService()
|
||||
2
memory_gateway/skills/__init__.py
Normal file
2
memory_gateway/skills/__init__.py
Normal file
@ -0,0 +1,2 @@
|
||||
"""Skill skeletons for Memory Gateway processing units."""
|
||||
|
||||
21
memory_gateway/skills/base.py
Normal file
21
memory_gateway/skills/base.py
Normal file
@ -0,0 +1,21 @@
|
||||
"""Shared skill contracts."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any
|
||||
|
||||
|
||||
@dataclass
class SkillResult:
    # Outcome of a single skill invocation.
    status: str  # outcome label, e.g. "ok"
    output: dict[str, Any] = field(default_factory=dict)  # skill-specific payload
    writes_long_term_memory: bool = False  # True when the skill persisted memories
|
||||
|
||||
|
||||
class MemorySkill:
    """Base contract for asynchronous memory-processing skills."""

    name = "memory_skill"
    # Subclasses that persist long-term memories set this to True.
    writes_long_term_memory = False

    async def run(self, payload: dict[str, Any]) -> SkillResult:
        """Execute the skill against *payload*; subclasses must override."""
        raise NotImplementedError
|
||||
|
||||
9
memory_gateway/skills/classify_memory_skill.py
Normal file
9
memory_gateway/skills/classify_memory_skill.py
Normal file
@ -0,0 +1,9 @@
|
||||
from .base import MemorySkill, SkillResult
|
||||
|
||||
|
||||
class ClassifyMemorySkill(MemorySkill):
    """Assign a memory type and visibility to a candidate payload."""

    name = "classify_memory_skill"

    async def run(self, payload: dict) -> SkillResult:
        # Conservative fallbacks when the payload carries no classification.
        memory_type = payload.get("memory_type", "fact")
        visibility = payload.get("visibility", "private")
        return SkillResult(status="ok", output={"memory_type": memory_type, "visibility": visibility})
|
||||
|
||||
10
memory_gateway/skills/commit_memory_skill.py
Normal file
10
memory_gateway/skills/commit_memory_skill.py
Normal file
@ -0,0 +1,10 @@
|
||||
from .base import MemorySkill, SkillResult
|
||||
|
||||
|
||||
class CommitMemorySkill(MemorySkill):
    """Persist a memory payload; flags that long-term memory was written."""

    name = "commit_memory_skill"
    writes_long_term_memory = True

    async def run(self, payload: dict) -> SkillResult:
        result = SkillResult(status="ok", output={"committed": payload})
        result.writes_long_term_memory = True
        return result
|
||||
|
||||
9
memory_gateway/skills/export_to_obsidian_skill.py
Normal file
9
memory_gateway/skills/export_to_obsidian_skill.py
Normal file
@ -0,0 +1,9 @@
|
||||
from .base import MemorySkill, SkillResult
|
||||
|
||||
|
||||
class ExportToObsidianSkill(MemorySkill):
    """Report the draft path of a memory exported to the Obsidian vault."""

    name = "export_to_obsidian_skill"

    async def run(self, payload: dict) -> SkillResult:
        draft_path = payload.get("draft_path")
        return SkillResult(status="ok", output={"draft_path": draft_path})
|
||||
|
||||
11
memory_gateway/skills/extract_memory_skill.py
Normal file
11
memory_gateway/skills/extract_memory_skill.py
Normal file
@ -0,0 +1,11 @@
|
||||
from .base import MemorySkill, SkillResult
|
||||
|
||||
|
||||
class ExtractMemorySkill(MemorySkill):
    """Turn raw content into zero or one low-confidence memory candidates."""

    name = "extract_memory_skill"

    async def run(self, payload: dict) -> SkillResult:
        content = payload.get("content", "")
        candidates = []
        if content:
            candidates.append({"content": content, "confidence": 0.5})
        return SkillResult(status="ok", output={"candidates": candidates})
|
||||
|
||||
10
memory_gateway/skills/import_from_obsidian_skill.py
Normal file
10
memory_gateway/skills/import_from_obsidian_skill.py
Normal file
@ -0,0 +1,10 @@
|
||||
from .base import MemorySkill, SkillResult
|
||||
|
||||
|
||||
class ImportFromObsidianSkill(MemorySkill):
    """Acknowledge a vault note imported into long-term memory."""

    name = "import_from_obsidian_skill"
    writes_long_term_memory = True

    async def run(self, payload: dict) -> SkillResult:
        output = {"imported_path": payload.get("path")}
        return SkillResult(status="ok", output=output, writes_long_term_memory=True)
|
||||
|
||||
9
memory_gateway/skills/ingest_skill.py
Normal file
9
memory_gateway/skills/ingest_skill.py
Normal file
@ -0,0 +1,9 @@
|
||||
from .base import MemorySkill, SkillResult
|
||||
|
||||
|
||||
class IngestSkill(MemorySkill):
    """Pass an incoming payload through as its own normalized form."""

    name = "ingest_skill"

    async def run(self, payload: dict) -> SkillResult:
        # POC behaviour: the payload is considered already normalized.
        normalized = payload
        return SkillResult(status="ok", output={"normalized": normalized})
|
||||
|
||||
10
memory_gateway/skills/merge_memory_skill.py
Normal file
10
memory_gateway/skills/merge_memory_skill.py
Normal file
@ -0,0 +1,10 @@
|
||||
from .base import MemorySkill, SkillResult
|
||||
|
||||
|
||||
class MergeMemorySkill(MemorySkill):
    """Echo the ids of memories that were merged into one record."""

    name = "merge_memory_skill"
    writes_long_term_memory = True

    async def run(self, payload: dict) -> SkillResult:
        merged_ids = payload.get("memory_ids", [])
        return SkillResult(status="ok", output={"merged": merged_ids}, writes_long_term_memory=True)
|
||||
|
||||
10
memory_gateway/skills/prune_memory_skill.py
Normal file
10
memory_gateway/skills/prune_memory_skill.py
Normal file
@ -0,0 +1,10 @@
|
||||
from .base import MemorySkill, SkillResult
|
||||
|
||||
|
||||
class PruneMemorySkill(MemorySkill):
    """Echo the ids of memories selected for pruning."""

    name = "prune_memory_skill"
    writes_long_term_memory = True

    async def run(self, payload: dict) -> SkillResult:
        pruned_ids = payload.get("memory_ids", [])
        return SkillResult(status="ok", output={"pruned": pruned_ids}, writes_long_term_memory=True)
|
||||
|
||||
9
memory_gateway/skills/retrieve_context_skill.py
Normal file
9
memory_gateway/skills/retrieve_context_skill.py
Normal file
@ -0,0 +1,9 @@
|
||||
from .base import MemorySkill, SkillResult
|
||||
|
||||
|
||||
class RetrieveContextSkill(MemorySkill):
    """Return an (empty, for now) context list for a query."""

    name = "retrieve_context_skill"

    async def run(self, payload: dict) -> SkillResult:
        query = payload.get("query")
        return SkillResult(status="ok", output={"query": query, "contexts": []})
|
||||
|
||||
10
memory_gateway/skills/summarize_episode_skill.py
Normal file
10
memory_gateway/skills/summarize_episode_skill.py
Normal file
@ -0,0 +1,10 @@
|
||||
from .base import MemorySkill, SkillResult
|
||||
|
||||
|
||||
class SummarizeEpisodeSkill(MemorySkill):
    """Produce a naive summary by truncating episode content to 500 characters."""

    name = "summarize_episode_skill"

    async def run(self, payload: dict) -> SkillResult:
        text = payload.get("content", "")
        summary = text[:500]
        return SkillResult(status="ok", output={"summary": summary})
|
||||
|
||||
@ -17,6 +17,17 @@ class OpenVikingConfig(BaseModel):
|
||||
timeout: int = 30
|
||||
|
||||
|
||||
class EverMemOSConfig(BaseModel):
    """External EverMemOS consolidation service configuration."""
    enabled: bool = True  # master switch for the external integration
    url: str = "http://127.0.0.1:1995"  # base URL of the EverMemOS service
    api_key: str = ""  # empty string means no API key is sent — TODO confirm client behavior
    timeout: int = 30  # per-request timeout (presumably seconds — confirm against the HTTP client)
    health_path: str = "/health"  # path appended to `url` for health probes
    consolidate_path: str = "/v1/sessions/consolidate"  # consolidation endpoint path
    fallback_to_local: bool = True  # run the local worker when the external service fails
|
||||
|
||||
|
||||
class MemoryConfig(BaseModel):
|
||||
"""记忆配置"""
|
||||
default_namespace: str = "memory-gateway"
|
||||
@ -36,6 +47,13 @@ class ObsidianConfig(BaseModel):
|
||||
"""Obsidian Vault 配置。"""
|
||||
vault_path: str = "/home/tom/memory-gateway/obsidian-vault"
|
||||
knowledge_dir: str = "01_Knowledge/Uploaded"
|
||||
review_dir: str = "Reviews/Queue"
|
||||
|
||||
|
||||
class StorageConfig(BaseModel):
    """Metadata storage configuration."""
    # Backing store for memory metadata; "memory" presumably keeps data in-process — confirm.
    backend: Literal["sqlite", "memory"] = "sqlite"
    # NOTE(review): hard-coded absolute home path — consider deriving from an env var.
    sqlite_path: str = "/home/tom/memory-gateway/memory_gateway.sqlite3"
|
||||
|
||||
|
||||
class LoggingConfig(BaseModel):
|
||||
@ -48,10 +66,12 @@ class Config(BaseModel):
|
||||
"""完整配置"""
|
||||
server: ServerConfig = Field(default_factory=ServerConfig)
|
||||
openviking: OpenVikingConfig = Field(default_factory=OpenVikingConfig)
|
||||
evermemos: EverMemOSConfig = Field(default_factory=EverMemOSConfig)
|
||||
memory: MemoryConfig = Field(default_factory=MemoryConfig)
|
||||
logging: LoggingConfig = Field(default_factory=LoggingConfig)
|
||||
llm: LLMConfig = Field(default_factory=LLMConfig)
|
||||
obsidian: ObsidianConfig = Field(default_factory=ObsidianConfig)
|
||||
storage: StorageConfig = Field(default_factory=StorageConfig)
|
||||
|
||||
|
||||
class SearchRequest(BaseModel):
|
||||
|
||||
2
memory_gateway/workers/__init__.py
Normal file
2
memory_gateway/workers/__init__.py
Normal file
@ -0,0 +1,2 @@
|
||||
"""Background worker skeletons."""
|
||||
|
||||
186
memory_gateway/workers/evermemos_worker.py
Normal file
186
memory_gateway/workers/evermemos_worker.py
Normal file
@ -0,0 +1,186 @@
|
||||
"""Minimal EverMemOS-style consolidation worker.
|
||||
|
||||
This worker is deliberately deterministic for the POC. It extracts stable
|
||||
candidate memories from session episodes, deduplicates them against existing
|
||||
records, promotes eligible records, and sends high-risk/high-value candidates
|
||||
to Obsidian review rather than blindly polluting long-term memory.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import re
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from memory_gateway.namespace import default_namespace_for_context
|
||||
from memory_gateway.obsidian_review import write_review_draft
|
||||
from memory_gateway.repositories import MetadataRepository
|
||||
from memory_gateway.schemas import (
|
||||
AccessContext,
|
||||
EpisodeRecord,
|
||||
MemoryRecord,
|
||||
MemoryType,
|
||||
SourceType,
|
||||
Visibility,
|
||||
)
|
||||
|
||||
|
||||
_SENTENCE_RE = re.compile(r"(?<=[。!?.!?])\s+|\n+")
|
||||
_NOISE_RE = re.compile(r"\s+")
|
||||
|
||||
|
||||
@dataclass
class ConsolidationResult:
    # Aggregated outcome of consolidating one session's episodes.
    session_id: str
    episodes: int  # number of episodes examined
    candidates: list[MemoryRecord] = field(default_factory=list)  # every extracted candidate
    promoted: list[MemoryRecord] = field(default_factory=list)  # candidates persisted long-term
    duplicates: list[dict] = field(default_factory=list)  # candidate/existing id pairs
    review_drafts: list[str] = field(default_factory=list)  # paths of Obsidian review drafts
    conflicts: list[dict] = field(default_factory=list)  # candidate id with conflicting memory ids
|
||||
|
||||
|
||||
class EverMemOSWorker:
    """Deterministic, rule-based session consolidator (local EverMemOS stand-in)."""

    def __init__(self, repo: MetadataRepository) -> None:
        self.repo = repo

    def consolidate_session(
        self,
        session_id: str,
        ctx: AccessContext,
        min_importance: float = 0.6,
        target_namespace: str | None = None,
    ) -> ConsolidationResult:
        """Extract, dedupe, and selectively promote memories from a session.

        Per-candidate pipeline: exact-duplicate check (content fingerprint),
        conflict detection (routed to Obsidian review), a high-value review
        gate, then promotion when importance and confidence clear thresholds.
        """
        episodes = self.repo.list_session_episodes(session_id)
        result = ConsolidationResult(session_id=session_id, episodes=len(episodes))
        existing = list(self.repo.list_memories())
        # Normalized-content hash -> memory, for O(1) exact-duplicate checks.
        seen_fingerprints = {self._fingerprint(memory.content): memory for memory in existing}

        for episode in episodes:
            for candidate in self._extract_candidates(episode, ctx, min_importance, target_namespace):
                result.candidates.append(candidate)
                fingerprint = self._fingerprint(candidate.content)
                duplicate = seen_fingerprints.get(fingerprint)
                if duplicate:
                    result.duplicates.append({"candidate_id": candidate.id, "existing_id": duplicate.id})
                    continue

                conflict_ids = self._find_conflicts(candidate, existing)
                if conflict_ids:
                    # Conflicting guidance goes to human review instead of being promoted.
                    draft = write_review_draft(candidate, reason="conflict", conflict_ids=conflict_ids)
                    result.review_drafts.append(str(draft))
                    result.conflicts.append({"candidate_id": candidate.id, "conflict_ids": conflict_ids})
                    continue

                if candidate.importance >= 0.85:
                    # Very high-value candidates are reviewed before promotion.
                    draft = write_review_draft(candidate, reason="high_value")
                    result.review_drafts.append(str(draft))
                    continue

                if candidate.importance >= min_importance and candidate.confidence >= 0.55:
                    self.repo.upsert_memory(candidate)
                    result.promoted.append(candidate)
                    # Newly promoted memories join the dedupe/conflict baseline.
                    seen_fingerprints[fingerprint] = candidate
                    existing.append(candidate)

        return result

    def _extract_candidates(
        self,
        episode: EpisodeRecord,
        ctx: AccessContext,
        min_importance: float,
        target_namespace: str | None,
    ) -> list[MemoryRecord]:
        """Split an episode into sentence-level MemoryRecord candidates."""
        # Prefer the curated summary; fall back to raw content.
        text = episode.summary or episode.content
        parts = [self._normalize(part) for part in _SENTENCE_RE.split(text) if self._normalize(part)]
        candidates: list[MemoryRecord] = []
        for part in parts:
            if len(part) < 20:
                # Very short fragments carry too little signal to keep.
                continue
            memory_type = self._classify_type(part, episode.tags)
            importance = self._estimate_importance(part, episode.tags, min_importance)
            # Summary-derived text gets slightly higher confidence than raw content.
            confidence = 0.65 if episode.summary else 0.58
            visibility = Visibility.WORKSPACE_SHARED if "workspace" in episode.tags and ctx.workspace_id else Visibility.PRIVATE
            memory_ctx = AccessContext(
                user_id=ctx.user_id,
                agent_id=ctx.agent_id,
                workspace_id=ctx.workspace_id,
                session_id=ctx.session_id,
            )
            candidates.append(
                MemoryRecord(
                    user_id=ctx.user_id,
                    agent_id=ctx.agent_id,
                    workspace_id=ctx.workspace_id,
                    session_id=episode.session_id,
                    namespace=target_namespace or default_namespace_for_context(memory_ctx, visibility),
                    memory_type=memory_type,
                    content=part,
                    summary=part[:180],
                    tags=list(set(episode.tags + ["promoted-from-session", "evermemos-candidate"])),
                    importance=importance,
                    confidence=confidence,
                    visibility=visibility,
                    source=SourceType.EVERMEMOS,
                    source_ref=episode.id,
                )
            )
        return candidates

    def _classify_type(self, text: str, tags: list[str]) -> MemoryType:
        """Map keyword/tag signals (English and Chinese) onto a MemoryType."""
        lowered = text.lower()
        if "preference" in tags or "偏好" in text:
            return MemoryType.PREFERENCE
        if "decision" in tags or "决定" in text or "决策" in text:
            return MemoryType.DECISION
        if "procedure" in tags or "步骤" in text or "流程" in text:
            return MemoryType.PROCEDURE
        if "经验" in text or "worked" in lowered or "failed" in lowered:
            return MemoryType.EXPERIENCE
        return MemoryType.SUMMARY

    def _estimate_importance(self, text: str, tags: list[str], min_importance: float) -> float:
        """Heuristic importance score, capped at 1.0, from signal words and tags."""
        importance = max(min_importance, 0.6)
        signal_words = ["必须", "不要", "偏好", "长期", "决策", "结论", "重要", "preference", "decision", "must"]
        if any(word in text.lower() for word in signal_words):
            importance += 0.15
        if "review" in tags or "high-value" in tags:
            importance += 0.2
        return min(1.0, importance)

    def _find_conflicts(self, candidate: MemoryRecord, existing: list[MemoryRecord]) -> list[str]:
        """Return ids of same-user, same-type memories whose polarity disagrees.

        "Polarity" is whether the text contains negative (forbid/disable) or
        positive (require/enable) signal words; a mismatch on either axis,
        combined with sufficient token overlap, counts as a conflict.
        """
        candidate_text = candidate.content.lower()
        negation_signals = ["不要", "不再", "禁止", "not ", "never", "disable"]
        positive_signals = ["需要", "必须", "启用", "prefer", "always", "enable"]
        has_negative = any(signal in candidate_text for signal in negation_signals)
        has_positive = any(signal in candidate_text for signal in positive_signals)
        if not has_negative and not has_positive:
            return []

        candidate_tokens = self._tokens(candidate.content)
        conflicts = []
        for memory in existing:
            if memory.user_id != candidate.user_id:
                continue
            if memory.memory_type != candidate.memory_type:
                continue
            overlap = candidate_tokens.intersection(self._tokens(memory.content))
            if len(overlap) < 2:
                # Require at least two shared tokens before treating texts as related.
                continue
            memory_text = memory.content.lower()
            memory_negative = any(signal in memory_text for signal in negation_signals)
            memory_positive = any(signal in memory_text for signal in positive_signals)
            if has_negative != memory_negative or has_positive != memory_positive:
                conflicts.append(memory.id)
        return conflicts

    def _tokens(self, text: str) -> set[str]:
        """Lowercased alphanumeric/CJK tokens of length >= 2."""
        return {token for token in re.split(r"[^a-zA-Z0-9\u4e00-\u9fff]+", text.lower()) if len(token) >= 2}

    def _normalize(self, text: str) -> str:
        """Collapse whitespace and strip surrounding markdown bullet characters."""
        return _NOISE_RE.sub(" ", text).strip(" -_*#\t")

    def _fingerprint(self, text: str) -> str:
        """Stable content hash (SHA-1 of normalized, lowercased text) for dedupe."""
        normalized = self._normalize(text).lower()
        return hashlib.sha1(normalized.encode("utf-8")).hexdigest()
|
||||
|
||||
Reference in New Issue
Block a user