# memory-gateway/tests/test_v2_api.py
import asyncio
import json
from datetime import datetime, timedelta, timezone
from pathlib import Path
from fastapi import FastAPI
import httpx
from httpx import ASGITransport, AsyncClient
from memory_gateway.config import load_config
from memory_gateway.backend_adapter_mapping import (
ADAPTER_MAPPING_SPECS,
DISALLOWED_PAYLOAD_FIELDS,
validate_control_plane_persisted_payload,
get_adapter_mapping_spec,
validate_control_plane_payload,
)
from memory_gateway.backend_normalization import (
map_backend_error_to_retryable,
normalize_evermemos_commit_response,
normalize_evermemos_ingest_response,
normalize_evermemos_retrieve_response,
normalize_openviking_commit_response,
normalize_openviking_ingest_response,
normalize_openviking_retrieve_response,
)
from memory_gateway.backend_contracts import (
BackendCommitResult,
BackendOperation,
BackendProducedRef,
BackendResultStatus,
BackendRetrieveResult,
BackendWriteResult,
OutboxEventStatus,
)
from memory_gateway.backend_ref_mapping import map_backend_ref_type
from memory_gateway.evermemos_client import EverMemOSClient
from memory_gateway.obsidian_review_client import ObsidianReviewClient
from memory_gateway.openviking_client import OpenVikingClient
from memory_gateway.repositories import InMemoryRepository, SQLiteRepository
from memory_gateway.schemas_v2 import (
BackendRefStatus,
BackendType,
CommitRequest,
IngestRequest,
MemoryRefType,
OperationStatus,
OutboxProcessResponse,
RetrieveRequest,
)
from memory_gateway.server_auth import verify_api_key_compat
from memory_gateway.services_v2 import MemoryGatewayV2Service
# Canned backend JSON responses used by the fixture-driven normalization tests.
FIXTURE_DIR = Path(__file__).parent / "fixtures" / "backend_responses"
# Repository docs directory; tests assert required adapter docs exist there.
DOCS_DIR = Path(__file__).parent.parent / "docs"
def load_backend_fixture(name: str):
    """Read and parse the JSON fixture *name* from FIXTURE_DIR."""
    fixture_path = FIXTURE_DIR / name
    return json.loads(fixture_path.read_text())
def build_ingest_payload(**overrides):
    """Return a baseline v2 ingest payload dict.

    Keyword arguments override (or extend) the baseline fields.
    """
    base = {
        "workspace_id": "ws_1",
        "user_id": "user_a",
        "agent_id": "agent_cli",
        "session_id": "sess_1",
        "turn_id": "turn_1",
        "request_id": "req_1",
        "namespace": "workspace/ws_1/user/user_a",
        "source_type": "cli",
        "source_event_id": "evt_1",
        "role": "user",
        "content": "Need to remember this conversation turn.",
        "metadata": {"channel": "test"},
    }
    return {**base, **overrides}
class FakeOpenVikingClient:
    """Async OpenViking stub that always reports a successful ingest."""

    async def ingest_conversation_turn(self, payload):
        turn = payload["turn_id"]
        session = payload["session_id"]
        return {
            "status": "success",
            "native_id": f"ov_{turn}",
            "native_uri": f"viking://sessions/{session}/{turn}",
        }
async def fake_openviking_factory():
    """Async factory the service uses to obtain the OpenViking stub."""
    return FakeOpenVikingClient()
class FakeEverMemOSClient:
    """Synchronous EverMemOS stub that always reports a successful ingest."""

    def ingest_message(self, payload):
        turn = payload["turn_id"]
        return {
            "status": "success",
            "native_id": f"em_{turn}",
            "native_uri": f"evermemos://memories/{turn}",
        }
class FailingEverMemOSClient:
    """EverMemOS stub whose ingest always fails, for error-path tests."""

    def ingest_message(self, payload):
        # Simulate a hard backend outage regardless of input.
        raise RuntimeError("evermemos unavailable")
class FakeCommitOpenVikingClient:
    """Async OpenViking stub whose commit always returns a pre-baked result."""

    def __init__(self, result: BackendCommitResult) -> None:
        # Result object handed back verbatim on every commit call.
        self.result = result

    async def commit_session_v2(self, payload):
        """Ignore the payload and return the canned commit result."""
        return self.result
def fake_commit_openviking_factory(result: BackendCommitResult):
    """Build an async client factory that yields a stub returning *result*."""
    async def factory():
        return FakeCommitOpenVikingClient(result)
    return factory
class FakeCommitEverMemOSClient:
    """EverMemOS stub whose profile extraction returns a pre-baked result."""

    def __init__(self, result: BackendCommitResult) -> None:
        # Result object handed back verbatim on every extraction call.
        self.result = result

    def extract_profile_long_term_v2(self, payload):
        """Ignore the payload and return the canned commit result."""
        return self.result
def commit_result(
    backend_type: BackendType,
    status: BackendResultStatus,
    native_id: str | None = None,
    native_uri: str | None = None,
    retryable: bool = False,
    error_message: str | None = None,
):
    """Shorthand for building a COMMIT_SESSION BackendCommitResult in tests."""
    return BackendCommitResult(
        backend_type=backend_type,
        operation=BackendOperation.COMMIT_SESSION,
        status=status,
        native_id=native_id,
        native_uri=native_uri,
        retryable=retryable,
        error_message=error_message,
    )
def test_v2_adapters_return_backend_write_result_contract():
    """Default-constructed adapters return BackendWriteResult with SKIPPED status."""
    ov_result = asyncio.run(
        OpenVikingClient().ingest_conversation_turn(
            {
                "workspace_id": "ws_1",
                "session_id": "sess_1",
                "turn_id": "turn_1",
            }
        )
    )
    em_result = EverMemOSClient().ingest_message(
        {
            "workspace_id": "ws_1",
            "session_id": "sess_1",
            "turn_id": "turn_1",
        }
    )
    assert isinstance(ov_result, BackendWriteResult)
    assert isinstance(em_result, BackendWriteResult)
    assert ov_result.backend_type == BackendType.OPENVIKING
    assert em_result.backend_type == BackendType.EVERMEMOS
    assert ov_result.operation == BackendOperation.INGEST_TURN
    assert em_result.operation == BackendOperation.INGEST_TURN
    assert ov_result.status == BackendResultStatus.SKIPPED
    assert em_result.status == BackendResultStatus.SKIPPED
def test_backend_env_overrides_enable_real_modes(monkeypatch, tmp_path):
    """Environment variables switch both backends to real mode and set endpoints."""
    monkeypatch.setenv("OPENVIKING_MODE", "real")
    monkeypatch.setenv("OPENVIKING_BASE_URL", "http://openviking.env.test")
    monkeypatch.setenv("OPENVIKING_API_KEY", "ov-env-token")
    monkeypatch.setenv("OPENVIKING_TIMEOUT_SECONDS", "17")
    monkeypatch.setenv("EVERMEMOS_MODE", "real")
    monkeypatch.setenv("EVERMEMOS_BASE_URL", "http://evermemos.env.test")
    monkeypatch.setenv("EVERMEMOS_API_KEY", "em-env-token")
    monkeypatch.setenv("EVERMEMOS_INGEST_PATH", "/api/v1/memories")
    # Nonexistent YAML path forces config to come purely from the environment.
    config = load_config(str(tmp_path / "missing.yaml"))
    assert config.openviking.mode == "real"
    assert config.openviking.url == "http://openviking.env.test"
    assert config.openviking.api_key == "ov-env-token"
    assert config.openviking.timeout == 17
    assert config.evermemos.mode == "real"
    assert config.evermemos.url == "http://evermemos.env.test"
    assert config.evermemos.api_key == "em-env-token"
    assert config.evermemos.ingest_path == "/api/v1/memories"
def test_openviking_default_ingest_does_not_touch_network():
    """Default (offline) client skips ingest without performing any HTTP request."""
    def handler(request):
        # The transport raising means any HTTP attempt fails the test loudly.
        raise AssertionError("offline OpenViking ingest should not perform HTTP")
    client = OpenVikingClient(
        base_url="http://openviking.test",
        transport=httpx.MockTransport(handler),
    )
    result = asyncio.run(client.ingest_conversation_turn({"session_id": "sess_offline", "turn_id": "turn_1"}))
    assert result.status == BackendResultStatus.SKIPPED
    assert result.native_uri == "viking://sessions/sess_offline"
def test_openviking_adapter_config_doc_exists_and_covers_modes_and_security():
    """The adapter config doc mentions modes, connection settings, and payload fields."""
    doc = (DOCS_DIR / "openviking_adapter_config.md").read_text()
    assert "offline" in doc
    assert "real" in doc
    assert "base_url" in doc
    assert "api_key" in doc
    assert "verify_ssl" in doc
    assert "ingest_path" in doc
    assert "content" in doc
    assert "messages" in doc
    assert "transcript" in doc
def test_openviking_mode_offline_does_not_touch_network_even_with_base_url():
    """mode=offline skips ingest even when a base URL is configured."""
    def handler(request):
        raise AssertionError("offline mode should not perform HTTP")
    client = OpenVikingClient(
        mode="offline",
        base_url="http://openviking.test",
        enabled=False,
        transport=httpx.MockTransport(handler),
    )
    result = asyncio.run(client.ingest_conversation_turn({"session_id": "sess_offline_mode", "turn_id": "turn_1"}))
    assert result.status == BackendResultStatus.SKIPPED
def test_openviking_mode_skeleton_does_not_touch_network_even_with_base_url():
    """mode=skeleton skips ingest even when a base URL is configured."""
    def handler(request):
        raise AssertionError("skeleton mode should not perform HTTP")
    client = OpenVikingClient(
        mode="skeleton",
        base_url="http://openviking.test",
        enabled=False,
        transport=httpx.MockTransport(handler),
    )
    result = asyncio.run(client.ingest_conversation_turn({"session_id": "sess_skeleton_mode", "turn_id": "turn_1"}))
    assert result.status == BackendResultStatus.SKIPPED
def test_openviking_mode_real_with_base_url_uses_mock_http():
    """mode=real makes exactly one HTTP call and returns a SUCCESS result."""
    calls = {"count": 0}
    def handler(request):
        calls["count"] += 1
        return httpx.Response(200, json=load_backend_fixture("openviking_ingest_real_success.json"))
    client = OpenVikingClient(
        mode="real",
        enabled=False,
        base_url="http://openviking.test",
        transport=httpx.MockTransport(handler),
    )
    result = asyncio.run(client.ingest_conversation_turn({"session_id": "ov_real_sess_fixture_1", "turn_id": "turn_real", "content": "SECRET"}))
    assert calls["count"] == 1
    assert result.status == BackendResultStatus.SUCCESS
def test_openviking_enabled_true_without_mode_real_does_not_touch_network():
    """enabled=True alone must not trigger HTTP; mode=real is required."""
    seen = {"calls": 0}
    def handler(request):
        seen["calls"] += 1
        raise AssertionError("enabled=True must not perform HTTP without mode=real")
    client = OpenVikingClient(
        mode="offline",
        enabled=True,
        base_url="http://openviking.test",
        transport=httpx.MockTransport(handler),
    )
    result = asyncio.run(client.ingest_conversation_turn({"session_id": "ov_real_sess_fixture_1", "turn_id": "turn_x", "content": "SECRET"}))
    assert seen["calls"] == 0
    assert result.status == BackendResultStatus.SKIPPED
def test_openviking_real_ingest_mode_real_without_base_url_returns_config_error():
    """mode=real with an empty base URL fails non-retryably with config_error."""
    client = OpenVikingClient(mode="real", base_url="")
    result = asyncio.run(client.ingest_conversation_turn({"session_id": "sess_missing_url", "content": "SECRET"}))
    assert result.status == BackendResultStatus.FAILED
    assert result.retryable is False
    assert result.error_code == "config_error"
    # Serialized result must never echo the submitted content.
    assert "SECRET" not in json.dumps(result.model_dump(mode="json"), ensure_ascii=False)
def test_openviking_real_ingest_success_uses_mock_http_and_normalization():
    """Real-mode ingest posts the content with the API-key header, and the
    normalized result leaks neither content nor credentials."""
    seen_payload = {}
    seen_headers = {}
    fixture = load_backend_fixture("openviking_ingest_real_success.json")
    def handler(request):
        # Capture what the client actually sent for later assertions.
        seen_payload.update(json.loads(request.content.decode()))
        seen_headers.update(dict(request.headers))
        return httpx.Response(
            200,
            json=fixture,
        )
    client = OpenVikingClient(
        mode="real",
        base_url="http://openviking.test",
        api_key="token",
        transport=httpx.MockTransport(handler),
    )
    result = asyncio.run(
        client.ingest_conversation_turn(
            {
                "workspace_id": "ws_1",
                "session_id": "ov_real_sess_fixture_1",
                "turn_id": "turn_real",
                "content": "SECRET_REAL_CONTENT",
            }
        )
    )
    # The client's result must match the shared normalization of the fixture.
    expected = normalize_openviking_ingest_response(fixture)
    assert seen_payload["content"] == "SECRET_REAL_CONTENT"
    assert seen_headers["x-api-key"] == "token"
    assert result == expected
    assert result.status == BackendResultStatus.SUCCESS
    serialized = json.dumps(result.model_dump(mode="json"), ensure_ascii=False)
    assert "SECRET_REAL_CONTENT" not in serialized
    assert "content" not in serialized
    assert "token" not in serialized
def test_openviking_real_ingest_timeout_is_retryable_and_safe():
    """An httpx timeout maps to a retryable FAILED result that leaks no content."""
    def handler(request):
        raise httpx.ReadTimeout("timeout while sending SECRET_TIMEOUT_CONTENT")
    client = OpenVikingClient(
        mode="real",
        base_url="http://openviking.test",
        transport=httpx.MockTransport(handler),
    )
    result = asyncio.run(client.ingest_conversation_turn({"session_id": "sess_timeout", "content": "SECRET_TIMEOUT_CONTENT"}))
    assert result.status == BackendResultStatus.FAILED
    assert result.retryable is True
    assert result.error_code == "timeout"
    # The exception message containing the content must not reach the result.
    assert "SECRET_TIMEOUT_CONTENT" not in json.dumps(result.model_dump(mode="json"), ensure_ascii=False)
def test_openviking_real_ingest_http_retryable_and_nonretryable_statuses():
    """429/500 map to retryable failures; 401/403/422 are non-retryable."""
    def client_for_fixture(name, status_code):
        return OpenVikingClient(
            mode="real",
            base_url="http://openviking.test",
            api_key="super-secret-token",
            transport=httpx.MockTransport(lambda request: httpx.Response(status_code, json=load_backend_fixture(name))),
        )
    result_429 = asyncio.run(client_for_fixture("openviking_ingest_real_error_500.json", 429).ingest_conversation_turn({"session_id": "sess_http"}))
    assert result_429.status == BackendResultStatus.FAILED
    assert result_429.retryable is True
    assert result_429.error_code == "http_429"
    result_500 = asyncio.run(client_for_fixture("openviking_ingest_real_error_500.json", 500).ingest_conversation_turn({"session_id": "sess_http"}))
    assert result_500.status == BackendResultStatus.FAILED
    assert result_500.retryable is True
    assert result_500.error_code == "http_500"
    # The API key must never appear in serialized results.
    assert "super-secret-token" not in json.dumps(result_500.model_dump(mode="json"), ensure_ascii=False)
    for name, status_code in (
        ("openviking_ingest_real_error_401.json", 401),
        ("openviking_ingest_real_error_401.json", 403),
        ("openviking_ingest_real_error_422.json", 422),
    ):
        result = asyncio.run(client_for_fixture(name, status_code).ingest_conversation_turn({"session_id": "sess_http"}))
        assert result.status == BackendResultStatus.FAILED
        assert result.retryable is False
        assert result.error_code == f"http_{status_code}"
def test_openviking_real_ingest_invalid_json_returns_failed_retryable():
    """A 200 response with an unparseable body fails retryably as invalid_json."""
    client = OpenVikingClient(
        mode="real",
        base_url="http://openviking.test",
        transport=httpx.MockTransport(lambda request: httpx.Response(200, content=b"not-json")),
    )
    result = asyncio.run(client.ingest_conversation_turn({"session_id": "sess_invalid_json", "content": "SECRET_JSON"}))
    assert result.status == BackendResultStatus.FAILED
    assert result.retryable is True
    assert result.error_code == "invalid_json"
    assert "SECRET_JSON" not in json.dumps(result.model_dump(mode="json"), ensure_ascii=False)
def test_evermemos_default_ingest_does_not_touch_network_even_if_enabled():
    """EverMemOS skips ingest without HTTP unless mode=real, even when enabled."""
    def handler(request):
        raise AssertionError("EverMemOS ingest should not perform HTTP unless mode=real")
    client = EverMemOSClient(
        enabled=True,
        mode="offline",
        base_url="http://evermemos.test",
        transport=httpx.MockTransport(handler),
    )
    result = client.ingest_message({"session_id": "sess_offline", "turn_id": "turn_1", "content": "SECRET"})
    assert result.status == BackendResultStatus.SKIPPED
def test_evermemos_real_ingest_mode_real_without_base_url_returns_config_error():
    """mode=real with an empty base URL fails non-retryably with config_error."""
    client = EverMemOSClient(mode="real", base_url="")
    result = client.ingest_message({"session_id": "sess_missing_url", "content": "SECRET"})
    assert result.status == BackendResultStatus.FAILED
    assert result.retryable is False
    assert result.error_code == "config_error"
    assert "SECRET" not in json.dumps(result.model_dump(mode="json"), ensure_ascii=False)
def test_evermemos_real_ingest_success_uses_mock_http_and_normalization():
    """Real-mode EverMemOS ingest posts content with both auth headers and
    yields a normalized result that leaks neither content nor the token."""
    seen_payload = {}
    seen_headers = {}
    fixture = load_backend_fixture("evermemos_ingest_success.json")
    def handler(request):
        # Capture the outgoing request for later assertions.
        seen_payload.update(json.loads(request.content.decode()))
        seen_headers.update(dict(request.headers))
        return httpx.Response(200, json=fixture)
    client = EverMemOSClient(
        mode="real",
        base_url="http://evermemos.test",
        api_key="em-token",
        transport=httpx.MockTransport(handler),
    )
    result = client.ingest_message(
        {
            "workspace_id": "ws_1",
            "user_id": "user_a",
            "session_id": "sess_1",
            "turn_id": "turn_1",
            "role": "user",
            "content": "SECRET_EM_CONTENT",
            "source_type": "cli",
            "source_event_id": "evt_1",
            "metadata": {"channel": "test"},
        }
    )
    expected = normalize_evermemos_ingest_response(fixture)
    assert seen_payload["content"] == "SECRET_EM_CONTENT"
    # The client sends the key as both x-api-key and a bearer token.
    assert seen_headers["x-api-key"] == "em-token"
    assert seen_headers["authorization"] == "Bearer em-token"
    assert result == expected
    serialized = json.dumps(result.model_dump(mode="json"), ensure_ascii=False)
    assert "SECRET_EM_CONTENT" not in serialized
    assert "content" not in serialized
    assert "em-token" not in serialized
def test_evermemos_real_ingest_errors_are_backend_write_results_and_safe():
    """HTTP and parse failures map to safe, correctly-retryable write results."""
    def client_for_response(status_code, body=None, content=None):
        return EverMemOSClient(
            mode="real",
            base_url="http://evermemos.test",
            api_key="em-super-secret-token",
            transport=httpx.MockTransport(lambda request: httpx.Response(status_code, json=body, content=content)),
        )
    result_500 = client_for_response(500, {"error_code": "server_error"}).ingest_message({"content": "SECRET"})
    assert result_500.status == BackendResultStatus.FAILED
    assert result_500.retryable is True
    assert result_500.error_code == "http_500"
    for status_code in (401, 403, 422):
        result = client_for_response(status_code, {"error_code": "auth_or_validation"}).ingest_message({"content": "SECRET"})
        assert result.status == BackendResultStatus.FAILED
        assert result.retryable is False
        assert result.error_code == f"http_{status_code}"
    invalid = client_for_response(200, content=b"not-json").ingest_message({"content": "SECRET"})
    assert invalid.status == BackendResultStatus.FAILED
    assert invalid.retryable is True
    assert invalid.error_code == "invalid_json"
    serialized = json.dumps(
        [result_500.model_dump(mode="json"), invalid.model_dump(mode="json")],
        ensure_ascii=False,
    )
    assert "SECRET" not in serialized
    assert "em-super-secret-token" not in serialized
def test_evermemos_real_ingest_timeout_is_retryable_and_safe():
    """An httpx timeout maps to a retryable FAILED result that leaks no content."""
    def handler(request):
        raise httpx.ReadTimeout("timeout while sending SECRET_TIMEOUT_CONTENT")
    client = EverMemOSClient(
        mode="real",
        base_url="http://evermemos.test",
        transport=httpx.MockTransport(handler),
    )
    result = client.ingest_message({"session_id": "sess_timeout", "content": "SECRET_TIMEOUT_CONTENT"})
    assert result.status == BackendResultStatus.FAILED
    assert result.retryable is True
    assert result.error_code == "timeout"
    assert "SECRET_TIMEOUT_CONTENT" not in json.dumps(result.model_dump(mode="json"), ensure_ascii=False)
def test_backend_adapter_mapping_spec_is_contract_first_and_control_plane_only():
    """Adapter specs cover exactly the expected backend/operation pairs and
    never allow disallowed payload fields."""
    expected = {
        (BackendType.OPENVIKING, BackendOperation.INGEST_TURN),
        (BackendType.OPENVIKING, BackendOperation.COMMIT_SESSION),
        (BackendType.OPENVIKING, BackendOperation.RETRIEVE_CONTEXT),
        (BackendType.EVERMEMOS, BackendOperation.INGEST_TURN),
        (BackendType.EVERMEMOS, BackendOperation.COMMIT_SESSION),
        (BackendType.EVERMEMOS, BackendOperation.RETRIEVE_CONTEXT),
        (BackendType.OBSIDIAN, BackendOperation.CREATE_REVIEW_DRAFT),
    }
    assert {(spec.backend_type, spec.operation) for spec in ADAPTER_MAPPING_SPECS} == expected
    for spec in ADAPTER_MAPPING_SPECS:
        # No spec may whitelist a field the control plane forbids.
        assert not DISALLOWED_PAYLOAD_FIELDS.intersection(spec.allowed_payload_fields)
    openviking_commit = get_adapter_mapping_spec(BackendType.OPENVIKING, BackendOperation.COMMIT_SESSION)
    evermemos_ingest = get_adapter_mapping_spec(BackendType.EVERMEMOS, BackendOperation.INGEST_TURN)
    assert openviking_commit.adapter_method == "commit_session_v2"
    assert openviking_commit.result_model is BackendCommitResult
    assert evermemos_ingest.adapter_method == "ingest_message"
    assert evermemos_ingest.result_model is BackendWriteResult
def test_control_plane_persisted_payload_validator_rejects_content_and_raw_request():
    """Validators accept control-plane fields and reject content-bearing keys."""
    # These two calls must not raise.
    validate_control_plane_payload({"gateway_id": "gw_1", "session_id": "sess_1", "metadata": {"content_hash": "abc"}})
    validate_control_plane_persisted_payload({"gateway_id": "gw_1", "metadata": {"source_channel": "test"}})
    for blocked_key in ("content", "raw_request", "messages"):
        try:
            validate_control_plane_persisted_payload({"gateway_id": "gw_1", blocked_key: "should-not-pass"})
        except ValueError as exc:
            # The error message must name the offending key.
            assert blocked_key in str(exc)
        else:
            raise AssertionError(f"{blocked_key} should be rejected")
def test_runtime_adapter_request_may_be_transient_but_outbox_payload_is_control_plane_only():
    """Content may exist in the transient runtime payload but never in the
    persisted outbox payload."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(repo=repo)
    runtime_payload = service._apply_safety_policy(IngestRequest(**build_ingest_payload(content="TRANSIENT_ONLY_CONTENT")))
    # Runtime (in-memory) payload is allowed to carry the content.
    assert runtime_payload["content"] == "TRANSIENT_ONLY_CONTENT"
    response = asyncio.run(
        service.commit_session("sess_boundary", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    event = repo.list_outbox_events_by_job(response.job_id)[0]
    outbox_payload = service._outbox_payload(event)
    assert "content" not in outbox_payload
    assert "raw_request" not in outbox_payload
    # Must also pass the shared control-plane validator without raising.
    validate_control_plane_persisted_payload(outbox_payload)
def test_commit_and_retrieve_adapter_skeletons_return_unified_contracts():
    """Skeleton commit/retrieve adapters return unified result models with
    the expected ref types and item counts."""
    payload = {"workspace_id": "ws_1", "session_id": "sess_1", "gateway_id": "gw_1"}
    ov_commit = asyncio.run(OpenVikingClient().commit_session_v2(payload))
    ov_retrieve = asyncio.run(OpenVikingClient().retrieve_context_v2(payload))
    em_commit = EverMemOSClient().extract_profile_long_term_v2(payload)
    em_retrieve = EverMemOSClient().retrieve_context_v2(payload)
    assert isinstance(ov_commit, BackendCommitResult)
    assert isinstance(em_commit, BackendCommitResult)
    assert isinstance(ov_retrieve, BackendRetrieveResult)
    assert isinstance(em_retrieve, BackendRetrieveResult)
    assert ov_commit.status == BackendResultStatus.SUCCESS
    assert em_commit.status == BackendResultStatus.SUCCESS
    assert ov_retrieve.status == BackendResultStatus.SUCCESS
    assert em_retrieve.status == BackendResultStatus.SUCCESS
    assert ov_commit.refs[0].ref_type == MemoryRefType.SESSION_ARCHIVE
    assert {ref.ref_type for ref in em_commit.refs} == {MemoryRefType.PROFILE, MemoryRefType.LONG_TERM_MEMORY}
    assert len(ov_retrieve.items) == 1
    assert len(em_retrieve.items) == 2
def test_client_skeletons_use_normalization_contracts_and_safe_metadata():
    """Skeleton clients route through their normalization helpers and never
    serialize content-bearing fields."""
    payload = {
        "workspace_id": "ws_1",
        "user_id": "user_a",
        "session_id": "sess_contract",
        "turn_id": "turn_contract",
        "content": "TRANSIENT_CONTENT_ONLY",
        "raw_request": {"content": "TRANSIENT_CONTENT_ONLY"},
    }
    ov_client = OpenVikingClient()
    em_client = EverMemOSClient()
    ov_ingest = asyncio.run(ov_client.ingest_conversation_turn(payload))
    ov_commit = asyncio.run(ov_client.commit_session_v2(payload))
    em_ingest = em_client.ingest_message(payload)
    em_commit = em_client.extract_profile_long_term_v2(payload)
    assert isinstance(ov_ingest, BackendWriteResult)
    assert isinstance(em_ingest, BackendWriteResult)
    assert isinstance(ov_commit, BackendCommitResult)
    assert isinstance(em_commit, BackendCommitResult)
    # The skeleton results must be exactly what the normalizers produce for
    # the canonical "not configured" skipped responses.
    assert ov_ingest == ov_client._normalize_ingest_response(
        {
            "status": "skipped",
            "session_id": "sess_contract",
            "uri": "viking://sessions/sess_contract",
            "metadata": {
                "reason": "openviking_v2_ingest_adapter_not_configured",
                "schema_version": "openviking.fixture.ingest.v2",
            },
        }
    )
    assert em_ingest == em_client._normalize_ingest_response(
        {
            "status": "skipped",
            "memory_id": "turn_contract",
            "metadata": {
                "reason": "evermemos_v2_ingest_adapter_not_configured",
                "schema_version": "evermemos.fixture.ingest.v2",
            },
        }
    )
    serialized = json.dumps(
        {
            "ov_ingest": ov_ingest.model_dump(mode="json"),
            "ov_commit": ov_commit.model_dump(mode="json"),
            "em_ingest": em_ingest.model_dump(mode="json"),
            "em_commit": em_commit.model_dump(mode="json"),
        },
        ensure_ascii=False,
    )
    for blocked in ("TRANSIENT_CONTENT_ONLY", "content", "raw_request", "messages", "conversation", "transcript"):
        assert blocked not in serialized
def test_retrieve_skeletons_use_retrieve_normalization_and_safe_metadata():
    """Skeleton retrieve adapters return normalized items without leaking
    content-bearing fields into the serialized results."""
    payload = {
        "workspace_id": "ws_1",
        "user_id": "user_a",
        "session_id": "sess_retrieve_contract",
        "query": "fixture query",
        "content": "TRANSIENT_RETRIEVE_CONTENT",
    }
    ov_result = asyncio.run(OpenVikingClient().retrieve_context_v2(payload))
    em_result = EverMemOSClient().retrieve_context_v2(payload)
    assert isinstance(ov_result, BackendRetrieveResult)
    assert isinstance(em_result, BackendRetrieveResult)
    assert ov_result.status == BackendResultStatus.SUCCESS
    assert em_result.status == BackendResultStatus.SUCCESS
    assert ov_result.items[0].source_backend == BackendType.OPENVIKING
    assert em_result.items[0].source_backend == BackendType.EVERMEMOS
    assert ov_result.items[0].text
    assert em_result.items[0].ref_id
    serialized = json.dumps(
        {"ov": ov_result.model_dump(mode="json"), "em": em_result.model_dump(mode="json")},
        ensure_ascii=False,
    )
    for blocked in ("TRANSIENT_RETRIEVE_CONTENT", "content", "raw_request", "messages", "conversation", "transcript"):
        assert blocked not in serialized
def test_openviking_commit_skeleton_ref_type_is_mapped_from_fixture():
    """Skeleton commit maps its fixture ref to SESSION_ARCHIVE with a
    session-derived native id."""
    result = asyncio.run(OpenVikingClient().commit_session_v2({"session_id": "sess_ov_map"}))
    assert result.refs
    assert result.refs[0].ref_type == MemoryRefType.SESSION_ARCHIVE
    assert result.refs[0].native_id == "ov_session_summary:sess_ov_map"
def test_evermemos_skeleton_multiple_refs_are_written_by_process_outbox_event():
    """Processing the EverMemOS outbox event persists both produced refs."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        # OpenViking is stubbed to SKIPPED so only EverMemOS refs are under test.
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.SKIPPED)
        ),
        evermemos_client=EverMemOSClient(),
    )
    response = asyncio.run(
        service.commit_session("sess_em_skeleton", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    event = next(event for event in repo.list_outbox_events_by_job(response.job_id) if event.backend_type == BackendType.EVERMEMOS)
    updated = asyncio.run(service.process_outbox_event(event.id))
    refs = repo.list_memory_refs(session_id="sess_em_skeleton", backend_type=BackendType.EVERMEMOS, status=BackendRefStatus.SUCCESS)
    assert updated.status == OutboxEventStatus.SUCCESS
    assert len(refs) == 2
    assert {ref.ref_type for ref in refs} == {MemoryRefType.PROFILE, MemoryRefType.LONG_TERM_MEMORY}
def test_obsidian_review_adapter_skeleton_returns_skipped_write_result():
    """Obsidian review skeleton returns a SKIPPED CREATE_REVIEW_DRAFT result."""
    result = ObsidianReviewClient().create_review_draft_v2({"event_id": "evt_review"})
    assert isinstance(result, BackendWriteResult)
    assert result.backend_type == BackendType.OBSIDIAN
    assert result.operation == BackendOperation.CREATE_REVIEW_DRAFT
    assert result.status == BackendResultStatus.SKIPPED
def test_backend_commit_result_supports_multiple_produced_refs():
    """BackendCommitResult can carry several refs and round-trips to JSON."""
    result = BackendCommitResult(
        backend_type=BackendType.EVERMEMOS,
        status=BackendResultStatus.SUCCESS,
        refs=[
            BackendProducedRef(ref_type=MemoryRefType.PROFILE, native_id="profile_1"),
            BackendProducedRef(ref_type=MemoryRefType.LONG_TERM_MEMORY, native_uri="evermemos://memories/long_1"),
        ],
    )
    dumped = result.model_dump(mode="json")
    assert len(result.refs) == 2
    assert dumped["refs"][0]["ref_type"] == "profile"
    assert dumped["refs"][1]["native_uri"] == "evermemos://memories/long_1"
def test_backend_ref_type_mapping_and_unknown_fallback_preserves_original_type():
    """Known native ref types map cleanly; unknown EverMemOS types fall back
    to LONG_TERM_MEMORY while recording the original type in metadata."""
    mapped, metadata = map_backend_ref_type(BackendType.OPENVIKING, "context_resource")
    assert mapped == MemoryRefType.CONTEXT_RESOURCE
    assert metadata == {}
    mapped, metadata = map_backend_ref_type(BackendType.OPENVIKING, "session_summary")
    assert mapped == MemoryRefType.SESSION_ARCHIVE
    assert metadata == {}
    mapped, metadata = map_backend_ref_type(BackendType.EVERMEMOS, "preference")
    assert mapped == MemoryRefType.PROFILE
    assert metadata == {}
    mapped, metadata = map_backend_ref_type(BackendType.EVERMEMOS, "unknown_signal")
    assert mapped == MemoryRefType.LONG_TERM_MEMORY
    assert metadata["original_ref_type"] == "unknown_signal"
def test_openviking_commit_fixture_normalizes_to_backend_commit_result_without_unsafe_metadata():
    """Normalization strips content/raw_request/messages from commit metadata."""
    raw = {
        "status": "ok",
        "session_id": "sess_norm",
        "latency_ms": 18,
        # Deliberately poisoned metadata: the normalizer must drop these keys.
        "metadata": {"backend_request_id": "ov_req_1", "content": "SECRET", "raw_request": {"content": "SECRET"}},
        "result": {
            "refs": [
                {
                    "type": "session_archive",
                    "id": "ov_archive_1",
                    "uri": "viking://sessions/sess_norm",
                    "metadata": {"schema_version": "ov.v1", "messages": ["SECRET"]},
                }
            ]
        },
    }
    result = normalize_openviking_commit_response(raw)
    assert result.status == BackendResultStatus.SUCCESS
    assert result.backend_type == BackendType.OPENVIKING
    assert len(result.refs) == 1
    assert result.refs[0].ref_type == MemoryRefType.SESSION_ARCHIVE
    assert result.refs[0].native_id == "ov_archive_1"
    serialized = json.dumps(result.model_dump(mode="json"), ensure_ascii=False)
    assert "SECRET" not in serialized
    assert "raw_request" not in serialized
    assert "messages" not in serialized
def test_backend_response_fixture_files_exist_and_load():
    """Every expected backend response fixture exists, parses, and has a status."""
    names = {
        "openviking_ingest_success.json",
        "openviking_ingest_real_success.json",
        "openviking_ingest_real_error_401.json",
        "openviking_ingest_real_error_422.json",
        "openviking_ingest_real_error_500.json",
        "openviking_commit_success.json",
        "openviking_retrieve_success.json",
        "evermemos_ingest_success.json",
        "evermemos_commit_success_multiple_refs.json",
        "evermemos_retrieve_success.json",
    }
    for name in names:
        payload = load_backend_fixture(name)
        assert payload["status"]
def test_openviking_success_fixtures_normalize_without_unsafe_metadata():
    """OpenViking success fixtures normalize to SUCCESS results with no
    content-bearing keys in the serialized output."""
    ingest = normalize_openviking_ingest_response(load_backend_fixture("openviking_ingest_success.json"))
    commit = normalize_openviking_commit_response(load_backend_fixture("openviking_commit_success.json"))
    retrieve = normalize_openviking_retrieve_response(load_backend_fixture("openviking_retrieve_success.json"))
    assert ingest.status == BackendResultStatus.SUCCESS
    assert ingest.native_id == "ov_turn_fixture_1"
    assert commit.status == BackendResultStatus.SUCCESS
    assert {ref.ref_type for ref in commit.refs} == {MemoryRefType.SESSION_ARCHIVE, MemoryRefType.CONTEXT_RESOURCE}
    assert retrieve.status == BackendResultStatus.SUCCESS
    assert len(retrieve.items) == 2
    assert retrieve.items[0].source_backend == BackendType.OPENVIKING
    serialized = json.dumps(
        {
            "ingest": ingest.model_dump(mode="json"),
            "commit": commit.model_dump(mode="json"),
            "retrieve": retrieve.model_dump(mode="json"),
        },
        ensure_ascii=False,
    )
    for blocked in ("content", "raw_request", "messages", "conversation", "transcript"):
        assert blocked not in serialized
def test_evermemos_commit_fixture_normalizes_multiple_produced_refs_and_unknown_type():
    """Multiple produced refs normalize in order; unknown types fall back to
    LONG_TERM_MEMORY and record the original type."""
    raw = {
        "status": "success",
        "data": {
            "produced_refs": [
                {"ref_type": "episodic_memory", "memory_id": "episode_1", "metadata": {"confidence": 0.82}},
                # Poisoned metadata: "content" must be stripped by normalization.
                {"ref_type": "profile", "profile_id": "profile_1", "metadata": {"content": "SECRET_PROFILE"}},
                {"ref_type": "unknown_kind", "id": "long_1", "metadata": {"score": 0.9}},
            ]
        },
    }
    result = normalize_evermemos_commit_response(raw)
    assert result.status == BackendResultStatus.SUCCESS
    assert len(result.refs) == 3
    assert [ref.ref_type for ref in result.refs] == [
        MemoryRefType.EPISODIC_MEMORY,
        MemoryRefType.PROFILE,
        MemoryRefType.LONG_TERM_MEMORY,
    ]
    assert result.refs[2].metadata["original_ref_type"] == "unknown_kind"
    assert "SECRET_PROFILE" not in json.dumps(result.model_dump(mode="json"), ensure_ascii=False)
def test_evermemos_success_fixtures_normalize_without_unsafe_metadata():
    """EverMemOS success fixtures normalize to SUCCESS results with no
    content-bearing keys in the serialized output."""
    ingest = normalize_evermemos_ingest_response(load_backend_fixture("evermemos_ingest_success.json"))
    commit = normalize_evermemos_commit_response(load_backend_fixture("evermemos_commit_success_multiple_refs.json"))
    retrieve = normalize_evermemos_retrieve_response(load_backend_fixture("evermemos_retrieve_success.json"))
    assert ingest.status == BackendResultStatus.SUCCESS
    assert ingest.native_id == "em_memory_fixture_1"
    assert commit.status == BackendResultStatus.SUCCESS
    assert {ref.ref_type for ref in commit.refs} == {
        MemoryRefType.EPISODIC_MEMORY,
        MemoryRefType.PROFILE,
        MemoryRefType.LONG_TERM_MEMORY,
    }
    assert retrieve.status == BackendResultStatus.SUCCESS
    assert len(retrieve.items) == 2
    assert retrieve.items[0].source_backend == BackendType.EVERMEMOS
    assert retrieve.items[0].memory_type == "episodic_memory"
    serialized = json.dumps(
        {
            "ingest": ingest.model_dump(mode="json"),
            "commit": commit.model_dump(mode="json"),
            "retrieve": retrieve.model_dump(mode="json"),
        },
        ensure_ascii=False,
    )
    for blocked in ("content", "raw_request", "messages", "conversation", "transcript"):
        assert blocked not in serialized
def test_malformed_retrieve_response_returns_skipped_empty_result():
    """Malformed retrieve responses degrade to empty results instead of raising.

    NOTE(review): the two backends intentionally differ here — OpenViking
    returns SKIPPED for an empty dict while EverMemOS returns SUCCESS with no
    items for an unexpected data shape. These assertions pin that current
    asymmetry; confirm it is deliberate before changing either normalizer.
    """
    ov = normalize_openviking_retrieve_response({})
    em = normalize_evermemos_retrieve_response({"data": {"unexpected": "shape"}})
    assert ov.status == BackendResultStatus.SKIPPED
    assert ov.items == []
    assert em.status == BackendResultStatus.SUCCESS
    assert em.items == []
def test_ingest_response_normalizers_return_write_results_and_sanitize_metadata():
    """Ingest normalizers produce BackendWriteResult and drop content-bearing
    metadata keys ("conversation", "transcript")."""
    ov = normalize_openviking_ingest_response(
        {
            "status": "created",
            "id": "ov_turn_1",
            "uri": "viking://sessions/sess/turn",
            "metadata": {"backend_request_id": "ov_req", "conversation": "SECRET"},
        }
    )
    em = normalize_evermemos_ingest_response(
        {
            "status": "success",
            "memory_id": "em_turn_1",
            "metadata": {"trace_id": "trace_1", "transcript": "SECRET"},
        }
    )
    assert isinstance(ov, BackendWriteResult)
    assert isinstance(em, BackendWriteResult)
    assert ov.native_id == "ov_turn_1"
    assert em.native_id == "em_turn_1"
    serialized = json.dumps({"ov": ov.model_dump(mode="json"), "em": em.model_dump(mode="json")}, ensure_ascii=False)
    assert "SECRET" not in serialized
    assert "conversation" not in serialized
    assert "transcript" not in serialized
def test_backend_error_retryable_mapping():
    """Retryable classification: 429/5xx, timeouts, network errors, and unknown
    error codes retry; 4xx auth/validation statuses do not."""
    for status_code in (429, 500, 502, 503, 504):
        assert map_backend_error_to_retryable(BackendType.OPENVIKING, status_code=status_code) is True
    assert map_backend_error_to_retryable(BackendType.EVERMEMOS, error_code="timeout") is True
    assert map_backend_error_to_retryable(BackendType.EVERMEMOS, error_message="network_error: reset") is True
    # Unrecognized error codes default to retryable.
    assert map_backend_error_to_retryable(BackendType.OPENVIKING, error_code="mystery") is True
    for status_code in (400, 401, 403, 404, 422):
        assert map_backend_error_to_retryable(BackendType.EVERMEMOS, status_code=status_code) is False
def test_client_map_error_contracts_for_future_http_integration():
    """_map_error on both clients classifies response-like objects and exceptions as retryable or not."""

    class ResponseLike:
        # Minimal stand-in for an HTTP response: only status_code is read by _map_error.
        def __init__(self, status_code):
            self.status_code = status_code

        def __str__(self):
            return f"response {self.status_code}"

    ov_client = OpenVikingClient()
    em_client = EverMemOSClient()
    # Transient server/throttle statuses are retryable for both clients.
    for status_code in (429, 500, 502, 503, 504):
        assert ov_client._map_error(ResponseLike(status_code)) is True
        assert em_client._map_error(ResponseLike(status_code)) is True
    # Client errors are terminal.
    for status_code in (400, 401, 403, 404, 422):
        assert ov_client._map_error(ResponseLike(status_code)) is False
        assert em_client._map_error(ResponseLike(status_code)) is False
    # Exceptions (including unknown ones) default to retryable.
    assert ov_client._map_error(TimeoutError("timeout while reading")) is True
    assert em_client._map_error(ConnectionError("network_error connection reset")) is True
    assert ov_client._map_error(RuntimeError("unknown backend failure")) is True
def test_v2_ingest_schema_constructs():
    """IngestRequest builds from the canonical payload helper with expected defaults."""
    parsed = IngestRequest(**build_ingest_payload())
    assert parsed.workspace_id == "ws_1"
    assert parsed.request_id == "req_1"
    assert parsed.policy.allow_openviking is True
def test_ingest_service_records_two_success_refs():
    """A dual-backend ingest yields one SUCCESS memory ref per backend."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_openviking_factory,
        evermemos_client=FakeEverMemOSClient(),
    )
    response = asyncio.run(service.ingest_conversation_turn(IngestRequest(**build_ingest_payload())))
    assert response.status == "success"
    assert len(response.refs) == 2
    assert {ref.backend_type.value for ref in response.refs} == {"openviking", "evermemos"}
    assert {ref.status for ref in repo.list_memory_refs()} == {BackendRefStatus.SUCCESS}
    # Filtered lookup works with a string backend_type plus a status enum.
    assert len(repo.list_memory_refs(backend_type="openviking", status=BackendRefStatus.SUCCESS)) == 1
def test_v2_ingest_service_openviking_real_mock_success_writes_safe_memory_ref():
    """Real-mode OpenViking ingest (via httpx MockTransport) persists a ref without leaking secrets."""
    fixture = load_backend_fixture("openviking_ingest_real_success.json")

    def handler(request):
        # The outbound HTTP request must carry the raw content and the API key...
        payload = json.loads(request.content.decode())
        assert payload["content"] == "SECRET_SERVICE_REAL_CONTENT"
        assert request.headers["x-api-key"] == "ov-super-secret-token"
        return httpx.Response(200, json=fixture)

    async def real_openviking_factory():
        return OpenVikingClient(
            mode="real",
            base_url="http://openviking.test",
            api_key="ov-super-secret-token",
            transport=httpx.MockTransport(handler),
        )

    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=real_openviking_factory,
        evermemos_client=FakeEverMemOSClient(),
    )
    response = asyncio.run(
        service.ingest_conversation_turn(
            IngestRequest(**build_ingest_payload(session_id="ov_real_sess_fixture_1", content="SECRET_SERVICE_REAL_CONTENT"))
        )
    )
    ov_ref = repo.list_memory_refs(backend_type=BackendType.OPENVIKING, status=BackendRefStatus.SUCCESS)[0]
    audit_json = json.dumps([entry.model_dump(mode="json") for entry in repo.list_audit()], ensure_ascii=False)
    assert response.status == OperationStatus.SUCCESS
    assert ov_ref.native_id == "ov_real_turn_fixture_1"
    assert ov_ref.native_uri == "viking://sessions/ov_real_sess_fixture_1/turns/ov_real_turn_fixture_1"
    # ...but neither the content nor the token may survive into the stored ref or audit log.
    serialized = json.dumps(ov_ref.model_dump(mode="json"), ensure_ascii=False)
    assert "SECRET_SERVICE_REAL_CONTENT" not in serialized
    assert "ov-super-secret-token" not in serialized
    assert "raw_request" not in serialized
    assert "content" not in ov_ref.metadata
    assert "ov-super-secret-token" not in audit_json
def test_v2_ingest_service_real_mock_success_writes_openviking_and_evermemos_refs_safely():
    """Real-mode dual-backend ingest hits each transport exactly once and persists sanitized refs."""
    ov_fixture = load_backend_fixture("openviking_ingest_real_success.json")
    em_fixture = load_backend_fixture("evermemos_ingest_success.json")
    seen = {"openviking": 0, "evermemos": 0}  # per-backend call counter

    def openviking_handler(request):
        payload = json.loads(request.content.decode())
        assert payload["content"] == "SECRET_DUAL_REAL_CONTENT"
        assert request.headers["x-api-key"] == "ov-dual-token"
        seen["openviking"] += 1
        return httpx.Response(200, json=ov_fixture)

    def evermemos_handler(request):
        payload = json.loads(request.content.decode())
        assert payload["content"] == "SECRET_DUAL_REAL_CONTENT"
        # EverMemOS sends its key via both x-api-key and a bearer Authorization header.
        assert request.headers["x-api-key"] == "em-dual-token"
        assert request.headers["authorization"] == "Bearer em-dual-token"
        seen["evermemos"] += 1
        return httpx.Response(200, json=em_fixture)

    async def real_openviking_factory():
        return OpenVikingClient(
            mode="real",
            base_url="http://openviking.test",
            api_key="ov-dual-token",
            transport=httpx.MockTransport(openviking_handler),
        )

    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=real_openviking_factory,
        evermemos_client=EverMemOSClient(
            mode="real",
            base_url="http://evermemos.test",
            api_key="em-dual-token",
            transport=httpx.MockTransport(evermemos_handler),
        ),
    )
    response = asyncio.run(
        service.ingest_conversation_turn(
            IngestRequest(
                **build_ingest_payload(
                    session_id="ov_real_sess_fixture_1",
                    source_type="cli",
                    content="SECRET_DUAL_REAL_CONTENT",
                    trace={"trace_id": "trace_dual_real", "request_id": "trace_req_dual"},
                )
            )
        )
    )
    refs = repo.list_memory_refs()
    serialized_refs = json.dumps([ref.model_dump(mode="json") for ref in refs], ensure_ascii=False)
    audit_json = json.dumps([entry.model_dump(mode="json") for entry in repo.list_audit()], ensure_ascii=False)
    assert response.status == OperationStatus.SUCCESS
    assert seen == {"openviking": 1, "evermemos": 1}
    assert {ref.backend_type for ref in refs} == {BackendType.OPENVIKING, BackendType.EVERMEMOS}
    assert {ref.status for ref in refs} == {BackendRefStatus.SUCCESS}
    assert {ref.content_hash for ref in refs}
    # Trace ids may persist, but content, tokens and conversation-bearing keys must not.
    assert "trace_dual_real" in serialized_refs
    for blocked in ("SECRET_DUAL_REAL_CONTENT", "ov-dual-token", "em-dual-token", "raw_request", "messages", "conversation", "transcript"):
        assert blocked not in serialized_refs
    for blocked in ("SECRET_DUAL_REAL_CONTENT", "ov-dual-token", "em-dual-token", "raw_request", "messages", "transcript"):
        assert blocked not in audit_json
def test_ingest_service_backend_failure_is_partial_success():
    """One backend failing during ingest downgrades the response to partial_success."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_openviking_factory,
        evermemos_client=FailingEverMemOSClient(),
    )
    response = asyncio.run(service.ingest_conversation_turn(IngestRequest(**build_ingest_payload())))
    assert response.status == "partial_success"
    assert len(response.refs) == 2
    # Exactly the EverMemOS ref failed, carrying the client's error message.
    failed = [ref for ref in response.refs if ref.status == BackendRefStatus.FAILED]
    assert len(failed) == 1
    assert failed[0].backend_type.value == "evermemos"
    assert "evermemos unavailable" in failed[0].error_message
def test_ingest_service_records_two_skipped_refs_when_policy_disables_backends():
    """Disabling both backends via policy records SKIPPED refs rather than omitting them."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_openviking_factory,
        evermemos_client=FakeEverMemOSClient(),
    )
    response = asyncio.run(
        service.ingest_conversation_turn(
            IngestRequest(
                **build_ingest_payload(
                    policy={
                        "allow_openviking": False,
                        "allow_evermemos": False,
                    }
                )
            )
        )
    )
    assert response.status == "skipped"
    assert len(response.refs) == 2
    assert {ref.status for ref in response.refs} == {BackendRefStatus.SKIPPED}
    # Skipped refs are still persisted for observability.
    assert len(repo.list_memory_refs()) == 2
def test_duplicate_idempotency_key_upserts_memory_refs_without_duplicates():
    """Replaying an ingest with the same idempotency_key upserts the same refs and gateway id."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_openviking_factory,
        evermemos_client=FakeEverMemOSClient(),
    )
    first = asyncio.run(
        service.ingest_conversation_turn(
            IngestRequest(**build_ingest_payload(idempotency_key="idem_1", request_id="req_1"))
        )
    )
    # Second call changes request/event/turn identifiers but keeps the idempotency key.
    second = asyncio.run(
        service.ingest_conversation_turn(
            IngestRequest(
                **build_ingest_payload(
                    idempotency_key="idem_1",
                    request_id="req_2",
                    source_event_id="evt_changed",
                    turn_id="turn_changed",
                )
            )
        )
    )
    refs = repo.list_memory_refs()
    assert len(refs) == 2
    assert {ref.id for ref in first.refs} == {ref.id for ref in second.refs}
    assert first.gateway_id == second.gateway_id
def test_memory_ref_metadata_does_not_store_conversation_content_or_raw_request():
    """Ref metadata keeps only the content hash, never the content or raw_request payload."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_openviking_factory,
        evermemos_client=FakeEverMemOSClient(),
    )
    sensitive_content = "SECRET_CONVERSATION_CONTENT_SHOULD_NOT_BE_STORED"
    asyncio.run(
        service.ingest_conversation_turn(
            IngestRequest(
                **build_ingest_payload(
                    content=sensitive_content,
                    metadata={"channel": "cli", "raw_request": {"content": sensitive_content}},
                )
            )
        )
    )
    for ref in repo.list_memory_refs():
        metadata_json = json.dumps(ref.metadata, ensure_ascii=False)
        assert sensitive_content not in metadata_json
        assert "raw_request" not in metadata_json
        # The hash is the only trace of the content that is allowed to persist.
        assert ref.content_hash
        assert ref.content_hash in metadata_json
def test_sqlite_repository_persists_v2_memory_refs(tmp_path):
    """Refs written through the SQLite repository survive a fresh repository instance."""
    repo = SQLiteRepository(tmp_path / "memory_gateway.sqlite3")
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_openviking_factory,
        evermemos_client=FakeEverMemOSClient(),
    )
    asyncio.run(service.ingest_conversation_turn(IngestRequest(**build_ingest_payload(turn_id="turn_sqlite"))))
    # Re-open the same database file to prove persistence across instances.
    reloaded = SQLiteRepository(tmp_path / "memory_gateway.sqlite3")
    refs = reloaded.list_memory_refs(
        workspace_id="ws_1",
        backend_type="openviking",
        status=BackendRefStatus.SUCCESS,
    )
    assert len(refs) == 1
    assert refs[0].turn_id == "turn_sqlite"
def test_commit_session_creates_commit_job_and_outbox_events():
    """commit_session records an accepted job plus one pending outbox event per backend."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(repo=repo)
    response = asyncio.run(
        service.commit_session(
            "sess_commit",
            CommitRequest(
                workspace_id="ws_1",
                user_id="user_a",
                agent_id="agent_cli",
                namespace="workspace/ws_1/user/user_a",
                request_id="commit_req_1",
            ),
        )
    )
    job = repo.get_commit_job(response.job_id)
    events = repo.list_outbox_events(gateway_id=response.metadata["gateway_id"])
    assert response.status == "accepted"
    assert job is not None
    assert job.session_id == "sess_commit"
    assert job.status.value == "accepted"
    assert len(events) == 2
    assert {event.backend_type for event in events} == {BackendType.OPENVIKING, BackendType.EVERMEMOS}
    assert {event.operation for event in events} == {BackendOperation.COMMIT_SESSION}
    assert {event.status for event in events} == {OutboxEventStatus.PENDING}
def test_sqlite_repository_persists_commit_job_and_outbox_events(tmp_path):
    """Commit jobs and their outbox events survive reopening the SQLite database."""
    repo = SQLiteRepository(tmp_path / "memory_gateway.sqlite3")
    service = MemoryGatewayV2Service(repo=repo)
    response = asyncio.run(
        service.commit_session(
            "sess_commit_sqlite",
            CommitRequest(
                workspace_id="ws_1",
                user_id="user_a",
                agent_id="agent_cli",
                namespace="workspace/ws_1/user/user_a",
                idempotency_key="commit_idem_1",
            ),
        )
    )
    reloaded = SQLiteRepository(tmp_path / "memory_gateway.sqlite3")
    job = reloaded.get_commit_job(response.job_id)
    events = reloaded.list_outbox_events(gateway_id=response.metadata["gateway_id"])
    assert job is not None
    assert job.session_id == "sess_commit_sqlite"
    assert len(events) == 2
    # Events carry a pointer to the job, never an inlined payload.
    assert {event.payload_ref for event in events} == {f"commit_job:{response.job_id}"}
def test_sqlite_repository_claims_due_outbox_with_lease_fields(tmp_path):
    """Claiming via the SQLite repo persists PROCESSING status, worker lock and lease expiry."""
    repo = SQLiteRepository(tmp_path / "memory_gateway.sqlite3")
    service = MemoryGatewayV2Service(repo=repo)
    response = asyncio.run(
        service.commit_session(
            "sess_sqlite_claim",
            CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"),
        )
    )
    claimed = repo.claim_pending_outbox_events(limit=1, worker_id="sqlite_worker", lease_seconds=30)
    # Reload from disk so the assertions exercise the persisted state, not memory.
    reloaded = SQLiteRepository(tmp_path / "memory_gateway.sqlite3")
    events = reloaded.list_outbox_events_by_job(response.job_id)
    assert len(claimed) == 1
    assert sum(1 for event in events if event.status == OutboxEventStatus.PROCESSING) == 1
    claimed_event = next(event for event in events if event.status == OutboxEventStatus.PROCESSING)
    assert claimed_event.locked_by == "sqlite_worker"
    assert claimed_event.lease_expires_at is not None
def test_outbox_event_does_not_store_conversation_content_or_raw_request():
    """Outbox events reference the commit job instead of embedding request content."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(repo=repo)
    sensitive_content = "SECRET_COMMIT_CONTENT_SHOULD_NOT_BE_STORED"
    response = asyncio.run(
        service.commit_session(
            "sess_commit",
            CommitRequest(
                workspace_id="ws_1",
                user_id="user_a",
                agent_id="agent_cli",
                namespace="workspace/ws_1/user/user_a",
                metadata={"raw_request": {"content": sensitive_content}},
            ),
        )
    )
    for event in repo.list_outbox_events(gateway_id=response.metadata["gateway_id"]):
        event_json = json.dumps(event.model_dump(mode="json"), ensure_ascii=False)
        assert sensitive_content not in event_json
        assert "raw_request" not in event_json
        assert event.payload_ref == f"commit_job:{response.job_id}"
def test_retrieve_response_contract_contains_items_refs_conflicts_trace_id_status():
    """Retrieve responses expose the full v2 contract fields and echo the request trace id."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_openviking_factory,
        evermemos_client=FakeEverMemOSClient(),
    )
    # Seed a turn so retrieval has something to return.
    asyncio.run(service.ingest_conversation_turn(IngestRequest(**build_ingest_payload())))
    response = asyncio.run(
        service.retrieve_context(
            RetrieveRequest(
                workspace_id="ws_1",
                user_id="user_a",
                agent_id="agent_cli",
                session_id="sess_1",
                query="remember",
                metadata={"trace_id": "trace_1"},
            )
        )
    )
    dumped = response.model_dump()
    assert set(["items", "refs", "conflicts", "trace_id", "status"]).issubset(dumped)
    assert response.trace_id == "trace_1"
    assert response.status.value == "success"
    assert len(response.items) == len(response.refs)
    assert response.conflicts == []
def test_process_commit_job_success_updates_job_and_writes_memory_refs():
    """Processing a commit job with two successful backends finalizes the job and writes refs."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.SUCCESS, native_id="ov_commit_1", native_uri="viking://sessions/sess_commit")
        ),
        evermemos_client=FakeCommitEverMemOSClient(
            commit_result(BackendType.EVERMEMOS, BackendResultStatus.SUCCESS, native_id="em_commit_1", native_uri="evermemos://memories/em_commit_1")
        ),
    )
    response = asyncio.run(
        service.commit_session(
            "sess_commit",
            CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli", namespace="workspace/ws_1/user/user_a"),
        )
    )
    job = asyncio.run(service.process_commit_job(response.job_id))
    events = repo.list_outbox_events_by_job(response.job_id)
    refs = repo.list_memory_refs(session_id="sess_commit", status=BackendRefStatus.SUCCESS)
    assert job.status.value == "success"
    assert job.started_at is not None
    assert job.finished_at is not None
    assert job.created_refs_count == 2
    assert {event.status for event in events} == {OutboxEventStatus.SUCCESS}
    assert len(refs) == 2
    assert {ref.backend_type for ref in refs} == {BackendType.OPENVIKING, BackendType.EVERMEMOS}
def test_process_outbox_event_writes_multiple_produced_memory_refs():
    """A single outbox event can fan out into several refs, all sanitized of content keys."""
    repo = InMemoryRepository()
    sensitive_content = "SECRET_PRODUCED_REF_CONTENT"
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            BackendCommitResult(
                backend_type=BackendType.OPENVIKING,
                operation=BackendOperation.COMMIT_SESSION,
                status=BackendResultStatus.SUCCESS,
                # Backend deliberately smuggles sensitive data into every metadata slot.
                refs=[
                    BackendProducedRef(
                        ref_type=MemoryRefType.SESSION_ARCHIVE,
                        native_id="ov_session_archive_1",
                        native_uri="viking://sessions/sess_multi",
                        metadata={"backend_request_id": "req_ov_1", "content": sensitive_content},
                    ),
                    BackendProducedRef(
                        ref_type=MemoryRefType.PROFILE,
                        native_id="ov_profile_1",
                        metadata={"source_channel": "worker", "raw_request": {"content": sensitive_content}},
                    ),
                ],
                metadata={"latency_ms": 12, "messages": [sensitive_content]},
            )
        ),
    )
    response = asyncio.run(
        service.commit_session(
            "sess_multi",
            CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli", namespace="workspace/ws_1/user/user_a"),
        )
    )
    event = next(event for event in repo.list_outbox_events_by_job(response.job_id) if event.backend_type == BackendType.OPENVIKING)
    updated = asyncio.run(service.process_outbox_event(event.id))
    refs = repo.list_memory_refs(session_id="sess_multi", backend_type=BackendType.OPENVIKING, status=BackendRefStatus.SUCCESS)
    assert updated.status == OutboxEventStatus.SUCCESS
    assert len(refs) == 2
    assert {ref.ref_type for ref in refs} == {MemoryRefType.SESSION_ARCHIVE, MemoryRefType.PROFILE}
    assert {ref.native_id for ref in refs} == {"ov_session_archive_1", "ov_profile_1"}
    # Nothing the backend injected may leak into the stored refs.
    for ref in refs:
        serialized = json.dumps(ref.model_dump(mode="json"), ensure_ascii=False)
        assert sensitive_content not in serialized
        assert "raw_request" not in serialized
        assert "messages" not in serialized
        assert "conversation" not in serialized
        assert "transcript" not in serialized
def test_process_outbox_event_writes_same_ref_type_with_different_native_ids():
    """Two produced refs sharing a ref_type still persist as two distinct rows."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            BackendCommitResult(
                backend_type=BackendType.OPENVIKING,
                operation=BackendOperation.COMMIT_SESSION,
                status=BackendResultStatus.SUCCESS,
                refs=[
                    BackendProducedRef(ref_type=MemoryRefType.CONTEXT_RESOURCE, native_id="resource_1"),
                    BackendProducedRef(ref_type=MemoryRefType.CONTEXT_RESOURCE, native_id="resource_2"),
                ],
            )
        ),
    )
    response = asyncio.run(
        service.commit_session("sess_same_type", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    event = next(event for event in repo.list_outbox_events_by_job(response.job_id) if event.backend_type == BackendType.OPENVIKING)
    asyncio.run(service.process_outbox_event(event.id))
    refs = repo.list_memory_refs(session_id="sess_same_type", backend_type=BackendType.OPENVIKING, ref_type=MemoryRefType.CONTEXT_RESOURCE)
    assert len(refs) == 2
    assert {ref.native_id for ref in refs} == {"resource_1", "resource_2"}
    # Distinct native ids must yield distinct gateway ref ids.
    assert len({ref.id for ref in refs}) == 2
def test_memory_ref_id_uses_stable_fallback_when_native_ref_is_missing():
    """Refs without native ids still get distinct gateway ids (derived from metadata)."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            BackendCommitResult(
                backend_type=BackendType.OPENVIKING,
                operation=BackendOperation.COMMIT_SESSION,
                status=BackendResultStatus.SUCCESS,
                # Neither ref carries native_id/native_uri; only metadata differs.
                refs=[
                    BackendProducedRef(ref_type=MemoryRefType.SESSION_ARCHIVE, metadata={"stable_key": "summary_a"}),
                    BackendProducedRef(ref_type=MemoryRefType.SESSION_ARCHIVE, metadata={"stable_key": "summary_b"}),
                ],
            )
        ),
    )
    response = asyncio.run(
        service.commit_session("sess_stable_key", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    event = next(event for event in repo.list_outbox_events_by_job(response.job_id) if event.backend_type == BackendType.OPENVIKING)
    asyncio.run(service.process_outbox_event(event.id))
    refs = repo.list_memory_refs(session_id="sess_stable_key", backend_type=BackendType.OPENVIKING, ref_type=MemoryRefType.SESSION_ARCHIVE)
    assert len(refs) == 2
    assert len({ref.id for ref in refs}) == 2
    assert {ref.metadata["stable_key"] for ref in refs} == {"summary_a", "summary_b"}
def test_process_outbox_event_keeps_single_native_ref_fallback_compatible():
    """Legacy single-ref commit results (native_id/uri at the top level) still produce one ref."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.SUCCESS, native_id="ov_single", native_uri="viking://sessions/ov_single")
        ),
    )
    response = asyncio.run(
        service.commit_session("sess_single_fallback", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    event = next(event for event in repo.list_outbox_events_by_job(response.job_id) if event.backend_type == BackendType.OPENVIKING)
    asyncio.run(service.process_outbox_event(event.id))
    refs = repo.list_memory_refs(session_id="sess_single_fallback", backend_type=BackendType.OPENVIKING, status=BackendRefStatus.SUCCESS)
    assert len(refs) == 1
    assert refs[0].ref_type == MemoryRefType.SESSION_ARCHIVE
    assert refs[0].native_id == "ov_single"
def test_process_commit_job_one_success_one_failed_is_partial_success():
    """One succeeding and one terminally failing backend yields a partial_success job."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.SUCCESS, native_id="ov_commit_1")
        ),
        evermemos_client=FakeCommitEverMemOSClient(
            commit_result(BackendType.EVERMEMOS, BackendResultStatus.FAILED, retryable=False, error_message="evermemos failed")
        ),
    )
    response = asyncio.run(
        service.commit_session("sess_partial", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    job = asyncio.run(service.process_commit_job(response.job_id))
    events = repo.list_outbox_events_by_job(response.job_id)
    assert job.status.value == "partial_success"
    assert job.created_refs_count == 1
    assert "evermemos failed" in job.error_message
    # Non-retryable failure goes straight to dead-letter; the other event succeeds.
    assert {event.status for event in events} == {OutboxEventStatus.SUCCESS, OutboxEventStatus.DEAD_LETTER}
def test_process_commit_job_two_failed_is_failed():
    """Both backends failing terminally marks the job failed with both error messages."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.FAILED, retryable=False, error_message="openviking failed")
        ),
        evermemos_client=FakeCommitEverMemOSClient(
            commit_result(BackendType.EVERMEMOS, BackendResultStatus.FAILED, retryable=False, error_message="evermemos failed")
        ),
    )
    response = asyncio.run(
        service.commit_session("sess_failed", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    job = asyncio.run(service.process_commit_job(response.job_id))
    assert job.status.value == "failed"
    assert job.created_refs_count == 0
    # The aggregated job error mentions each backend's failure.
    assert "openviking failed" in job.error_message
    assert "evermemos failed" in job.error_message
def test_retryable_failed_outbox_event_requeues_with_next_retry():
    """A retryable failure returns the event to PENDING with a scheduled retry time."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.FAILED, retryable=True, error_message="temporary openviking failure")
        ),
    )
    response = asyncio.run(
        service.commit_session("sess_retry", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    event = next(event for event in repo.list_outbox_events_by_job(response.job_id) if event.backend_type == BackendType.OPENVIKING)
    updated = asyncio.run(service.process_outbox_event(event.id))
    assert updated.status == OutboxEventStatus.PENDING
    assert updated.attempt_count == 1
    assert updated.next_retry_at is not None
    assert "temporary openviking failure" in updated.last_error
def test_process_pending_outbox_events_processes_pending_batch():
    """The batch processor drains all pending events and writes their refs."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.SUCCESS, native_id="ov_commit_1")
        ),
        evermemos_client=FakeCommitEverMemOSClient(
            commit_result(BackendType.EVERMEMOS, BackendResultStatus.SUCCESS, native_id="em_commit_1")
        ),
    )
    asyncio.run(
        service.commit_session("sess_batch", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    processed = asyncio.run(service.process_pending_outbox_events())
    assert len(processed) == 2
    assert {event.status for event in processed} == {OutboxEventStatus.SUCCESS}
    assert len(repo.list_memory_refs(session_id="sess_batch", status=BackendRefStatus.SUCCESS)) == 2
def test_retryable_failed_outbox_event_exceeding_max_attempts_dead_letters():
    """A retryable failure dead-letters once attempt_count reaches max_attempts."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.FAILED, retryable=True, error_message="still failing")
        ),
    )
    response = asyncio.run(
        service.commit_session("sess_dead", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    event = next(event for event in repo.list_outbox_events_by_job(response.job_id) if event.backend_type == BackendType.OPENVIKING)
    # Force the budget down so the very first failure exhausts it.
    event.max_attempts = 1
    repo.save_outbox_event(event)
    updated = asyncio.run(service.process_outbox_event(event.id))
    assert updated.status == OutboxEventStatus.DEAD_LETTER
    assert updated.attempt_count == 1
    assert updated.next_retry_at is None
def test_commit_pipeline_metadata_does_not_store_content_or_raw_request():
    """The full commit pipeline must never persist raw content or raw_request payloads.

    Improvement: each event/ref was serialized twice (one json.dumps per
    assertion); serialize once per object and assert against that single form.
    """
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.SUCCESS, native_id="ov_commit_1")
        ),
        evermemos_client=FakeCommitEverMemOSClient(
            commit_result(BackendType.EVERMEMOS, BackendResultStatus.SUCCESS, native_id="em_commit_1")
        ),
    )
    sensitive_content = "SECRET_COMMIT_PIPELINE_CONTENT_SHOULD_NOT_BE_STORED"
    response = asyncio.run(
        service.commit_session(
            "sess_secure",
            CommitRequest(
                workspace_id="ws_1",
                user_id="user_a",
                agent_id="agent_cli",
                metadata={"raw_request": {"content": sensitive_content}},
            ),
        )
    )
    asyncio.run(service.process_commit_job(response.job_id))
    for event in repo.list_outbox_events_by_job(response.job_id):
        event_json = json.dumps(event.model_dump(mode="json"), ensure_ascii=False)
        assert sensitive_content not in event_json
        assert "raw_request" not in event_json
    for ref in repo.list_memory_refs(session_id="sess_secure"):
        ref_json = json.dumps(ref.model_dump(mode="json"), ensure_ascii=False)
        assert sensitive_content not in ref_json
        assert "raw_request" not in ref_json
def test_claim_pending_outbox_events_only_claims_due_pending_events():
    """Claiming skips events whose next_retry_at lies in the future."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(repo=repo)
    response = asyncio.run(
        service.commit_session("sess_claim", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    events = repo.list_outbox_events_by_job(response.job_id)
    # Push one event's retry time into the future so it is not yet due.
    delayed = events[0]
    delayed.next_retry_at = datetime.now(timezone.utc) + timedelta(minutes=5)
    repo.save_outbox_event(delayed)
    claimed = repo.claim_pending_outbox_events(limit=10, worker_id="worker_claim", lease_seconds=30)
    assert len(claimed) == 1
    assert claimed[0].id != delayed.id
    assert claimed[0].status == OutboxEventStatus.PROCESSING
    assert claimed[0].locked_by == "worker_claim"
    assert claimed[0].lease_expires_at is not None
def test_next_retry_not_due_event_is_not_claimed():
    """When every event's retry time is in the future, nothing is claimed and all stay PENDING."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(repo=repo)
    response = asyncio.run(
        service.commit_session("sess_retry_wait", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    for event in repo.list_outbox_events_by_job(response.job_id):
        event.next_retry_at = datetime.now(timezone.utc) + timedelta(minutes=5)
        repo.save_outbox_event(event)
    claimed = repo.claim_pending_outbox_events(limit=10, worker_id="worker_wait", lease_seconds=30)
    assert claimed == []
    assert {event.status for event in repo.list_outbox_events_by_job(response.job_id)} == {OutboxEventStatus.PENDING}
def test_expired_processing_event_is_released_to_pending():
    """Expired leases release PROCESSING events back to PENDING with lock fields cleared."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(repo=repo)
    response = asyncio.run(
        service.commit_session("sess_expired", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    claimed = repo.claim_pending_outbox_events(limit=1, worker_id="worker_old", lease_seconds=1)
    assert len(claimed) == 1
    # Pretend the clock advanced past the 1-second lease instead of sleeping.
    released = repo.release_expired_processing_events(datetime.now(timezone.utc) + timedelta(seconds=2))
    assert len(released) == 1
    assert released[0].status == OutboxEventStatus.PENDING
    assert released[0].locked_by is None
    assert released[0].lease_expires_at is None
def test_process_pending_outbox_events_uses_claim_and_does_not_process_existing_lock():
    """The batch processor respects another worker's active lease."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.SUCCESS, native_id="ov_claimed")
        ),
        evermemos_client=FakeCommitEverMemOSClient(
            commit_result(BackendType.EVERMEMOS, BackendResultStatus.SUCCESS, native_id="em_claimed")
        ),
    )
    response = asyncio.run(
        service.commit_session("sess_no_double", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    # worker_a grabs one event with a long lease before worker_b runs.
    externally_claimed = repo.claim_pending_outbox_events(limit=1, worker_id="worker_a", lease_seconds=300)[0]
    processed = asyncio.run(service.process_pending_outbox_events(worker_id="worker_b"))
    events = repo.list_outbox_events_by_job(response.job_id)
    assert len(processed) == 1
    assert sum(1 for event in events if event.status == OutboxEventStatus.SUCCESS) == 1
    still_locked = next(event for event in events if event.id == externally_claimed.id)
    assert still_locked.status == OutboxEventStatus.PROCESSING
    assert still_locked.locked_by == "worker_a"
def test_terminal_outbox_statuses_clear_lock_fields():
    """SUCCESS, SKIPPED and DEAD_LETTER outcomes all release locked_by/lease fields."""
    repo = InMemoryRepository()
    # First service: one backend succeeds, the other is skipped.
    success_service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.SUCCESS, native_id="ov_lock_clear")
        ),
        evermemos_client=FakeCommitEverMemOSClient(
            commit_result(BackendType.EVERMEMOS, BackendResultStatus.SKIPPED)
        ),
    )
    response = asyncio.run(
        success_service.commit_session("sess_lock_clear", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    processed = asyncio.run(success_service.process_pending_outbox_events(worker_id="worker_lock"))
    assert {event.status for event in processed} == {OutboxEventStatus.SUCCESS, OutboxEventStatus.SKIPPED}
    assert all(event.locked_by is None for event in processed)
    assert all(event.lease_expires_at is None for event in processed)
    # Second service (sharing the repo): a fatal failure that should dead-letter.
    fail_service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.FAILED, retryable=False, error_message="fatal")
        ),
    )
    failed = asyncio.run(
        fail_service.commit_session("sess_dead_lock", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    event = next(event for event in repo.list_outbox_events_by_job(failed.job_id) if event.backend_type == BackendType.OPENVIKING)
    updated = asyncio.run(fail_service.process_outbox_event(event.id))
    assert updated.status == OutboxEventStatus.DEAD_LETTER
    assert updated.locked_by is None
    assert updated.lease_expires_at is None
    # Sanity: the first job's events are still present in the shared repo.
    assert repo.list_outbox_events_by_job(response.job_id)
def test_retryable_failed_outbox_event_clears_lock_when_requeued():
    """A retryable failure requeues the event with its worker lock released."""
    repo = InMemoryRepository()
    service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.FAILED, retryable=True, error_message="temporary")
        ),
    )
    response = asyncio.run(
        service.commit_session("sess_retry_lock", CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"))
    )
    updated = asyncio.run(
        service.process_outbox_event(
            next(event.id for event in repo.list_outbox_events_by_job(response.job_id) if event.backend_type == BackendType.OPENVIKING)
        )
    )
    assert updated.status == OutboxEventStatus.PENDING
    assert updated.next_retry_at is not None
    assert updated.locked_by is None
    assert updated.lease_expires_at is None
def test_job_query_api_returns_job_status_and_outbox_summary():
    """GET /v2/jobs/{id} reports the job status plus an outbox event summary.

    Improvement: drop the `monkeypatch` fixture, which was requested but never
    used (the module-level service is assigned directly).
    """
    import memory_gateway.api_v2 as api_v2

    repo = InMemoryRepository()
    api_v2.v2_service = MemoryGatewayV2Service(repo=repo)
    commit_response = asyncio.run(
        api_v2.v2_service.commit_session(
            "sess_job_api",
            CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli", namespace="workspace/ws_1/user/user_a"),
        )
    )
    app = FastAPI()
    # Bypass API-key auth for the in-process test app.
    app.dependency_overrides[verify_api_key_compat] = lambda: None
    app.include_router(api_v2.router)

    async def get_request():
        async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
            return await client.get(f"/v2/jobs/{commit_response.job_id}")

    response = asyncio.run(asyncio.wait_for(get_request(), timeout=2))
    assert response.status_code == 200
    payload = response.json()
    assert payload["job_id"] == commit_response.job_id
    assert payload["status"] == "accepted"
    assert payload["outbox_summary"]["total_events"] == 2
    assert payload["outbox_summary"]["pending_events"] == 2
def test_admin_process_outbox_endpoint_triggers_pending_processing():
    """POST /v2/admin/outbox/process drains pending events and reports the summary.

    Improvement: drop the `monkeypatch` fixture, which was requested but never
    used (the module-level service is assigned directly).
    """
    import memory_gateway.api_v2 as api_v2

    repo = InMemoryRepository()
    api_v2.v2_service = MemoryGatewayV2Service(
        repo=repo,
        openviking_client_factory=fake_commit_openviking_factory(
            commit_result(BackendType.OPENVIKING, BackendResultStatus.SUCCESS, native_id="ov_admin")
        ),
        evermemos_client=FakeCommitEverMemOSClient(
            commit_result(BackendType.EVERMEMOS, BackendResultStatus.SUCCESS, native_id="em_admin")
        ),
    )
    asyncio.run(
        api_v2.v2_service.commit_session(
            "sess_admin",
            CommitRequest(workspace_id="ws_1", user_id="user_a", agent_id="agent_cli"),
        )
    )
    app = FastAPI()
    # Bypass API-key auth for the in-process test app.
    app.dependency_overrides[verify_api_key_compat] = lambda: None
    app.include_router(api_v2.router)

    async def post_request():
        async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
            return await client.post("/v2/admin/outbox/process?limit=10&worker_id=test_worker")

    response = asyncio.run(asyncio.wait_for(post_request(), timeout=2))
    assert response.status_code == 200
    payload = response.json()
    assert payload["worker_id"] == "test_worker"
    assert payload["processed_count"] == 2
    assert payload["outbox_summary"]["success_events"] == 2
def test_worker_v2_cli_processes_once_and_prints_control_plane_summary(monkeypatch, capsys):
    """The worker CLI forwards its flags to the service and emits a JSON
    control-plane summary free of any payload fields."""
    import memory_gateway.worker_v2 as worker_v2

    class StubWorkerService:
        async def process_pending_outbox_events_summary(self, limit: int, worker_id: str, lease_seconds: int):
            # The CLI must pass its parsed flags through unchanged.
            assert limit == 7
            assert worker_id == "cli_worker"
            assert lease_seconds == 45
            return OutboxProcessResponse(
                status=OperationStatus.SUCCESS,
                worker_id=worker_id,
                processed_count=2,
            )

    monkeypatch.setattr(worker_v2, "v2_service", StubWorkerService())

    argv = ["--limit", "7", "--worker-id", "cli_worker", "--lease-seconds", "45"]
    assert worker_v2.main(argv) == 0

    summary = json.loads(capsys.readouterr().out)
    assert summary["worker_id"] == "cli_worker"
    assert summary["processed_count"] == 2
    # Control-plane output must never leak message content or raw requests.
    serialized = json.dumps(summary)
    assert "content" not in serialized
    assert "raw_request" not in serialized
def test_v2_ingest_router_accepts_legal_request(monkeypatch):
    """POST /v2/conversations/ingest accepts a well-formed request and returns refs.

    Fix: ``api_v2.v2_service`` was assigned directly, leaking the fake-backed
    service into later tests; ``monkeypatch.setattr`` restores the original
    global on teardown (consistent with the other router tests in this file).
    """
    import memory_gateway.api_v2 as api_v2

    monkeypatch.setattr(
        api_v2,
        "v2_service",
        MemoryGatewayV2Service(
            repo=InMemoryRepository(),
            openviking_client_factory=fake_openviking_factory,
            evermemos_client=FakeEverMemOSClient(),
        ),
    )
    app = FastAPI()
    # Bypass API-key auth for the in-process test client.
    app.dependency_overrides[verify_api_key_compat] = lambda: None
    app.include_router(api_v2.router)

    async def post_request():
        async with AsyncClient(transport=ASGITransport(app=app), base_url="http://test") as client:
            return await client.post("/v2/conversations/ingest", json=build_ingest_payload(turn_id="turn_router"))

    response = asyncio.run(asyncio.wait_for(post_request(), timeout=2))
    assert response.status_code == 200
    payload = response.json()
    assert payload["turn_id"] == "turn_router"
    # One ref per backend (OpenViking + EverMemOS).
    assert len(payload["refs"]) == 2