Files
memory-gateway/pipeline/jobs/ingest_case.py

42 lines
1.4 KiB
Python

"""Batch-ingest mock case files and emit normalized case JSON documents."""
from __future__ import annotations
import json
from dataclasses import asdict
from pathlib import Path
from pipeline.transforms.normalize_case import load_and_normalize_case
def ingest_cases(input_dir: str | Path, output_dir: str | Path) -> list[Path]:
    """Normalize every ``*.json`` case found under *input_dir*.

    Each source file is run through ``load_and_normalize_case`` and written
    to *output_dir* as ``<case id>.json`` (the directory is created if
    needed). Sources are processed in sorted path order.

    Returns:
        The destination paths, in the order they were written.
    """
    src_root = Path(input_dir)
    dst_root = Path(output_dir)
    dst_root.mkdir(parents=True, exist_ok=True)

    outputs: list[Path] = []
    for source in sorted(src_root.rglob("*.json")):
        case = load_and_normalize_case(source)
        target = dst_root / f"{case.id}.json"
        # Serialize first, then write in one shot; same bytes as json.dump.
        payload = json.dumps(asdict(case), ensure_ascii=False, indent=2)
        target.write_text(payload, encoding="utf-8")
        outputs.append(target)
    return outputs
def main() -> None:
    """CLI entry point: normalize a directory of mock case JSON files.

    Reads ``--input-dir`` and ``--output-dir`` from argv, runs
    :func:`ingest_cases`, then prints a count followed by each written path.
    """
    import argparse

    parser = argparse.ArgumentParser(description="Normalize a directory of mock case JSON files.")
    parser.add_argument("--input-dir", default="evaluation/datasets/mock_cases", help="Directory containing raw mock case files")
    parser.add_argument("--output-dir", default="evaluation/datasets/normalized_cases", help="Directory to write normalized case files")
    opts = parser.parse_args()

    results = ingest_cases(opts.input_dir, opts.output_dir)
    print(f"normalized_cases={len(results)}")
    for dest in results:
        print(dest)


if __name__ == "__main__":
    main()