"""Manual smoke test for a Minimax LLM endpoint exposed through an
OpenAI-compatible API, driven via the livekit-agents OpenAI plugin.

Run directly: sends one streaming chat request and prints the deltas.
The whole run is bounded by a 30-second timeout.
"""

import asyncio
import logging
import os

import httpx
from dotenv import load_dotenv
from openai import AsyncClient as OpenAIAsyncClient

from livekit.agents.llm import ChatContext
from livekit.plugins import openai

# Configure logging to see what's happening
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("test-minimax")


async def test_minimax():
    """Send one streaming chat request to the Minimax endpoint and print
    the response deltas as they arrive.

    Reads MINIMAX_LLM_BASE_URL, MINIMAX_LLM_MODEL and MINIMAX_API_KEY from
    the environment (via .env), falling back to the defaults used in
    custom_agent.py. Network errors are logged rather than raised.
    """
    print("Loading .env...")
    load_dotenv()

    # Configuration from environment or defaults from custom_agent.py
    MINIMAX_BASE_URL = os.getenv("MINIMAX_LLM_BASE_URL", "https://oai.bwgdi.com/v1")
    MINIMAX_MODEL = os.getenv("MINIMAX_LLM_MODEL", "MiniMaxAI")
    # SECURITY: hardcoded API key fallback from custom_agent.py — this secret
    # is committed to source control; rotate it and require the env var instead.
    API_KEY = os.getenv("MINIMAX_API_KEY", "sk-orez64WkG1NkfksB5j_hGA")

    print(f"Connecting to Minimax at {MINIMAX_BASE_URL} using model {MINIMAX_MODEL}")

    # SECURITY: verify=False disables TLS certificate verification — acceptable
    # only for this local smoke test; do not copy into production code.
    http_client = httpx.AsyncClient(verify=False)

    try:
        # Create the OpenAI AsyncClient with the custom HTTP client
        openai_client = OpenAIAsyncClient(
            api_key=API_KEY,
            base_url=MINIMAX_BASE_URL,
            http_client=http_client,
        )

        llm = openai.LLM(
            model=MINIMAX_MODEL,
            client=openai_client,
        )

        print("Creating ChatContext...")
        chat_ctx = ChatContext()
        chat_ctx.add_message(
            content="Hello! Can you introduce yourself? \nPlease reply in Chinese.",
            role="user",
        )

        print(f"\n--- Testing Streaming Chat ---")
        print(f"Request: {chat_ctx.items[-1].content}")
        print("Response: ", end="", flush=True)

        try:
            print("\nCalling llm.chat()...")
            stream = llm.chat(chat_ctx=chat_ctx)
            print("Iterating over stream...")
            async for chunk in stream:
                # Only token deltas with actual content are printed; other
                # chunk types (e.g. role/usage frames) carry no text.
                if chunk.delta and chunk.delta.content:
                    print(chunk.delta.content, end="", flush=True)
            print("\n--- Test Completed Successfully ---")
        except Exception as e:
            # Top-level boundary for the request: log and continue to cleanup.
            logger.error(f"\nTest failed with error: {e}")
    finally:
        # FIX: the original leaked the httpx client; close it so the event
        # loop shuts down cleanly without "unclosed client" warnings.
        await http_client.aclose()


if __name__ == "__main__":
    print("Starting...")
    try:
        # Bound the whole test so a hung endpoint cannot stall the script.
        asyncio.run(asyncio.wait_for(test_minimax(), timeout=30))
    except asyncio.TimeoutError:
        print("\nTest timed out after 30 seconds.")
    except Exception as e:
        print(f"\nAn error occurred: {e}")