#!/usr/bin/env python3
"""
DeepSeek MCP Server

A Model Context Protocol server that provides access to a local DeepSeek 7B model.
"""

import asyncio
import sys
from pathlib import Path

from mcp_interface import MCPLLMInterface


def main():
    """Main entry point for the MCP server."""
    # Resolve the expected model path relative to this script
    model_dir = Path(__file__).parent / "models"
    model_path = model_dir / "deepseek-llm-7b-chat-Q6_K.gguf"

    if not model_path.exists():
        # Status messages go to stderr; stdout is reserved for MCP protocol traffic
        print(f"Error: Model file not found at {model_path}", file=sys.stderr)
        print("Please ensure the DeepSeek model is placed in the models/ directory.", file=sys.stderr)
        sys.exit(1)

    print(f"Loading model from: {model_path}", file=sys.stderr)

    # Create and run the MCP interface
    interface = MCPLLMInterface(str(model_path))
    try:
        asyncio.run(interface.run())
    except KeyboardInterrupt:
        print("\nServer stopped by user", file=sys.stderr)
    except Exception as e:
        print(f"Error running server: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
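
# --- Usage note (illustrative sketch, not part of this repository) ----------
# An MCP stdio server like this one is normally launched by the client rather
# than run by hand. Assuming a Claude Desktop setup, a hypothetical entry in
# claude_desktop_config.json could point the client at this script:
#
#   {
#     "mcpServers": {
#       "deepseek": {
#         "command": "python",
#         "args": ["/absolute/path/to/main.py"]
#       }
#     }
#   }
#
# The server name "deepseek" and the paths above are assumptions; adjust them
# to the local install. The client then spawns the process and speaks the MCP
# protocol over stdin/stdout, which is why this script keeps its own logging
# on stderr.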