File size: 1,123 Bytes
ba18ff2 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 |
#!/usr/bin/env python3
"""
DeepSeek MCP Server
A Model Context Protocol server that provides access to a local DeepSeek 7B model.
"""
import asyncio
import os
import sys
from pathlib import Path
from mcp_interface import MCPLLMInterface
def main():
    """Locate the bundled DeepSeek GGUF model and serve it over MCP.

    Exits with status 1 when the model file is missing or the server
    dies with an unexpected error; a Ctrl-C shutdown exits normally.
    """
    # Model is expected alongside this script, under models/.
    gguf_path = Path(__file__).parent / "models" / "deepseek-llm-7b-chat-Q6_K.gguf"

    # All diagnostics go to stderr so stdout stays free for the MCP protocol.
    if not gguf_path.exists():
        print(f"Error: Model file not found at {gguf_path}", file=sys.stderr)
        print("Please ensure the DeepSeek model is placed in the models/ directory.", file=sys.stderr)
        sys.exit(1)

    print(f"Loading model from: {gguf_path}", file=sys.stderr)

    # Hand the model path to the MCP interface and drive it on a fresh event loop.
    server = MCPLLMInterface(str(gguf_path))
    try:
        asyncio.run(server.run())
    except KeyboardInterrupt:
        # Operator-initiated stop is a clean exit, not an error.
        print("\nServer stopped by user", file=sys.stderr)
    except Exception as exc:
        # Top-level boundary: report and signal failure to the caller.
        print(f"Error running server: {exc}", file=sys.stderr)
        sys.exit(1)
# Run the server only when executed as a script, not on import.
if __name__ == "__main__":
    main()