File size: 4,167 Bytes
ba18ff2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
#!/usr/bin/env python3
"""
Test script for DeepSeek MCP Server

This script tests the MCP server functionality by simulating MCP client interactions.
"""

import asyncio
import json
import sys
import logging
from pathlib import Path
from typing import Dict, Any

# Setup logging: timestamped, level-tagged INFO messages shared by every test below.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

async def test_mcp_interface():
    """Exercise the MCP interface end-to-end: config, model load, chat, generate.

    Returns:
        bool: True if every step succeeded, False otherwise. Any failure is
        logged with a full traceback instead of just the exception message.
    """
    try:
        # Imported lazily so a missing module is reported as a test failure
        # rather than crashing the whole script at import time.
        from mcp_interface import MCPLLMInterface
        from config import config

        logger.info("Testing MCP Interface...")
        logger.info(f"Configuration:\n{config}")

        # Fail fast on an invalid configuration before touching the model.
        config.validate()
        logger.info("Configuration validation passed")

        # Create interface
        interface = MCPLLMInterface(config.model_path)

        # Model loading is the slow/expensive step; run it explicitly first.
        logger.info("Testing model loading...")
        await interface._load_model()
        logger.info("Model loaded successfully")

        # Smoke-test the chat tool with a small token budget.
        logger.info("Testing chat functionality...")
        chat_args = {
            "message": "Hello! Can you tell me a short joke?",
            "max_tokens": 100,
            "temperature": 0.7,
        }

        chat_response = await interface._handle_chat(chat_args)
        logger.info(f"Chat response: {chat_response[0].text}")

        # Smoke-test the raw text-generation tool.
        logger.info("Testing text generation...")
        gen_args = {
            "prompt": "The future of AI is",
            "max_tokens": 50,
            "temperature": 0.8,
        }

        gen_response = await interface._handle_generate(gen_args)
        logger.info(f"Generation response: {gen_response[0].text}")

        logger.info("All tests passed successfully!")
        return True

    except Exception as e:
        # logger.exception records the traceback, which the original
        # f-string logger.error call dropped.
        logger.exception("Test failed: %s", e)
        return False

async def test_model_file():
    """Check that the configured model file exists and looks plausible.

    Returns:
        bool: True if the file exists (a suspiciously small file only
        warns), False if it is missing or the check itself failed.
    """
    try:
        # Imported lazily so a broken config module is reported as a
        # test failure rather than an import-time crash.
        from config import config

        model_path = Path(config.model_path)

        if not model_path.exists():
            logger.error(f"Model file not found: {model_path}")
            logger.error("Please ensure the DeepSeek model is placed in the models/ directory")
            return False

        size_gb = model_path.stat().st_size / (1024**3)
        logger.info(f"Model file found: {model_path} ({size_gb:.2f} GB)")

        # GGUF-style LLM weights are normally multiple GB; anything under
        # 1 GB is probably a placeholder or a truncated download.
        if size_gb < 1:
            logger.warning("Model file seems small. This might not be a valid model file.")

        return True

    except Exception as e:
        # logger.exception keeps the traceback; logger.error(f"...") lost it.
        logger.exception("Model file test failed: %s", e)
        return False

async def test_dependencies():
    """Verify that the required Python packages can be imported.

    Returns:
        bool: True if the core dependencies (mcp, llama_cpp, pydantic)
        import cleanly; False on a missing dependency. GPU support is
        probed on a best-effort basis and only warns when absent.
    """
    try:
        logger.info("Testing dependencies...")

        # Test core dependencies
        import mcp
        import llama_cpp
        import pydantic
        logger.info("✓ Core dependencies available")

        # Probe llama-cpp-python GPU support; absence is non-fatal, so only
        # ImportError is caught (a bare `except:` here would also swallow
        # KeyboardInterrupt/SystemExit and hide real bugs).
        try:
            from llama_cpp import llama_cpp
            logger.info("✓ llama-cpp-python with GPU support available")
        except ImportError:
            logger.warning("⚠ GPU support might not be available")

        return True

    except ImportError as e:
        logger.exception("Missing dependency: %s", e)
        logger.error("Please install dependencies: pip install -r requirements.txt")
        return False

async def main():
    """Run every test stage in order, exiting with status 1 on the first failure."""
    logger.info("Starting DeepSeek MCP Server Tests...")

    # Each stage gates the next: there is no point loading the model when a
    # dependency or the model file itself is missing.
    for stage in (test_dependencies, test_model_file, test_mcp_interface):
        if not await stage():
            sys.exit(1)

    logger.info("🎉 All tests completed successfully!")
    logger.info("Your MCP server is ready to use!")

# Script entry point: drive the async test suite with a fresh event loop.
if __name__ == "__main__":
    asyncio.run(main())