File size: 3,087 Bytes
ba18ff2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
"""
Configuration settings for DeepSeek MCP Server
"""

import os
from pathlib import Path
from typing import Optional

class MCPConfig:
    """Configuration for the DeepSeek MCP server.

    Every setting is read from an ``MCP_*`` environment variable at
    construction time, falling back to a documented default.  Call
    :meth:`validate` after construction to sanity-check the values.
    """

    # Environment values accepted as "true" for boolean settings
    # (case-insensitive).  Anything else is treated as false.
    _TRUTHY = frozenset({'true', '1', 'yes', 'on'})

    def __init__(self):
        # Model configuration
        self.model_path = self._get_model_path()
        self.n_ctx = self._env_int('MCP_CONTEXT_SIZE', 4096)
        self.n_gpu_layers = self._env_int('MCP_GPU_LAYERS', 35)
        self.n_threads = self._env_int('MCP_THREADS', 8)
        self.n_batch = self._env_int('MCP_BATCH_SIZE', 512)

        # Generation defaults
        self.default_max_tokens = self._env_int('MCP_DEFAULT_MAX_TOKENS', 512)
        self.default_temperature = self._env_float('MCP_DEFAULT_TEMPERATURE', 0.7)
        self.default_top_p = self._env_float('MCP_DEFAULT_TOP_P', 0.9)
        self.default_repeat_penalty = self._env_float('MCP_DEFAULT_REPEAT_PENALTY', 1.1)

        # Server identification
        self.server_name = os.getenv('MCP_SERVER_NAME', 'deepseek-mcp-server')
        self.server_version = os.getenv('MCP_SERVER_VERSION', '1.0.0')

        # Logging
        self.log_level = os.getenv('MCP_LOG_LEVEL', 'INFO')
        self.log_file = os.getenv('MCP_LOG_FILE', 'mcp_server.log')

        # Performance settings
        self.use_mlock = self._env_bool('MCP_USE_MLOCK', True)
        self.low_vram = self._env_bool('MCP_LOW_VRAM', False)

    @staticmethod
    def _env_int(name: str, default: int) -> int:
        """Read *name* as an int, naming the variable in any parse error."""
        raw = os.getenv(name)
        if raw is None:
            return default
        try:
            return int(raw)
        except ValueError:
            raise ValueError(
                f"Environment variable {name} must be an integer, got {raw!r}"
            ) from None

    @staticmethod
    def _env_float(name: str, default: float) -> float:
        """Read *name* as a float, naming the variable in any parse error."""
        raw = os.getenv(name)
        if raw is None:
            return default
        try:
            return float(raw)
        except ValueError:
            raise ValueError(
                f"Environment variable {name} must be a number, got {raw!r}"
            ) from None

    @classmethod
    def _env_bool(cls, name: str, default: bool) -> bool:
        """Read *name* as a boolean; accepts true/1/yes/on (case-insensitive)."""
        raw = os.getenv(name)
        if raw is None:
            return default
        return raw.strip().lower() in cls._TRUTHY

    def _get_model_path(self) -> str:
        """Return the model path from MCP_MODEL_PATH or the default location.

        The default is ``models/deepseek-llm-7b-chat-Q6_K.gguf`` next to
        this file, so the server works out of the box when the model is
        shipped alongside the code.
        """
        env_path = os.getenv('MCP_MODEL_PATH')
        if env_path:
            return env_path

        default_path = Path(__file__).parent / "models" / "deepseek-llm-7b-chat-Q6_K.gguf"
        return str(default_path)

    def validate(self) -> bool:
        """Validate configuration settings.

        Returns:
            True when every setting is acceptable.

        Raises:
            FileNotFoundError: if the model file does not exist.
            ValueError: if a numeric setting is out of range.
        """
        if not Path(self.model_path).exists():
            raise FileNotFoundError(f"Model file not found: {self.model_path}")

        if self.n_ctx < 512:
            raise ValueError("Context size must be at least 512")

        if self.n_gpu_layers < 0:
            raise ValueError("GPU layers must be non-negative")

        # Thread/batch/token counts must be positive for llama.cpp-style
        # backends to make sense.
        if self.n_threads < 1:
            raise ValueError("Thread count must be at least 1")

        if self.n_batch < 1:
            raise ValueError("Batch size must be at least 1")

        if self.default_max_tokens < 1:
            raise ValueError("Default max tokens must be at least 1")

        if not (0.0 <= self.default_temperature <= 2.0):
            raise ValueError("Temperature must be between 0.0 and 2.0")

        if not (0.0 <= self.default_top_p <= 1.0):
            raise ValueError("Top-p must be between 0.0 and 1.0")

        if self.default_repeat_penalty <= 0.0:
            raise ValueError("Repeat penalty must be positive")

        return True

    def __str__(self) -> str:
        """Human-readable summary of the active configuration."""
        return f"""DeepSeek MCP Server Configuration:
  Model Path: {self.model_path}
  Context Size: {self.n_ctx}
  GPU Layers: {self.n_gpu_layers}
  Threads: {self.n_threads}
  Batch Size: {self.n_batch}
  Default Max Tokens: {self.default_max_tokens}
  Default Temperature: {self.default_temperature}
  Default Top-p: {self.default_top_p}
  Default Repeat Penalty: {self.default_repeat_penalty}
  Log Level: {self.log_level}
  Server Name: {self.server_name}
  Server Version: {self.server_version}"""

# Global configuration instance, built eagerly at import time.  Any
# malformed MCP_* environment variable therefore raises on import of
# this module rather than at first use.
config = MCPConfig()