File size: 1,508 Bytes
ba18ff2 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 |
#!/usr/bin/env python3
"""
Example of how to use the DeepSeek MCP Server programmatically
"""
import asyncio
from mcp_interface import MCPLLMInterface
from config import config
async def example_usage():
    """Demonstrate direct use of MCPLLMInterface.

    Loads the configured model, then runs one chat call and one raw
    text-generation call, printing each response to stdout.
    """
    print("Creating MCP interface...")
    interface = MCPLLMInterface(config.model_path)

    # NOTE(review): this relies on private methods (_load_model,
    # _handle_chat, _handle_generate) — confirm no public API exists.
    print("Loading model...")
    await interface._load_model()
    print("Model loaded successfully!")

    # Example 1: chat-style request.
    print("\n=== Chat Example ===")
    response = await interface._handle_chat({
        "message": "Write a simple Python function to add two numbers",
        "max_tokens": 200,
        "temperature": 0.7,
    })
    print("Chat Response:")
    print(response[0].text)

    # Example 2: raw prompt completion.
    print("\n=== Generation Example ===")
    response = await interface._handle_generate({
        "prompt": "The benefits of using local AI models include:",
        "max_tokens": 150,
        "temperature": 0.8,
    })
    print("Generated Text:")
    print(response[0].text)
def main():
    """Entry point: run the async examples and report success or failure."""
    print("DeepSeek MCP Server Usage Examples")
    print("=" * 50)
    try:
        asyncio.run(example_usage())
    except Exception as e:
        # Broad catch is deliberate for a demo script: show a short
        # error message instead of a traceback.
        print(f"Error: {e}")
    else:
        print("\n" + "=" * 50)
        print("Examples completed successfully!")


if __name__ == "__main__":
    main()
|