Merge branch 'main' of https://huggingface.co/rmtlabs/my-kai-model
- .gitattributes +1 -0
- .idea/.gitignore +8 -0
- .idea/workspace.xml +8 -0
- README.md +5 -0
- config/actions.py +3 -0
- config/config.yml +13 -0
- data/RMT-LABS.pdf +0 -0
- kai-model-7.2B-Q4_0.gguf +3 -0
- main.py +1 -1
.gitattributes
CHANGED
@@ -2,3 +2,4 @@
 *.pt filter=lfs diff=lfs merge=lfs -text
 *.ckpt filter=lfs diff=lfs merge=lfs -text
 *.bin filter=lfs diff=lfs merge=lfs -text
+*.gguf filter=lfs diff=lfs merge=lfs -text
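The added line routes `*.gguf` files through Git LFS instead of storing them directly in Git; it is exactly the pattern that `git lfs track "*.gguf"` appends to .gitattributes, and it is what turns the 4 GB model file further down into a three-line pointer in the repository.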
.idea/.gitignore
ADDED
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
.idea/workspace.xml
ADDED
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="PropertiesComponent">{
+  "keyToString": {
+    "settings.editor.selected.configurable": "advanced.settings"
+  }
+}</component>
+</project>
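Both .idea/ files are JetBrains IDE state rather than project code. Notably, the .gitignore added above ignores /workspace.xml, yet workspace.xml is committed in the same merge, so it most likely slipped in unintentionally.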
README.md
ADDED
@@ -0,0 +1,5 @@
+---
+pipeline_tag: text-generation
+base_model:
+- mistralai/Mistral-7B-v0.1
+---
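This front matter is standard Hugging Face model-card metadata: `pipeline_tag: text-generation` sets the task the Hub displays for the model, and `base_model` links the repository to mistralai/Mistral-7B-v0.1 as its declared base.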
config/actions.py
CHANGED
@@ -8,8 +8,11 @@ from llama_index.core.base.response.schema import StreamingResponse
 import traceback
 import logging
 
+<<<<<<< HEAD
 
 
+=======
+>>>>>>> 3f9ef9d356d6a3a3fbfa4fd1887f2a716f06a0fc
 # Set up logging
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
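This hunk commits unresolved merge conflict markers (`<<<<<<< HEAD`, `=======`, `>>>>>>>`) straight into the Python source; each marker line is a SyntaxError, so config/actions.py cannot even be imported as committed. Since both sides of this particular conflict contain only blank lines, resolving it just means deleting the markers. A minimal sketch of the resolved region (lines 8-15 of the file):

```python
import traceback
import logging

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
```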
config/config.yml
CHANGED
@@ -1,10 +1,19 @@
 models:
 - type: main
+<<<<<<< HEAD
   engine: openai
   model: kai-model:latest # Use your actual model name
   parameters:
     openai_api_base: http://localhost:8001/v1
 
+=======
+  engine: ollama
+  model: kai-model:latest # Use your actual model name
+  parameters:
+    base_url: http://127.0.0.1:11434
+    temperature: 0.3
+    top_p: 0.9
+>>>>>>> 3f9ef9d356d6a3a3fbfa4fd1887f2a716f06a0fc
 
 instructions:
 - type: general
@@ -31,11 +40,15 @@ sample_conversation: |
 rails:
   input:
     flows:
+<<<<<<< HEAD
     - jailbreak detection heuristics
     config:
       jailbreak_detection:
         length_per_perplexity_threshold: 90
         prefix_suffix_perplexity_threshold: 1850
+=======
+    - self check input
+>>>>>>> 3f9ef9d356d6a3a3fbfa4fd1887f2a716f06a0fc
 
   output:
     flows:
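The same conflict markers are committed into config.yml, leaving what appears to be a NeMo Guardrails configuration unparseable as YAML. Below is a minimal sketch of one possible resolution of the `models` block, keeping the ollama side of the conflict; whether the openai or the ollama engine was the intended survivor is an assumption, and only one side (or a deliberate combination) should remain after the merge:

```python
# Hypothetical check, not part of the repo: the resolved block parses
# cleanly once the <<<<<<< / ======= / >>>>>>> markers are removed.
import yaml  # pip install pyyaml

resolved_models = """
models:
- type: main
  engine: ollama  # assumption: keeping the incoming (3f9ef9d) side
  model: kai-model:latest
  parameters:
    base_url: http://127.0.0.1:11434
    temperature: 0.3
    top_p: 0.9
"""

config = yaml.safe_load(resolved_models)
print(config["models"][0]["engine"])  # -> ollama
```

The `rails.input.flows` conflict needs the same treatment: keep either `jailbreak detection heuristics` (with its config block) or `self check input`, not the marker lines.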
data/RMT-LABS.pdf
ADDED
Binary file (92.1 kB)
kai-model-7.2B-Q4_0.gguf
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a7d036ec49164704ef767d1e31914efc196bf8b5999318d6735cef2c33fed687
+size 4113300746
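Only this three-line Git LFS pointer lands in Git; the actual weight file (4,113,300,746 bytes, about 4.1 GB) is stored in LFS and fetched on `git lfs pull`. A minimal sketch, not part of the repo, for verifying a downloaded copy against the pointer's oid and size:

```python
import hashlib
from pathlib import Path

# Values copied from the LFS pointer above.
EXPECTED_OID = "a7d036ec49164704ef767d1e31914efc196bf8b5999318d6735cef2c33fed687"
EXPECTED_SIZE = 4113300746  # bytes

def verify_lfs_object(path: str) -> bool:
    """Return True if the file matches the pointer's size and sha256 oid."""
    p = Path(path)
    if p.stat().st_size != EXPECTED_SIZE:
        return False
    digest = hashlib.sha256()
    with p.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            digest.update(chunk)
    return digest.hexdigest() == EXPECTED_OID

print(verify_lfs_object("kai-model-7.2B-Q4_0.gguf"))
```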
main.py
CHANGED
@@ -81,4 +81,4 @@ def health():
 
 if __name__ == "__main__":
     import uvicorn
-    uvicorn.run(app, host = "0.0.0.0", port = 8000)
+    uvicorn.run(app, host = "0.0.0.0", port = 8000)
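The removed and re-added lines appear to differ only in whitespace, so this is an indentation or spacing touch-up; PEP 8 spacing for the call would be `uvicorn.run(app, host="0.0.0.0", port=8000)` (no spaces around `=` in keyword arguments).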