{
  "8GB": {
    "GENERAL": [
      "llama2:3b",
      "orca-mini:3b",
      "mistral:7b"
    ],
    "RESEARCH": [
      "gemma:2b",
      "phi:2.7b",
      "neural-chat:7b"
    ],
    "REASONING": [
      "neural-chat:7b",
      "mistral:7b",
      "llama2:7b"
    ],
    "CODING": [
      "codellama:7b",
      "deepseek-coder:6.7b",
      "mistral:7b"
    ],
    "VISION": [
      "llava:7b",
      "bakllava:7b",
      "llava-llama2:7b"
    ]
  },
  "16GB": {
    "GENERAL": [
      "llama2:7b",
      "mistral:7b",
      "phi:2.7b"
    ],
    "RESEARCH": [
      "gemma:7b",
      "phi:2.7b",
      "neural-chat:7b"
    ],
    "REASONING": [
      "neural-chat:7b",
      "mistral:7b",
      "llama2:7b"
    ],
    "CODING": [
      "codellama:7b",
      "deepseek-coder:6.7b",
      "mistral:7b"
    ],
    "VISION": [
      "llava:7b",
      "bakllava:7b",
      "llava-llama2:7b"
    ]
  },
  "32GB": {
    "GENERAL": [
      "llama2:13b",
      "phi:2.7b",
      "mistral:7b"
    ],
    "RESEARCH": [
      "gemma:7b",
      "phi:2.7b",
      "neural-chat:7b"
    ],
    "REASONING": [
      "neural-chat:7b",
      "mistral:7b",
      "llama2:13b"
    ],
    "CODING": [
      "codellama:13b",
      "deepseek-coder:6.7b",
      "mistral:7b"
    ],
    "VISION": [
      "llava:13b",
      "bakllava:7b",
      "llava-llama2:13b"
    ]
  },
  "64GB": {
    "GENERAL": [
      "llama2:70b",
      "phi:2.7b",
      "mistral:7b"
    ],
    "RESEARCH": [
      "gemma:7b",
      "mixtral:8x7b",
      "neural-chat:7b"
    ],
    "REASONING": [
      "neural-chat:7b",
      "mistral:7b",
      "llama2:70b"
    ],
    "CODING": [
      "codellama:34b",
      "deepseek-coder:33b",
      "mistral:7b"
    ],
    "VISION": [
      "llava:13b",
      "bakllava:7b",
      "llava-llama2:13b"
    ]
  },
  "128GB": {
    "GENERAL": [
      "llama2:70b",
      "phi:2.7b",
      "mistral:7b"
    ],
    "RESEARCH": [
      "gemma:7b",
      "mixtral:8x7b",
      "neural-chat:7b"
    ],
    "REASONING": [
      "neural-chat:7b",
      "mistral:7b",
      "llama2:70b"
    ],
    "CODING": [
      "codellama:34b",
      "deepseek-coder:33b",
      "mistral:7b"
    ],
    "VISION": [
      "llava:13b",
      "bakllava:7b",
      "llava-llama2:13b"
    ]
  }
}