apps/ollama-cpu/config.json added

tipi 2025-08-11 10:37:41 +00:00
parent 6a9006d2ff
commit 72236d5ec1


@@ -0,0 +1,21 @@
{
"$schema": "../app-info-schema.json",
"name": "Ollama - CPU",
"available": true,
"exposable": true,
"dynamic_config": true,
"port": 11436,
"id": "ollama-cpu",
"tipi_version": 68,
"version": "v0.11.4",
"categories": ["ai"],
"description": "Get up and running with Llama 3, Mistral, Gemma, and other large language models.",
"short_desc": "LLMs inference server with OpenAI compatible API",
"author": "ollama",
"source": "https://github.com/ollama/ollama",
"website": "https://ollama.com",
"form_fields": [],
"supported_architectures": ["arm64", "amd64"],
"created_at": 1691943801422,
"updated_at": 1752087893619
}
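
Note: this commit adds only the app metadata; the service definition itself lives in a separate compose file that is not part of this diff. Purely as an illustration, a minimal standard Compose sketch for running Ollama on CPU might look like the following. The image tag, the named volume, and the internal port 11434 plus the model path /root/.ollama are Ollama defaults rather than anything taken from this repository, and the host port 11436 simply mirrors the "port" field above; runtipi's own dynamic compose format may differ.

{
  "services": {
    "ollama-cpu": {
      "image": "ollama/ollama:0.11.4",
      "restart": "unless-stopped",
      "ports": ["11436:11434"],
      "volumes": ["ollama-data:/root/.ollama"]
    }
  },
  "volumes": {
    "ollama-data": {}
  }
}

Since JSON is valid YAML, a structure like this could be saved as docker-compose.yml and started with `docker compose up -d`, after which the OpenAI-compatible API would be reachable on host port 11436.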