Merge branch 'runtipi:master' into flaresolverr
commit 9dd7f5272d
@@ -5,18 +5,13 @@
   "exposable": true,
   "port": 8091,
   "id": "jellyfin",
-  "tipi_version": 9,
-  "version": "10.9.1",
-  "categories": [
-    "media"
-  ],
+  "tipi_version": 10,
+  "version": "10.9.2",
+  "categories": ["media"],
   "description": "Jellyfin is a Free Software Media System that puts you in control of managing and streaming your media. It is an alternative to the proprietary Emby and Plex, to provide media from a dedicated server to end-user devices via multiple apps. Jellyfin is descended from Emby's 3.5.2 release and ported to the .NET Core framework to enable full cross-platform support. There are no strings attached, no premium licenses or features, and no hidden agendas: just a team who want to build something better and work together to achieve it. We welcome anyone who is interested in joining us in our quest!",
   "short_desc": "A media server for your home collection",
   "author": "jellyfin.org",
   "source": "https://github.com/jellyfin/jellyfin",
   "form_fields": [],
-  "supported_architectures": [
-    "arm64",
-    "amd64"
-  ]
+  "supported_architectures": ["arm64", "amd64"]
 }

@@ -1,7 +1,7 @@
 version: "3.7"
 services:
   jellyfin:
-    image: lscr.io/linuxserver/jellyfin:10.9.1
+    image: lscr.io/linuxserver/jellyfin:10.9.2
     container_name: jellyfin
     volumes:
       - ${APP_DATA_DIR}/data/config:/config

@@ -5,11 +5,9 @@
   "available": true,
   "exposable": true,
   "id": "nocodb",
-  "tipi_version": 46,
-  "version": "0.207.0",
-  "categories": [
-    "utilities"
-  ],
+  "tipi_version": 47,
+  "version": "0.207.2",
+  "categories": ["utilities"],
   "description": "The Open Source Airtable Alternative. Turns any MySQL, PostgreSQL, SQL Server, SQLite & MariaDB into a smart-spreadsheet.",
   "short_desc": "Open Source Airtable Alternative",
   "author": "https://github.com/nocodb",

@@ -41,8 +39,5 @@
       "env_variable": "NOCODB_TABLE_ROWS"
     }
   ],
-  "supported_architectures": [
-    "arm64",
-    "amd64"
-  ]
+  "supported_architectures": ["arm64", "amd64"]
 }

@@ -12,7 +12,7 @@ services:
       - NC_AUTH_JWT_SECRET=${NOCODB_JWT_SECRET}
       - NC_REDIS_URL=redis://default:${NOCODB_REDIS_PASSWORD}@nocodb-redis:6379
       - DB_QUERY_LIMIT_DEFAULT=${NOCODB_TABLE_ROWS-25}
-    image: "nocodb/nocodb:0.207.0"
+    image: "nocodb/nocodb:0.207.2"
     ports:
       - "${APP_PORT}:8080"
     restart: always

@@ -5,19 +5,14 @@
   "exposable": true,
   "port": 11434,
   "id": "ollama-amd",
-  "tipi_version": 2,
-  "version": "0.1.37-rocm",
-  "categories": [
-    "ai"
-  ],
+  "tipi_version": 3,
+  "version": "0.1.38-rocm",
+  "categories": ["ai"],
   "description": "Get up and running with Llama 3, Mistral, Gemma, and other large language models.",
   "short_desc": "LLMs inference server with OpenAI compatible API",
   "author": "ollama",
   "source": "https://github.com/ollama/ollama",
   "website": "https://ollama.com",
   "form_fields": [],
-  "supported_architectures": [
-    "arm64",
-    "amd64"
-  ]
+  "supported_architectures": ["arm64", "amd64"]
 }

@@ -2,7 +2,7 @@ version: '3.7'
 
 services:
   ollama-amd:
-    image: ollama/ollama:0.1.37-rocm
+    image: ollama/ollama:0.1.38-rocm
     restart: unless-stopped
     container_name: ollama-amd
     environment:

@@ -5,19 +5,14 @@
   "exposable": true,
   "port": 11436,
   "id": "ollama-cpu",
-  "tipi_version": 2,
-  "version": "0.1.37",
-  "categories": [
-    "ai"
-  ],
+  "tipi_version": 3,
+  "version": "0.1.38",
+  "categories": ["ai"],
   "description": "Get up and running with Llama 3, Mistral, Gemma, and other large language models.",
   "short_desc": "LLMs inference server with OpenAI compatible API",
   "author": "ollama",
   "source": "https://github.com/ollama/ollama",
   "website": "https://ollama.com",
   "form_fields": [],
-  "supported_architectures": [
-    "arm64",
-    "amd64"
-  ]
+  "supported_architectures": ["arm64", "amd64"]
 }

@@ -2,7 +2,7 @@ version: '3.7'
 
 services:
   ollama-cpu:
-    image: ollama/ollama:0.1.37
+    image: ollama/ollama:0.1.38
     restart: unless-stopped
     container_name: ollama-cpu
     ports:

@@ -5,19 +5,14 @@
   "exposable": true,
   "port": 11435,
   "id": "ollama-nvidia",
-  "tipi_version": 2,
-  "version": "0.1.37",
-  "categories": [
-    "ai"
-  ],
+  "tipi_version": 3,
+  "version": "0.1.38",
+  "categories": ["ai"],
   "description": "Get up and running with Llama 3, Mistral, Gemma, and other large language models.",
   "short_desc": "LLMs inference server with OpenAI compatible API",
   "author": "ollama",
   "source": "https://github.com/ollama/ollama",
   "website": "https://ollama.com",
   "form_fields": [],
-  "supported_architectures": [
-    "arm64",
-    "amd64"
-  ]
+  "supported_architectures": ["arm64", "amd64"]
 }

@@ -2,7 +2,7 @@ version: '3.7'
 
 services:
   ollama-nvidia:
-    image: ollama/ollama:0.1.37
+    image: ollama/ollama:0.1.38
     restart: unless-stopped
     container_name: ollama-nvidia
     ports:

@@ -5,19 +5,13 @@
   "exposable": true,
   "port": 8112,
   "id": "readarr",
-  "tipi_version": 18,
-  "version": "0.3.27-nightly",
-  "categories": [
-    "books",
-    "media"
-  ],
+  "tipi_version": 19,
+  "version": "0.3.28-nightly",
+  "categories": ["books", "media"],
   "description": "",
   "short_desc": "Book Manager and Automation (Sonarr for Ebooks)",
   "author": "readarr.com",
   "source": "https://github.com/Readarr/Readarr",
   "form_fields": [],
-  "supported_architectures": [
-    "arm64",
-    "amd64"
-  ]
+  "supported_architectures": ["arm64", "amd64"]
 }

@@ -1,7 +1,7 @@
 version: "3.7"
 services:
   readarr:
-    image: lscr.io/linuxserver/readarr:0.3.27-nightly
+    image: lscr.io/linuxserver/readarr:0.3.28-nightly
     container_name: readarr
     environment:
       - PUID=1000