feat: update local AI configuration for Vulkan support and enhance documentation
Signed-off-by: Jean-Yves <7360784+docjyJ@users.noreply.github.com>
parent 5d30f0f82d
commit 808ff2f76e
4 changed files with 26 additions and 98 deletions
@@ -1,50 +0,0 @@
-{
-    "aio_services_v1": [
-        {
-            "container_name": "nextcloud-aio-local-ai-vulkan",
-            "display_name": "Local AI for Vulkan (x86 only)",
-            "documentation": "https://github.com/nextcloud/all-in-one/tree/main/community-containers/local-ai-vulkan",
-            "image": "ghcr.io/docjyj/aio-local-ai-vulkan",
-            "image_tag": "v0",
-            "internal_port": "8080",
-            "restart": "unless-stopped",
-            "environment": [
-                "TZ=%TIMEZONE%",
-                "LOCAL_AI_PASS=%LOCAL_AI_PASS%"
-            ],
-            "ports": [
-                {
-                    "ip_binding": "%APACHE_IP_BINDING%",
-                    "port_number": "10078",
-                    "protocol": "tcp"
-                }
-            ],
-            "volumes": [
-                {
-                    "source": "nextcloud_aio_localai_vulkan_models",
-                    "destination": "/models",
-                    "writeable": true
-                },
-                {
-                    "source": "nextcloud_aio_localai_vulkan_backends",
-                    "destination": "/backends",
-                    "writeable": true
-                }
-            ],
-            "secrets": [
-                "LOCAL_AI_PASS"
-            ],
-            "ui_secret": "LOCAL_AI_PASS",
-            "devices": [
-                "/dev/dri"
-            ],
-            "nextcloud_exec_commands": [
-                "php /var/www/html/occ app:install integration_openai",
-                "php /var/www/html/occ app:enable integration_openai",
-                "php /var/www/html/occ config:app:set integration_openai url --value http://nextcloud-aio-local-ai-vulkan:8080",
-                "php /var/www/html/occ app:install assistant",
-                "php /var/www/html/occ app:enable assistant"
-            ]
-        }
-    ]
-}
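The `nextcloud_exec_commands` in the removed definition wire Local AI into Nextcloud through the `integration_openai` app. A minimal sketch of how the same wiring can be inspected or applied by hand from the Docker host, assuming the AIO default container name `nextcloud-aio-nextcloud`:

```bash
# Sketch only: run occ inside the AIO Nextcloud container
# (container name assumed to be the AIO default, nextcloud-aio-nextcloud).

# Show which backend URL integration_openai currently points at.
docker exec --user www-data nextcloud-aio-nextcloud \
  php /var/www/html/occ config:app:get integration_openai url

# Point it at the Vulkan Local AI service, mirroring the exec command above.
docker exec --user www-data nextcloud-aio-nextcloud \
  php /var/www/html/occ config:app:set integration_openai url \
  --value http://nextcloud-aio-local-ai-vulkan:8080
```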
@@ -1,20 +0,0 @@
-
-> [!WARNING]
-> ARM devices are not supported by this container. It is only for x86_64 devices. (See: https://github.com/mudler/LocalAI/issues/5778)
-
-## Local AI with Vulkan support
-This container bundles Local AI and auto-configures it for you. It support hardware acceleration with Vulkan.
-
-### Notes
-Documentation is available on the container repository. This documentation is regularly updated and is intended to be as simple and detailed as possible. Thanks for all your feedback!
-
-- See https://github.com/docjyJ/aio-local-ai-vulkan#getting-started for getting start with this container.
-- See [this guide](https://github.com/nextcloud/all-in-one/discussions/5430) for how to improve AI task pickup speed
-- See https://github.com/nextcloud/all-in-one/tree/main/community-containers#community-containers how to add it to the AIO stack
-- Note that Nextcloud supports only one server for AI queries, so this container cannot be used at the same time as other AI containers.
-
-### Repository
-https://github.com/docjyJ/aio-local-ai-vulkan
-
-### Maintainer
-https://github.com/docjyJ
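Since the image referenced above only ships x86_64 builds, a quick host check before enabling the container is enough to rule out unsupported hardware (plain `uname`, no assumptions beyond a Linux host):

```bash
# Prints the machine architecture; expect x86_64 here.
# aarch64/arm64 hosts (e.g. Raspberry Pi) are not supported by this image.
uname -m
```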
@@ -3,14 +3,21 @@
         {
             "container_name": "nextcloud-aio-local-ai",
             "display_name": "Local AI",
-            "documentation": "https://github.com/nextcloud/all-in-one/tree/main/community-containers/local-ai",
-            "image": "ghcr.io/szaimen/aio-local-ai",
-            "image_tag": "v2",
+            "documentation": "https://github.com/nextcloud/all-in-one/tree/main/community-containers/local-ai-vulkan",
+            "image": "ghcr.io/docjyj/aio-local-ai-vulkan",
+            "image_tag": "v0",
             "internal_port": "8080",
             "restart": "unless-stopped",
             "environment": [
                 "TZ=%TIMEZONE%",
-                "MODELS_PATH=/models"
+                "LOCAL_AI_PASS=%LOCAL_AI_PASS%"
             ],
+            "ports": [
+                {
+                    "ip_binding": "%APACHE_IP_BINDING%",
+                    "port_number": "10078",
+                    "protocol": "tcp"
+                }
+            ],
             "volumes": [
                 {
@@ -19,22 +26,19 @@
                     "writeable": true
                 },
                 {
-                    "source": "nextcloud_aio_localai_images",
-                    "destination": "/tmp/generated/images/",
+                    "source": "nextcloud_aio_localai_backends",
+                    "destination": "/backends",
                     "writeable": true
-                },
-                {
-                    "source": "%NEXTCLOUD_DATADIR%",
-                    "destination": "/nextcloud",
-                    "writeable": false
                 }
             ],
-            "enable_nvidia_gpu": false,
+            "secrets": [
+                "LOCAL_AI_PASS"
+            ],
+            "ui_secret": "LOCAL_AI_PASS",
+            "devices": [
+                "/dev/dri"
+            ],
             "nextcloud_exec_commands": [
-                "mkdir '/mnt/ncdata/admin/files/nextcloud-aio-local-ai'",
-                "touch '/mnt/ncdata/admin/files/nextcloud-aio-local-ai/models.yaml'",
-                "echo 'Scanning nextcloud-aio-local-ai folder for admin user...'",
-                "php /var/www/html/occ files:scan --path='/admin/files/nextcloud-aio-local-ai'",
                 "php /var/www/html/occ app:install integration_openai",
                 "php /var/www/html/occ app:enable integration_openai",
                 "php /var/www/html/occ config:app:set integration_openai url --value http://nextcloud-aio-local-ai:8080",
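The updated config publishes the service on host port 10078 (bound to `%APACHE_IP_BINDING%`) and introduces the `LOCAL_AI_PASS` secret, which is exposed in the AIO interface via `ui_secret`. A hedged smoke test from the host could look like this; whether `LOCAL_AI_PASS` is accepted as an OpenAI-style bearer token is an assumption about this image, so adjust it to the image's own documentation:

```bash
# Assumptions: the service speaks the LocalAI / OpenAI-compatible API and
# LOCAL_AI_PASS acts as the API key; both are guesses about this image.
LOCAL_AI_PASS='...'   # the value shown for LOCAL_AI_PASS in the AIO interface

# List the models the backend currently exposes.
curl -s \
  -H "Authorization: Bearer ${LOCAL_AI_PASS}" \
  http://127.0.0.1:10078/v1/models
```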
@@ -1,22 +1,16 @@
 ## Local AI
-This container bundles Local AI and auto-configures it for you.
+This container bundles Local AI and auto-configures it for you. It supports hardware acceleration with Vulkan.
 
 ### Notes
-- Make sure to have enough storage space available. This container alone needs ~7GB storage. Every model that you add to `models.yaml` will of course use additional space which adds up quite fast.
-- After the container was started the first time, you should see a new `nextcloud-aio-local-ai` folder when you open the files app with the default `admin` user. In there you should see a `models.yaml` config file. You can now add models in there. Please refer [here](https://github.com/mudler/LocalAI/blob/master/gallery/index.yaml) where you can get further urls that you can put in there. Afterwards restart all containers from the AIO interface and the models should automatically get downloaded by the local-ai container and activated.
-- Example for content of `models.yaml` (if you add all of them, it takes around 10GB additional space):
-```yaml
-# Stable Diffusion in NCNN with c++, supported txt2img and img2img
-- url: github:mudler/LocalAI/blob/master/gallery/stablediffusion.yaml
-  name: Stable_diffusion
-```
-- To make it work, you first need to browse `https://your-nc-domain.com/settings/admin/ai` and enable or disable specific features for your models in the openAI settings. Afterwards using the Nextcloud Assistant should work.
+Documentation is available on the container repository. This documentation is regularly updated and is intended to be as simple and detailed as possible. Thanks for all your feedback!
+
+- See https://github.com/docjyJ/aio-local-ai-vulkan#getting-started for getting started with this container.
 - See [this guide](https://github.com/nextcloud/all-in-one/discussions/5430) for how to improve AI task pickup speed
 - See https://github.com/nextcloud/all-in-one/tree/main/community-containers#community-containers how to add it to the AIO stack
 - Note that Nextcloud supports only one server for AI queries, so this container cannot be used at the same time as other AI containers.
 
 ### Repository
-https://github.com/szaimen/aio-local-ai
+https://github.com/docjyJ/aio-local-ai-vulkan
 
 ### Maintainer
-https://github.com/szaimen
+https://github.com/docjyJ
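The community-containers readme linked in the notes describes how containers like this one are opted in: through the `AIO_COMMUNITY_CONTAINERS` environment variable on the mastercontainer. An abbreviated sketch follows; only the relevant flag is shown in full, and the complete, authoritative start command lives in that readme:

```bash
# Abbreviated sketch of starting the AIO mastercontainer with this community
# container enabled; flags other than AIO_COMMUNITY_CONTAINERS are trimmed.
docker run \
  --init \
  --name nextcloud-aio-mastercontainer \
  --restart always \
  --publish 8080:8080 \
  --env AIO_COMMUNITY_CONTAINERS="local-ai" \
  --volume nextcloud_aio_mastercontainer:/mnt/docker-aio-config \
  --volume /var/run/docker.sock:/var/run/docker.sock:ro \
  nextcloud/all-in-one:latest
```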