Upload files to "/"

zwnk 2025-01-22 18:21:29 +01:00
commit 56d63e6a79
5 changed files with 167 additions and 0 deletions

82  .env  Normal file

@@ -0,0 +1,82 @@
PORT=3010
DATABASE_PATH=/root/.flowise
APIKEY_PATH=/root/.flowise
SECRETKEY_PATH=/root/.flowise
LOG_PATH=/root/.flowise/logs
BLOB_STORAGE_PATH=/root/.flowise/storage
# APIKEY_STORAGE_TYPE=json (json | db)
# NUMBER_OF_PROXIES= 1
# CORS_ORIGINS=*
# IFRAME_ORIGINS=*
# DATABASE_TYPE=postgres
# DATABASE_PORT=5432
# DATABASE_HOST=""
# DATABASE_NAME=flowise
# DATABASE_USER=root
# DATABASE_PASSWORD=mypassword
# DATABASE_SSL=true
# DATABASE_SSL_KEY_BASE64=<Self signed certificate in BASE64>
# SECRETKEY_STORAGE_TYPE=local #(local | aws)
# SECRETKEY_PATH=/your_api_key_path/.flowise
# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey
# SECRETKEY_AWS_ACCESS_KEY=<your-access-key>
# SECRETKEY_AWS_SECRET_KEY=<your-secret-key>
# SECRETKEY_AWS_REGION=us-west-2
# FLOWISE_USERNAME=user
# FLOWISE_PASSWORD=1234
# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey
# FLOWISE_FILE_SIZE_LIMIT=50mb
# DISABLE_CHATFLOW_REUSE=true
# DEBUG=true
# LOG_LEVEL=info (error | warn | info | verbose | debug)
# TOOL_FUNCTION_BUILTIN_DEP=crypto,fs
# TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash
# LANGCHAIN_TRACING_V2=true
# LANGCHAIN_ENDPOINT=https://api.smith.langchain.com
# LANGCHAIN_API_KEY=your_api_key
# LANGCHAIN_PROJECT=your_project
# DISABLE_FLOWISE_TELEMETRY=true
# Uncomment the following line to enable model list config, load the list of models from your local config file
# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format
# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path
# STORAGE_TYPE=local (local | s3)
# BLOB_STORAGE_PATH=/your_storage_path/.flowise/storage
# S3_STORAGE_BUCKET_NAME=flowise
# S3_STORAGE_ACCESS_KEY_ID=<your-access-key>
# S3_STORAGE_SECRET_ACCESS_KEY=<your-secret-key>
# S3_STORAGE_REGION=us-west-2
# S3_ENDPOINT_URL=<custom-s3-endpoint-url>
# S3_FORCE_PATH_STYLE=false
# SHOW_COMMUNITY_NODES=true
# DISABLED_NODES=bufferMemory,chatOpenAI (comma separated list of node names to disable)
######################
# METRICS COLLECTION
#######################
# ENABLE_METRICS=false
# METRICS_PROVIDER=prometheus # prometheus | open_telemetry
# METRICS_INCLUDE_NODE_METRICS=true # default is true
# METRICS_SERVICE_NAME=FlowiseAI
# ONLY NEEDED if METRICS_PROVIDER=open_telemetry
# METRICS_OPEN_TELEMETRY_METRIC_ENDPOINT=http://localhost:4318/v1/metrics
# METRICS_OPEN_TELEMETRY_PROTOCOL=http # http | grpc | proto (default is http)
# METRICS_OPEN_TELEMETRY_DEBUG=true # default is false
# Uncomment the following lines to enable global agent proxy
# see https://www.npmjs.com/package/global-agent for more details
# GLOBAL_AGENT_HTTP_PROXY=CorporateHttpProxyUrl
# GLOBAL_AGENT_HTTPS_PROXY=CorporateHttpsProxyUrl
# GLOBAL_AGENT_NO_PROXY=ExceptionHostsToBypassProxyIfNeeded
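
Only the six variables at the top are active; the rest of the file is a commented-out catalogue of the options Flowise understands (Postgres, S3 storage, metrics, proxies). As a quick sanity check, assuming docker compose v2 and that this .env sits next to the compose file it feeds, the resolved configuration can be rendered to confirm the values above reach the flowise service:

# Sketch only: compose reads .env from the working directory automatically.
docker compose config | grep -E 'PORT|DATABASE_PATH|LOG_PATH'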

3  .gitignore  vendored  Normal file

@@ -0,0 +1,3 @@
/open-webui
/ollama
/.flowise

@@ -0,0 +1,39 @@
services:
  flowise:
    image: flowiseai/flowise
    restart: always
    environment:
      - PORT=${PORT}
      - CORS_ORIGINS=${CORS_ORIGINS}
      - IFRAME_ORIGINS=${IFRAME_ORIGINS}
      - FLOWISE_USERNAME=${FLOWISE_USERNAME}
      - FLOWISE_PASSWORD=${FLOWISE_PASSWORD}
      - FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT}
      - DEBUG=${DEBUG}
      - DATABASE_PATH=${DATABASE_PATH}
      - DATABASE_TYPE=${DATABASE_TYPE}
      - DATABASE_PORT=${DATABASE_PORT}
      - DATABASE_HOST=${DATABASE_HOST}
      - DATABASE_NAME=${DATABASE_NAME}
      - DATABASE_USER=${DATABASE_USER}
      - DATABASE_PASSWORD=${DATABASE_PASSWORD}
      - DATABASE_SSL=${DATABASE_SSL}
      - DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64}
      - APIKEY_STORAGE_TYPE=${APIKEY_STORAGE_TYPE}
      - APIKEY_PATH=${APIKEY_PATH}
      - SECRETKEY_PATH=${SECRETKEY_PATH}
      - FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE}
      - LOG_LEVEL=${LOG_LEVEL}
      - LOG_PATH=${LOG_PATH}
      - BLOB_STORAGE_PATH=${BLOB_STORAGE_PATH}
      - DISABLE_FLOWISE_TELEMETRY=${DISABLE_FLOWISE_TELEMETRY}
      - MODEL_LIST_CONFIG_JSON=${MODEL_LIST_CONFIG_JSON}
      - GLOBAL_AGENT_HTTP_PROXY=${GLOBAL_AGENT_HTTP_PROXY}
      - GLOBAL_AGENT_HTTPS_PROXY=${GLOBAL_AGENT_HTTPS_PROXY}
      - GLOBAL_AGENT_NO_PROXY=${GLOBAL_AGENT_NO_PROXY}
      - DISABLED_NODES=${DISABLED_NODES}
    ports:
      - '${PORT}:${PORT}'
    volumes:
      - ./.flowise:/root/.flowise
    entrypoint: /bin/sh -c "sleep 3; flowise start"
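
Every value is taken from the .env above via ${...} interpolation, the same PORT is published on the host, ./.flowise is bind-mounted for persistence, and the entrypoint sleeps three seconds before running flowise start. A minimal usage sketch, assuming the file is saved under a name docker compose picks up (for example docker-compose.yml) in the same directory as the .env:

docker compose up -d                # start Flowise in the background
docker compose logs -f flowise      # follow startup after the 3-second sleep
curl http://localhost:3010          # PORT=3010 from the .env above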

43  docker-compose-nvidia.yml  Normal file

@@ -0,0 +1,43 @@
services:
  ollama:
    volumes:
      - ollama:/root/.ollama
    container_name: ollama
    pull_policy: always
    tty: true
    restart: unless-stopped
    ports:
      - 3001:11434
    image: ollama/ollama:${OLLAMA_DOCKER_TAG-latest}
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]

  open-webui:
    build:
      context: .
      args:
        OLLAMA_BASE_URL: '/ollama'
      dockerfile: Dockerfile
    image: ghcr.io/open-webui/open-webui:${WEBUI_DOCKER_TAG-main}
    container_name: open-webui
    volumes:
      - open-webui:/app/backend/data
    depends_on:
      - ollama
    ports:
      - ${OPEN_WEBUI_PORT-3000}:8080
    environment:
      - 'OLLAMA_BASE_URL=http://ollama:11434'
      - 'WEBUI_SECRET_KEY='
    extra_hosts:
      - host.docker.internal:host-gateway
    restart: unless-stopped

volumes:
  ollama: {}
  open-webui: {}
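
The deploy.resources.reservations block reserves every NVIDIA GPU on the host for the ollama container, which requires the NVIDIA Container Toolkit; Open WebUI reaches Ollama over the internal compose network and is published on host port 3000 by default, while Ollama's API is mapped from container port 11434 to host port 3001. A minimal verification sketch, assuming the toolkit is installed (the model name is only illustrative):

docker compose -f docker-compose-nvidia.yml up -d
docker exec ollama nvidia-smi            # GPUs should be visible inside the container
docker exec ollama ollama pull llama3    # example model pull
curl http://localhost:3001/api/tags      # Ollama API on the published host port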

BIN  docker-compose.yml  Normal file

Binary file not shown.