- Integrate ollama + ai-gateway into root docker-compose.yml (NVIDIA GPU runtime, single compose for all services)
- Change NAS mount from SMB (NAS_SMB_PATH) to NFS (NAS_NFS_PATH); default: /mnt/nas/Document_Server (fstab registered on GPU server)
- Update config.yaml AI endpoints:
  - primary → Mac mini MLX via Tailscale (100.76.254.116:8800)
  - fallback/embedding/vision/rerank → ollama (same Docker network)
  - gateway → ai-gateway (same Docker network)
- Update credentials.env.example (remove GPU_SERVER_IP, add NFS path)
- Mark gpu-server/docker-compose.yml as deprecated
- Update CLAUDE.md network diagram and AI model config
- Update architecture.md, deploy.md, devlog.md for GPU server as main
- Caddyfile: auto_https off, HTTP only (TLS at upstream proxy)
- Caddy port: 127.0.0.1:8080:80 (localhost only)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
40 lines
723 B
Caddyfile
{
	# Disable automatic HTTPS: this Caddy serves plain HTTP only,
	# TLS is terminated at the upstream proxy in front of it.
	auto_https off
}
|
|
|
|
http://document.hyungi.net {
|
|
encode gzip
|
|
|
|
# API + 문서 → FastAPI
|
|
handle /api/* {
|
|
reverse_proxy fastapi:8000
|
|
}
|
|
handle /docs {
|
|
reverse_proxy fastapi:8000
|
|
}
|
|
handle /openapi.json {
|
|
reverse_proxy fastapi:8000
|
|
}
|
|
handle /health {
|
|
reverse_proxy fastapi:8000
|
|
}
|
|
handle /setup {
|
|
reverse_proxy fastapi:8000
|
|
}
|
|
|
|
# 프론트엔드
|
|
handle {
|
|
reverse_proxy frontend:3000
|
|
}
|
|
}
|
|
|
|
# Synology Office 프록시
|
|
http://office.hyungi.net {
|
|
reverse_proxy https://ds1525.hyungi.net:5001 {
|
|
header_up Host {upstream_hostport}
|
|
transport http {
|
|
tls_insecure_skip_verify
|
|
}
|
|
}
|
|
}
|