# Source: provisioning/schemas/platform/templates/docker-compose/platform-stack.multiuser.yml.ncl
# Last commit: Jesús Pérez 44648e3206 (2026-01-08 09:55:37 +00:00)
#   chore: complete nickel migration and consolidate legacy configs
#   - Remove KCL ecosystem (~220 files deleted)
#   - Migrate all infrastructure to Nickel schema system
#   - Consolidate documentation: legacy docs → provisioning/docs/src/
#   - Add CI/CD workflows (.github/) and Rust build config (.cargo/)
#   - Update core system for Nickel schema parsing
#   - Update README.md and CHANGES.md for v5.0.0 release
#   - Fix pre-commit hooks: end-of-file, trailing-whitespace
#   - Breaking changes: KCL workspaces require migration
#   - Migration bridge available in docs/src/development/
# (193 lines, 5.0 KiB, Plaintext)
# Docker Compose Platform Stack - MultiUser Mode
# Full platform with PostgreSQL, Gitea, and monitoring
# For team collaboration and staging environments
{
version = "3.8",
services = {
# PostgreSQL 15 backing store; consumed by control-center via its
# CONTROL_CENTER_DATABASE_URL (same user/db/password as below).
postgres = {
image = "postgres:15-alpine",
container_name = "postgres",
environment = {
POSTGRES_DB = "provisioning",
POSTGRES_USER = "provisioning",
# NOTE(review): dev-only default credential — inject a secret for
# staging/production instead of committing it here.
POSTGRES_PASSWORD = "provisioning_dev",
},
volumes = [
# Named volume so data survives container recreation.
"postgres_data:/var/lib/postgresql/data",
],
networks = ["provisioning"],
restart = "unless-stopped",
# Health gate used by dependents via condition = "service_healthy".
healthcheck = {
test = ["CMD-SHELL", "pg_isready -U provisioning"],
interval = "10s",
timeout = "5s",
retries = 5,
},
},
# Orchestrator API service; persists its state in the surrealdb service.
orchestrator = {
image = "provisioning-orchestrator:latest",
container_name = "orchestrator",
ports = [
# HTTP API and /health endpoint.
"9090:9090",
],
environment = {
ORCHESTRATOR_MODE = "multiuser",
ORCHESTRATOR_SERVER_HOST = "0.0.0.0",
ORCHESTRATOR_SERVER_PORT = "9090",
# Server-backed SurrealDB (vs. embedded) for multiuser deployments.
ORCHESTRATOR_STORAGE_BACKEND = "surrealdb_server",
# NOTE(review): "surrealdb://" scheme is application-specific — confirm
# the orchestrator accepts it (SurrealDB clients commonly use ws:// or
# http://). Host "surrealdb" resolves via the shared compose network.
ORCHESTRATOR_SURREALDB_URL = "surrealdb://surrealdb:8000",
ORCHESTRATOR_SURREALDB_NAMESPACE = "provisioning",
ORCHESTRATOR_SURREALDB_DATABASE = "orchestrator",
# Debug logging — suitable for staging; noisy for production.
ORCHESTRATOR_LOG_LEVEL = "debug",
},
volumes = [
"orchestrator_logs:/var/log/provisioning/orchestrator",
],
networks = ["provisioning"],
restart = "unless-stopped",
# Block startup until the database reports healthy.
depends_on = {
surrealdb = {
condition = "service_healthy",
},
},
healthcheck = {
test = ["CMD", "curl", "-f", "http://localhost:9090/health"],
interval = "30s",
timeout = "10s",
retries = 3,
# Grace period before failing probes count against `retries`.
start_period = "40s",
},
},
# Control-center web service; stores data in the postgres service and
# talks to the orchestrator (both gated on being healthy below).
control-center = {
image = "provisioning-control-center:latest",
container_name = "control-center",
ports = [
# Web UI / API and /health endpoint.
"8080:8080",
],
environment = {
CONTROL_CENTER_MODE = "multiuser",
CONTROL_CENTER_SERVER_HOST = "0.0.0.0",
CONTROL_CENTER_SERVER_PORT = "8080",
CONTROL_CENTER_DATABASE = "postgres",
# Credentials must match the postgres service's POSTGRES_* values.
CONTROL_CENTER_DATABASE_URL = "postgresql://provisioning:provisioning_dev@postgres/provisioning",
CONTROL_CENTER_LOG_LEVEL = "debug",
# MFA disabled for this team/staging profile.
CONTROL_CENTER_MFA_REQUIRED = "false",
},
volumes = [
"control_center_logs:/var/log/provisioning/control-center",
],
networks = ["provisioning"],
restart = "unless-stopped",
# Wait for both backing services' healthchecks to pass.
depends_on = {
postgres = {
condition = "service_healthy",
},
orchestrator = {
condition = "service_healthy",
},
},
healthcheck = {
test = ["CMD", "curl", "-f", "http://localhost:8080/health"],
interval = "30s",
timeout = "10s",
retries = 3,
# Grace period before failing probes count against `retries`.
start_period = "40s",
},
},
# MCP server front-end for the orchestrator and control-center.
mcp-server = {
image = "provisioning-mcp-server:latest",
container_name = "mcp-server",
ports = [
"8888:8888",
],
environment = {
MCP_SERVER_MODE = "multiuser",
MCP_SERVER_HOST = "0.0.0.0",
MCP_SERVER_PORT = "8888",
# NOTE(review): "stdio" protocol combined with a published TCP port and
# an HTTP healthcheck looks inconsistent — confirm whether this should
# be an HTTP/SSE transport in multiuser mode.
MCP_SERVER_PROTOCOL = "stdio",
MCP_SERVER_LOG_LEVEL = "debug",
},
volumes = [
"mcp_server_logs:/var/log/provisioning/mcp-server",
],
networks = ["provisioning"],
restart = "unless-stopped",
# Both dependencies define healthchecks, so gate startup on them being
# healthy (the map form used everywhere else in this stack); the bare
# list form only waited for the containers to start, not to be ready.
depends_on = {
orchestrator = {
condition = "service_healthy",
},
control-center = {
condition = "service_healthy",
},
},
healthcheck = {
test = ["CMD", "curl", "-f", "http://localhost:8888/health"],
interval = "30s",
timeout = "10s",
retries = 3,
# Grace period before failing probes count against `retries`.
start_period = "40s",
},
},
# SurrealDB server backing the orchestrator's storage.
surrealdb = {
image = "surrealdb/surrealdb:latest",
container_name = "surrealdb",
# NOTE(review): no --bind/--user/--pass flags — image defaults apply.
# Confirm authentication expectations for multiuser mode.
command = "start --log=info",
ports = [
"8000:8000",
],
volumes = [
"surrealdb_data:/var/lib/surrealdb",
],
networks = ["provisioning"],
restart = "unless-stopped",
# The official surrealdb image ships neither a shell nor curl, so a
# curl-based healthcheck can never pass and the service would stay
# "unhealthy", permanently blocking the orchestrator's
# condition = "service_healthy" gate. Use the bundled readiness probe
# instead (`surreal isready` defaults to http://localhost:8000).
healthcheck = {
test = ["CMD", "/surreal", "isready"],
interval = "10s",
timeout = "5s",
retries = 5,
},
},
# Gitea git hosting for team collaboration.
gitea = {
image = "gitea/gitea:latest",
container_name = "gitea",
ports = [
# Web UI.
"3000:3000",
# SSH clone access: host port 2222 -> container sshd on 22.
"2222:22",
],
environment = {
# The gitea/gitea image reads APP_NAME / RUN_MODE / SSH_PORT without a
# GITEA_ prefix; the previous GITEA_* spellings were silently ignored.
APP_NAME = "Provisioning Gitea",
RUN_MODE = "prod",
# Advertised SSH clone port; must match the published host port above.
SSH_PORT = "2222",
# NOTE(review): no GITEA__database__* variables are set, so Gitea will
# fall back to its built-in database despite depending on postgres —
# confirm whether it should be wired to the postgres service.
},
volumes = [
"gitea_data:/data",
],
networks = ["provisioning"],
restart = "unless-stopped",
# postgres exposes a healthcheck, so wait for it to be healthy rather
# than merely started (consistent with the other dependents).
depends_on = {
postgres = {
condition = "service_healthy",
},
},
healthcheck = {
test = ["CMD", "curl", "-f", "http://localhost:3000"],
interval = "30s",
timeout = "10s",
retries = 3,
start_period = "40s",
},
},
},
# Named volumes. `null` renders as an empty YAML value, so each volume
# uses docker-compose defaults (local driver).
volumes = {
postgres_data = null,
orchestrator_logs = null,
control_center_logs = null,
mcp_server_logs = null,
surrealdb_data = null,
gitea_data = null,
},
# Single shared bridge network; every service joins it, so containers
# resolve each other by service name (e.g. "postgres", "surrealdb").
networks = {
provisioning = {
driver = "bridge",
},
},
}