@@ -2,13 +2,13 @@ resource "coder_app" "code_server" {
agent_id = coder_agent.main.id
slug = "code-server"
display_name = "VS Code"
url = "http://localhost:8080"
url = "http://localhost:13337?folder=/workspaces"
icon = "/icon/code.svg"
subdomain = true
subdomain = false
share = "owner"

healthcheck {
url = "http://localhost:8080/healthz"
url = "http://localhost:13337/healthz"
interval = 10
threshold = 5
}
@@ -19,7 +19,7 @@ resource "coder_app" "terminal" {
slug = "terminal"
display_name = "Terminal"
icon = "/icon/terminal.svg"
command = "bash"
command = "bash -lc 'exec sudo -u coder -i bash -c \"export GEM_HOME=/home/coder/.gem; export GEM_PATH=/home/coder/.gem; cd /workspaces && exec bash\"'"
}

resource "coder_app" "pgadmin" {
@@ -27,13 +27,13 @@ resource "coder_app" "pgadmin" {
agent_id = coder_agent.main.id
slug = "pgadmin"
display_name = "pgAdmin"
url = "http://localhost:5050"
url = "http://pgadmin-${local.workspace_id}:5050"
icon = "/icon/postgres.svg"
subdomain = true
share = "owner"

healthcheck {
url = "http://localhost:5050"
url = "http://pgadmin-${local.workspace_id}:5050"
interval = 15
threshold = 5
}
@@ -44,27 +44,28 @@ resource "coder_app" "qdrant" {
agent_id = coder_agent.main.id
slug = "qdrant"
display_name = "Qdrant"
url = "http://localhost:6333"
url = "http://qdrant-${local.workspace_id}:6333"
icon = "/icon/database.svg"
subdomain = false
share = "owner"

healthcheck {
url = "http://localhost:6333/health"
url = "http://qdrant-${local.workspace_id}:6333/health"
interval = 30
threshold = 10
}
}

resource "coder_app" "jupyter" {
count = data.coder_parameter.enable_jupyter.value ? 1 : 0
resource "coder_app" "marimo" {
count = data.coder_parameter.enable_marimo.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "jupyter"
display_name = "JupyterLab"
slug = "marimo"
display_name = "Marimo"
url = "http://localhost:8888"
icon = "/icon/jupyter.svg"
subdomain = true
icon = "/icon/python.svg"
subdomain = false
share = "owner"
group = "Development Services"

healthcheck {
url = "http://localhost:8888"
@@ -103,6 +104,7 @@ resource "coder_app" "dev_ports" {
icon = each.value.icon
subdomain = true
share = "owner"
group = "Development Services"

healthcheck {
url = each.value.url
@@ -115,9 +117,9 @@ resource "coder_app" "claude_cli" {
count = data.coder_parameter.enable_ai_tools.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "claude-cli"
display_name = "Claude CLI"
display_name = "Claude Code"
icon = "/icon/claude.svg"
command = "bash -lc 'claude --dangerously-skip-permissions'"
command = "bash -lc 'exec sudo -u coder -i bash -c \"export GEM_HOME=/home/coder/.gem; export GEM_PATH=/home/coder/.gem; cd /workspaces && claude --dangerously-skip-permissions\"'"
group = "AI Tools"
order = 10
}
@@ -126,9 +128,70 @@ resource "coder_app" "codex_cli" {
count = data.coder_parameter.enable_ai_tools.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "codex-cli"
display_name = "Codex CLI"
icon = "/icon/code.svg"
command = "bash -lc 'codex --dangerously-bypass-approvals-and-sandbox'"
display_name = "Codex"
icon = "/icon/openai.svg"
command = "bash -lc 'exec sudo -u coder -i bash -c \"export GEM_HOME=/home/coder/.gem; export GEM_PATH=/home/coder/.gem; cd /workspaces && codex --dangerously-bypass-approvals-and-sandbox\"'"
group = "AI Tools"
order = 20
}

resource "coder_app" "gemini_cli" {
count = data.coder_parameter.enable_ai_tools.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "gemini-cli"
display_name = "Gemini CLI"
icon = "/icon/google.svg"
command = "bash -lc 'exec sudo -u coder -i bash -c \"export GEM_HOME=/home/coder/.gem; export GEM_PATH=/home/coder/.gem; cd /workspaces && gemini\"'"
group = "AI Tools"
order = 30
}

resource "coder_app" "superfile" {
agent_id = coder_agent.main.id
slug = "superfile"
display_name = "File Explorer"
icon = "/icon/folder.svg"
command = "bash -lc 'exec sudo -u coder -i bash -c \"export GEM_HOME=/home/coder/.gem; export GEM_PATH=/home/coder/.gem; cd /workspaces && spf\"'"
group = "Terminal Tools"
order = 10
}

resource "coder_app" "lazygit" {
agent_id = coder_agent.main.id
slug = "lazygit"
display_name = "Git Manager"
icon = "/icon/git.svg"
command = "bash -lc 'exec sudo -u coder -i bash -c \"export GEM_HOME=/home/coder/.gem; export GEM_PATH=/home/coder/.gem; cd /workspaces && lazygit\"'"
group = "Terminal Tools"
order = 20
}

resource "coder_app" "lazydocker" {
agent_id = coder_agent.main.id
slug = "lazydocker"
display_name = "Docker Manager"
icon = "/icon/docker.svg"
command = "bash -lc 'exec sudo -u coder -i bash -c \"export GEM_HOME=/home/coder/.gem; export GEM_PATH=/home/coder/.gem; cd /workspaces && lazydocker\"'"
group = "Terminal Tools"
order = 30
}

resource "coder_app" "btop" {
agent_id = coder_agent.main.id
slug = "btop"
display_name = "System Monitor"
icon = "/icon/monitor.svg"
command = "bash -lc 'exec sudo -u coder -i bash -c \"export GEM_HOME=/home/coder/.gem; export GEM_PATH=/home/coder/.gem; btop\"'"
group = "Terminal Tools"
order = 40
}

resource "coder_app" "logs_viewer" {
agent_id = coder_agent.main.id
slug = "logs-viewer"
display_name = "Log Viewer"
icon = "/icon/terminal.svg"
command = "bash -lc 'exec sudo -u coder -i bash -c \"export GEM_HOME=/home/coder/.gem; export GEM_PATH=/home/coder/.gem; echo \\\"=== Workspace Logs ===\\\" && tail -f /tmp/*.log 2>/dev/null || echo \\\"No log files found in /tmp/\\\"\"'"
group = "Terminal Tools"
order = 50
}

@@ -72,10 +72,10 @@ data "coder_parameter" "enable_pgadmin" {
order = 4
}

data "coder_parameter" "enable_jupyter" {
name = "enable_jupyter"
display_name = "Expose JupyterLab"
description = "Start the optional JupyterLab container."
data "coder_parameter" "enable_marimo" {
name = "enable_marimo"
display_name = "Expose Marimo"
description = "Start the optional Marimo notebook container."
type = "bool"
default = "false"
mutable = true
@@ -120,8 +120,8 @@ locals {

services_enabled = data.coder_parameter.enable_services.value
pgadmin_enabled = data.coder_parameter.enable_pgadmin.value
jupyter_enabled = data.coder_parameter.enable_jupyter.value
port_forwarding = local.services_enabled || local.jupyter_enabled
marimo_enabled = data.coder_parameter.enable_marimo.value
port_forwarding = local.services_enabled || local.marimo_enabled

postgres_url = "postgresql://postgres:${var.postgres_password}@postgres-${local.workspace_id}:5432/postgres"
redis_url = "redis://:${var.redis_password}@redis-${local.workspace_id}:6379"
@@ -130,29 +130,31 @@ locals {
agent_startup = join("\n", compact([
"set -eu",
"export CODER_WORKSPACE_ID=${local.workspace_id}",
"# SSL Certificate setup (non-interactive)",
"# Fix RVM environment variables to suppress warnings",
"export GEM_HOME=\"$HOME/.gem\"",
"export GEM_PATH=\"$HOME/.gem\"",
"# Ensure required directories exist",
"mkdir -p /home/coder/code-tools/tf/scripts",
"mkdir -p /home/coder/code-tools/tf/scripts/agentapi",
"# SSL Certificate setup (running as root)",
"if [ -f /home/coder/lab-certs/lab.crt ]; then",
" if [ -w /usr/local/share/ca-certificates/ ] 2>/dev/null; then",
" cp /home/coder/lab-certs/lab.crt /usr/local/share/ca-certificates/lab.crt",
" update-ca-certificates 2>/dev/null || echo 'Cannot update ca-certificates'",
" else",
" # Try with sudo if available and non-interactive",
" if command -v sudo >/dev/null 2>&1 && sudo -n true 2>/dev/null; then",
" sudo cp /home/coder/lab-certs/lab.crt /usr/local/share/ca-certificates/lab.crt",
" sudo update-ca-certificates 2>/dev/null || echo 'Cannot update ca-certificates'",
" else",
" echo 'SSL cert not available - insufficient permissions'",
" fi",
" fi",
" git config --global http.\"https://git.lab\".sslCAInfo /home/coder/lab-certs/lab.crt || echo 'Cannot configure git ssl'",
" echo 'Installing SSL certificate for lab.crt'",
" # Running as root, so direct access to system directories",
" cp /home/coder/lab-certs/lab.crt /usr/local/share/ca-certificates/lab.crt 2>/dev/null && echo 'SSL cert copied successfully' || echo 'Cannot copy SSL cert'",
" update-ca-certificates 2>/dev/null && echo 'CA certificates updated successfully' || echo 'Cannot update ca-certificates'",
" # Configure git globally for coder user",
" sudo -u coder git config --global http.\"https://git.lab\".sslCAInfo /home/coder/lab-certs/lab.crt || echo 'Cannot configure git ssl'",
" sudo -u coder git config --global http.\"https://git.lab\".sslVerify true || echo 'Cannot configure git ssl verify'",
"else",
" echo 'SSL cert not available'",
" echo 'SSL cert not available at /home/coder/lab-certs/lab.crt'",
" # Configure git to skip SSL verification for git.lab if no cert available",
" sudo -u coder git config --global http.\"https://git.lab\".sslVerify false || echo 'Cannot configure git ssl skip'",
"fi",
"# End SSL setup",
local.project_repo_url != "" ? "if [ ! -d /workspaces/.git ]; then ${local.repo_clone_command}; fi" : "",
"export ENABLE_PGADMIN=${tostring(local.pgadmin_enabled)}",
"export ENABLE_JUPYTER=${tostring(local.jupyter_enabled)}",
local.port_forwarding ? "bash /home/coder/code-tools/tf/scripts/port-forward.sh" : "echo 'No service port forwarding requested'"
"export ENABLE_MARIMO=${tostring(local.marimo_enabled)}",
local.port_forwarding ? "bash /home/coder/code-tools/tf/scripts/port-forward.sh || echo 'Port forwarding script not found or failed'" : "echo 'No service port forwarding requested'"
]))
}

@@ -1,181 +0,0 @@
terraform {
required_version = ">= 1.3.0"

required_providers {
coder = {
source = "coder/coder"
version = ">= 2.7"
}
docker = {
source = "kreuzwerker/docker"
version = "~> 2.25"
}
http = {
source = "hashicorp/http"
version = ">= 3.0"
}
}
}

provider "coder" {}

provider "docker" {
host = var.docker_socket != "" ? var.docker_socket : null
}

provider "http" {}

# Workspace context

data "coder_provisioner" "me" {}
data "coder_workspace" "me" {}
data "coder_workspace_owner" "me" {}

# User inputs kept intentionally small so the template is easy to launch.

data "coder_parameter" "project_repository" {
name = "project_repository"
display_name = "Project repository"
description = "Optional Git URL cloned into /workspaces on first startup."
default = ""
mutable = true
order = 1
}

data "coder_parameter" "enable_services" {
name = "enable_services"
display_name = "Enable PostgreSQL / Redis / Qdrant"
description = "Provision bundled data services inside the workspace network."
type = "bool"
default = "true"
mutable = true
order = 2
}

data "coder_parameter" "enable_ai_tools" {
name = "enable_ai_tools"
display_name = "Install AI tooling"
description = "Run the bundled AI helper scripts (Claude, Cursor, Windsurf)."
type = "bool"
default = "true"
mutable = true
order = 3
}

data "coder_parameter" "enable_pgadmin" {
name = "enable_pgadmin"
display_name = "Expose pgAdmin"
description = "Start the pgAdmin container when database services are enabled."
type = "bool"
default = "true"
mutable = true
order = 4
}

data "coder_parameter" "enable_jupyter" {
name = "enable_jupyter"
display_name = "Expose JupyterLab"
description = "Start the optional JupyterLab container."
type = "bool"
default = "false"
mutable = true
order = 5
}

data "coder_parameter" "enable_jetbrains" {
name = "enable_jetbrains"
display_name = "JetBrains Gateway"
description = "Install JetBrains Gateway integration for this workspace."
type = "bool"
default = "true"
mutable = true
order = 6
}

data "coder_parameter" "ai_prompt" {
name = "AI Prompt"
display_name = "AI Task Prompt"
description = "Optional pre-filled prompt shown when starting a Claude Code task."
type = "string"
default = ""
mutable = true
order = 7
form_type = "textarea"
}

locals {
bind_mounts = [
{ host = "${var.host_home_path}", container = "/home/coder" },
{ host = "/var/run/docker.sock", container = "/var/run/docker.sock" },
]

workspace_id = data.coder_workspace.me.id
container_name = "coder-${local.workspace_id}"

git_author_name = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
git_author_email = data.coder_workspace_owner.me.email

project_repo_url = trimspace(data.coder_parameter.project_repository.value)
repo_clone_command = local.project_repo_url != "" ? "git clone ${local.project_repo_url} /workspaces" : "echo 'No repository requested'"

services_enabled = data.coder_parameter.enable_services.value
pgadmin_enabled = data.coder_parameter.enable_pgadmin.value
jupyter_enabled = data.coder_parameter.enable_jupyter.value
port_forwarding = local.services_enabled || local.jupyter_enabled

postgres_url = "postgresql://postgres:${var.postgres_password}@postgres-${local.workspace_id}:5432/postgres"
redis_url = "redis://:${var.redis_password}@redis-${local.workspace_id}:6379"
qdrant_url = "http://qdrant-${local.workspace_id}:6333"

agent_startup = join("\n", compact([
"set -eu",
"export CODER_WORKSPACE_ID=${local.workspace_id}",
"# SSL Certificate setup",
"cp /home/coder/lab-certs/lab.crt /usr/local/share/ca-certificates/lab.crt || echo 'SSL cert not available'",
"update-ca-certificates 2>/dev/null || echo 'Cannot update ca-certificates'",
"git config --global http.\"https://git.lab\".sslCAInfo /home/coder/lab-certs/lab.crt || echo 'Cannot configure git ssl'",
"# End SSL setup",
local.project_repo_url != "" ? "if [ ! -d /workspaces/.git ]; then ${local.repo_clone_command}; fi" : "",
"export ENABLE_PGADMIN=${tostring(local.pgadmin_enabled)}",
"export ENABLE_JUPYTER=${tostring(local.jupyter_enabled)}",
local.port_forwarding ? "bash /home/coder/code-tools/tf/scripts/port-forward.sh" : "echo 'No service port forwarding requested'"
]))
}

# Workspace network keeps the workspace stack isolated from the host.
resource "docker_network" "workspace" {
name = "coder-${local.workspace_id}"
driver = "bridge"

labels {
label = "coder.workspace_id"
value = local.workspace_id
}

labels {
label = "coder.owner"
value = data.coder_workspace_owner.me.name
}
}

# Persistent workspace data volume mounted at /workspaces inside the container.
resource "docker_volume" "workspaces" {
name = "workspaces-${local.workspace_id}"

labels {
label = "coder.workspace_id"
value = local.workspace_id
}

labels {
label = "coder.type"
value = "workspace-data"
}
}

# Separate persistent home directory for the coder user.
# Base development container image (customise via terraform.tfvars).
resource "docker_image" "devcontainer" {
name = var.devcontainer_image
keep_locally = true
}

@@ -1,198 +0,0 @@
terraform {
required_version = ">= 1.3.0"

required_providers {
coder = {
source = "coder/coder"
version = ">= 2.7"
}
docker = {
source = "kreuzwerker/docker"
version = "~> 2.25"
}
http = {
source = "hashicorp/http"
version = ">= 3.0"
}
}
}

provider "coder" {}

provider "docker" {
host = var.docker_socket != "" ? var.docker_socket : null
}

provider "http" {}

# Workspace context

data "coder_provisioner" "me" {}
data "coder_workspace" "me" {}
data "coder_workspace_owner" "me" {}

# User inputs kept intentionally small so the template is easy to launch.

data "coder_parameter" "project_repository" {
name = "project_repository"
display_name = "Project repository"
description = "Optional Git URL cloned into /workspaces on first startup."
default = ""
mutable = true
order = 1
}

data "coder_parameter" "enable_services" {
name = "enable_services"
display_name = "Enable PostgreSQL / Redis / Qdrant"
description = "Provision bundled data services inside the workspace network."
type = "bool"
default = "true"
mutable = true
order = 2
}

data "coder_parameter" "enable_ai_tools" {
name = "enable_ai_tools"
display_name = "Install AI tooling"
description = "Run the bundled AI helper scripts (Claude, Cursor, Windsurf)."
type = "bool"
default = "true"
mutable = true
order = 3
}

data "coder_parameter" "enable_pgadmin" {
name = "enable_pgadmin"
display_name = "Expose pgAdmin"
description = "Start the pgAdmin container when database services are enabled."
type = "bool"
default = "true"
mutable = true
order = 4
}

data "coder_parameter" "enable_jupyter" {
name = "enable_jupyter"
display_name = "Expose JupyterLab"
description = "Start the optional JupyterLab container."
type = "bool"
default = "false"
mutable = true
order = 5
}

data "coder_parameter" "enable_jetbrains" {
name = "enable_jetbrains"
display_name = "JetBrains Gateway"
description = "Install JetBrains Gateway integration for this workspace."
type = "bool"
default = "true"
mutable = true
order = 6
}

data "coder_parameter" "ai_prompt" {
name = "AI Prompt"
display_name = "AI Task Prompt"
description = "Optional pre-filled prompt shown when starting a Claude Code task."
type = "string"
default = ""
mutable = true
order = 7
form_type = "textarea"
}

locals {
bind_mounts = [
{ host = "${var.host_home_path}/.gitconfig", container = "/home/coder/.gitconfig" },
{ host = "${var.host_home_path}/.git-credentials", container = "/home/coder/.git-credentials" },
{ host = "${var.host_home_path}/.ssh", container = "/home/coder/.ssh" },
{ host = "${var.host_home_path}/.zshrc", container = "/home/coder/.zshrc" },
{ host = "${var.host_home_path}/.oh-my-zsh", container = "/home/coder/.oh-my-zsh" },
{ host = "${var.host_home_path}/.zsh_history", container = "/home/coder/.zsh_history" },
{ host = "${var.host_home_path}/.p10k.zsh", container = "/home/coder/.p10k.zsh" },
{ host = "${var.host_home_path}/.claude", container = "/home/coder/.claude" },
{ host = "${var.host_home_path}/.codex", container = "/home/coder/.codex" },
{ host = "${var.host_home_path}/.1password", container = "/home/coder/.1password" },
{ host = "${var.host_home_path}/.config", container = "/home/coder/.config" },
{ host = "${var.host_home_path}/.local", container = "/home/coder/.local" },
{ host = "${var.host_home_path}/.cache", container = "/home/coder/.cache" },
{ host = "${var.host_home_path}/.docker/config.json", container = "/home/coder/.docker/config.json" },
{ host = "${var.host_home_path}/code-tools", container = "/home/coder/code-tools" },
{ host = "${var.host_home_path}/claude-scripts", container = "/home/coder/claude-scripts" },
{ host = "${var.host_home_path}/lab-certs", container = "/home/coder/lab-certs" },
]

workspace_id = data.coder_workspace.me.id
container_name = "coder-${local.workspace_id}"

git_author_name = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
git_author_email = data.coder_workspace_owner.me.email

project_repo_url = trimspace(data.coder_parameter.project_repository.value)
repo_clone_command = local.project_repo_url != "" ? "git clone ${local.project_repo_url} /workspaces" : "echo 'No repository requested'"

services_enabled = data.coder_parameter.enable_services.value
pgadmin_enabled = data.coder_parameter.enable_pgadmin.value
jupyter_enabled = data.coder_parameter.enable_jupyter.value
port_forwarding = local.services_enabled || local.jupyter_enabled

postgres_url = "postgresql://postgres:${var.postgres_password}@postgres-${local.workspace_id}:5432/postgres"
redis_url = "redis://:${var.redis_password}@redis-${local.workspace_id}:6379"
qdrant_url = "http://qdrant-${local.workspace_id}:6333"

agent_startup = join("\n", compact([
"set -eu",
"export CODER_WORKSPACE_ID=${local.workspace_id}",
"# SSL Certificate setup",
"sudo cp /home/coder/lab-certs/lab.crt /usr/local/share/ca-certificates/lab.crt || echo 'SSL cert not available'",
"sudo update-ca-certificates || echo 'Cannot update ca-certificates'",
"git config --global http.\"https://git.lab\".sslCAInfo /home/coder/lab-certs/lab.crt || echo 'Cannot configure git ssl'",
"# End SSL setup",
"git config --global user.name \"${local.git_author_name}\"",
"git config --global user.email \"${local.git_author_email}\"",
local.project_repo_url != "" ? "if [ ! -d /workspaces/.git ]; then ${local.repo_clone_command}; fi" : "",
"export ENABLE_PGADMIN=${tostring(local.pgadmin_enabled)}",
"export ENABLE_JUPYTER=${tostring(local.jupyter_enabled)}",
local.port_forwarding ? "bash /home/coder/code-tools/tf/scripts/port-forward.sh" : "echo 'No service port forwarding requested'"
]))
}

# Workspace network keeps the workspace stack isolated from the host.
resource "docker_network" "workspace" {
name = "coder-${local.workspace_id}"
driver = "bridge"

labels {
label = "coder.workspace_id"
value = local.workspace_id
}

labels {
label = "coder.owner"
value = data.coder_workspace_owner.me.name
}
}

# Persistent workspace data volume mounted at /workspaces inside the container.
resource "docker_volume" "workspaces" {
name = "workspaces-${local.workspace_id}"

labels {
label = "coder.workspace_id"
value = local.workspace_id
}

labels {
label = "coder.type"
value = "workspace-data"
}
}

# Separate persistent home directory for the coder user.
# Base development container image (customise via terraform.tfvars).
resource "docker_image" "devcontainer" {
name = var.devcontainer_image
keep_locally = true
}

@@ -14,11 +14,18 @@ locals {
order = 2
blocks_login = true
}
terminal_tools = {
display = "Install Terminal Tools"
icon = "/icon/terminal.svg"
path = "${path.module}/scripts/terminal-tools.sh"
order = 3
blocks_login = false
}
git_hooks = {
display = "Configure Git Hooks"
icon = "/icon/git.svg"
path = "${path.module}/scripts/git-hooks.sh"
order = 3
order = 4
blocks_login = false
}
}
@@ -34,10 +41,17 @@ locals {
codex = {
enabled = data.coder_parameter.enable_ai_tools.value && var.install_codex_support
display = "Install Codex CLI"
icon = "/icon/code.svg"
icon = "/icon/openai.svg"
script = "/usr/local/bin/codex-setup.sh"
blocks_login = false
}
gemini = {
enabled = data.coder_parameter.enable_ai_tools.value && var.install_gemini_support
display = "Install Gemini CLI"
icon = "/icon/google.svg"
path = "${path.module}/scripts/gemini-setup.sh"
blocks_login = false
}
cursor = {
enabled = data.coder_parameter.enable_ai_tools.value && var.install_cursor_support
display = "Configure Cursor"
@@ -76,5 +90,5 @@ resource "coder_script" "ai" {
run_on_start = true
start_blocks_login = each.value.blocks_login

script = "bash ${each.value.script}"
script = lookup(each.value, "script", null) != null ? "bash ${each.value.script}" : "echo '${base64encode(file(each.value.path))}' | base64 -d | tr -d '\\r' | bash"
}

tf-dockerfile/scripts/agentapi/agentapi-start.sh (new executable file, 37 lines)
@@ -0,0 +1,37 @@
#!/bin/bash
set -euo pipefail

# AgentAPI start script
# This script starts the AgentAPI server with proper user permissions

install_agentapi=${1:-false}
port=${2:-3284}
target_user="${USER:-coder}"

# Ensure we have proper permissions
umask 022

if [ "$install_agentapi" = "true" ]; then
echo "Starting AgentAPI on port $port..."

# Check if AgentAPI is installed
if ! command -v agentapi >/dev/null 2>&1; then
echo "Error: agentapi command not found"
exit 1
fi

# Start AgentAPI as the appropriate user
current_uid=$(id -u)
if [ "$current_uid" -eq 0 ]; then
echo "Running as root - switching to $target_user for AgentAPI"
chown -R "$target_user:$target_user" "/home/coder" 2>/dev/null || true
# Start AgentAPI as the target user
exec sudo -u "$target_user" -E -H agentapi serve --port="$port"
else
echo "Running as user $(whoami) - starting AgentAPI directly"
exec agentapi serve --port="$port"
fi
else
echo "AgentAPI installation disabled, skipping start..."
exit 0
fi

tf-dockerfile/scripts/agentapi/agentapi-wait-for-start.sh (new executable file, 31 lines)
@@ -0,0 +1,31 @@
#!/bin/bash
set -euo pipefail

port=${1:-3284}

# This script waits for the agentapi server to start on the specified port.
# It considers the server started after 3 consecutive successful responses.

agentapi_started=false

echo "Waiting for agentapi server to start on port $port..."
for i in $(seq 1 150); do
for j in $(seq 1 3); do
sleep 0.1
if curl -fs -o /dev/null "http://localhost:$port/status"; then
echo "agentapi response received ($j/3)"
else
echo "agentapi server not responding ($i/15)"
continue 2
fi
done
agentapi_started=true
break
done

if [ "$agentapi_started" != "true" ]; then
echo "Error: agentapi server did not start on port $port after 15 seconds."
exit 1
fi

echo "agentapi server started on port $port."

tf-dockerfile/scripts/gemini-setup.sh (new executable file, 16 lines)
@@ -0,0 +1,16 @@
#!/usr/bin/env bash
set -euo pipefail

if ! command -v npm >/dev/null 2>&1; then
echo 'npm not found; skipping Gemini CLI install.' >&2
exit 0
fi

if command -v sudo >/dev/null 2>&1; then
sudo npm install -g @google/gemini-cli >/dev/null 2>&1 || sudo npm install -g @google/gemini-cli
else
npm install -g @google/gemini-cli >/dev/null 2>&1 || npm install -g @google/gemini-cli
fi

echo 'Google Gemini CLI installed or already present.'
echo 'Activate with "gemini" command in terminal.'

@@ -4,14 +4,35 @@ set -euo pipefail
REPO_DIR="/workspaces"
HOOK_DIR="$REPO_DIR/.git/hooks"
META_DIR="/tmp/git-metadata"
TARGET_USER="${USER:-coder}"

if [[ ! -d "$REPO_DIR/.git" ]]; then
echo "No Git repository found in $REPO_DIR; skipping hook install"
exit 0
fi

# Ensure directories exist with proper permissions
mkdir -p "$HOOK_DIR" "$META_DIR"

# If running as root, fix all permissions properly
if [[ $EUID -eq 0 ]]; then
echo "Running as root - fixing all git permissions"
# Fix the entire .git directory ownership
chown -R "$TARGET_USER:$TARGET_USER" "$REPO_DIR/.git"
chmod -R u+rwX,go+rX "$REPO_DIR/.git"
# Fix metadata directory
chown -R "$TARGET_USER:$TARGET_USER" "$META_DIR"
chmod 755 "$META_DIR"
else
# Try with sudo if available
if command -v sudo >/dev/null 2>&1; then
sudo chown -R "$TARGET_USER:$TARGET_USER" "$HOOK_DIR" "$META_DIR" 2>/dev/null || true
sudo chmod 755 "$HOOK_DIR" "$META_DIR" 2>/dev/null || true
else
chmod 755 "$HOOK_DIR" "$META_DIR" 2>/dev/null || true
fi
fi

cat <<'HOOK' > "$HOOK_DIR/post-commit"
#!/usr/bin/env bash
set -e
@@ -22,6 +43,15 @@ git branch --show-current > "$META_DIR/current-branch" 2>/dev/null || echo "mai
git rev-parse HEAD > "$META_DIR/commit-hash" 2>/dev/null || echo "unknown" > "$META_DIR/commit-hash"
git remote get-url origin > "$META_DIR/remote-url" 2>/dev/null || echo "no-remote" > "$META_DIR/remote-url"
HOOK

# Set proper permissions on the hook file
chmod +x "$HOOK_DIR/post-commit"

# Ensure proper ownership
if [[ $EUID -eq 0 ]]; then
chown "$TARGET_USER:$TARGET_USER" "$HOOK_DIR/post-commit"
elif command -v sudo >/dev/null 2>&1; then
sudo chown "$TARGET_USER:$TARGET_USER" "$HOOK_DIR/post-commit" 2>/dev/null || true
fi

echo "Git post-commit hook installed for metadata capture."

@@ -13,7 +13,7 @@ fi

SERVICES_ENABLED="${ENABLE_SERVICES:-false}"
PGADMIN_ENABLED="${ENABLE_PGADMIN:-false}"
JUPYTER_ENABLED="${ENABLE_JUPYTER:-false}"
MARIMO_ENABLED="${ENABLE_MARIMO:-false}"

if ! command -v socat >/dev/null 2>&1; then
if command -v apt-get >/dev/null 2>&1; then
@@ -30,7 +30,7 @@ fi
# stop previous forwards if they exist
pkill -f "socat.*pgadmin" >/dev/null 2>&1 || true
pkill -f "socat.*qdrant" >/dev/null 2>&1 || true
pkill -f "socat.*jupyter" >/dev/null 2>&1 || true
pkill -f "socat.*marimo" >/dev/null 2>&1 || true

if [[ "${SERVICES_ENABLED}" == "true" ]]; then
if [[ "${PGADMIN_ENABLED}" == "true" ]]; then
@@ -46,11 +46,11 @@ else
echo "Database services disabled; skipping pgAdmin/Qdrant forwards"
fi

if [[ "${JUPYTER_ENABLED}" == "true" ]]; then
echo "Forwarding JupyterLab to localhost:8888"
nohup socat TCP-LISTEN:8888,reuseaddr,fork TCP:jupyter-${WORKSPACE_ID}:8888 >/tmp/socat-jupyter.log 2>&1 &
# Marimo runs inside workspace container, no port forwarding needed
if [[ "${MARIMO_ENABLED}" == "true" ]]; then
echo "Marimo running inside workspace container (no port forwarding needed)"
else
echo "JupyterLab disabled; skipping port forward"
echo "Marimo disabled"
fi

sleep 2

tf-dockerfile/scripts/terminal-tools.sh (new file, 58 lines)
@@ -0,0 +1,58 @@
#!/usr/bin/env bash
set -euo pipefail

echo "Installing terminal tools..."

# Install superfile (terminal file manager)
echo "Installing superfile..."
if ! command -v spf >/dev/null 2>&1; then
curl -sL https://superfile.netlify.app/install.sh | bash
# Add to PATH if not already there
if ! echo "$PATH" | grep -q "$HOME/.local/bin"; then
echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc
export PATH="$HOME/.local/bin:$PATH"
fi
else
echo "superfile already installed"
fi

# Install lazygit (terminal git manager)
echo "Installing lazygit..."
if ! command -v lazygit >/dev/null 2>&1; then
LAZYGIT_VERSION=$(curl -s "https://api.github.com/repos/jesseduffield/lazygit/releases/latest" | grep -Po '"tag_name": "v\K[^"]*')
curl -Lo lazygit.tar.gz "https://github.com/jesseduffield/lazygit/releases/latest/download/lazygit_${LAZYGIT_VERSION}_Linux_x86_64.tar.gz"
tar xf lazygit.tar.gz lazygit
# Install to user's local bin directory to avoid sudo
mkdir -p $HOME/.local/bin
cp lazygit $HOME/.local/bin/
chmod +x $HOME/.local/bin/lazygit
rm lazygit lazygit.tar.gz
else
echo "lazygit already installed"
fi

# Install lazydocker (terminal docker manager)
echo "Installing lazydocker..."
if ! command -v lazydocker >/dev/null 2>&1; then
curl https://raw.githubusercontent.com/jesseduffield/lazydocker/master/scripts/install_update_linux.sh | bash
else
echo "lazydocker already installed"
fi

# Install btop (better htop alternative for system monitoring)
echo "Installing btop..."
if ! command -v btop >/dev/null 2>&1; then
echo "Installing btop from GitHub releases..."
BTOP_VERSION=$(curl -s "https://api.github.com/repos/aristocratos/btop/releases/latest" | grep -Po '"tag_name": "v\K[^"]*')
curl -Lo btop.tbz "https://github.com/aristocratos/btop/releases/latest/download/btop-x86_64-linux-musl.tbz"
tar -xjf btop.tbz
# Install to user's local bin directory to avoid sudo
mkdir -p $HOME/.local/bin
cp btop/bin/btop $HOME/.local/bin/
chmod +x $HOME/.local/bin/btop
rm -rf btop btop.tbz
else
echo "btop already installed"
fi

echo "Terminal tools installation completed successfully!"

@@ -201,7 +201,7 @@ resource "docker_container" "pgadmin" {
"PGADMIN_DEFAULT_PASSWORD=${var.pgadmin_password}",
"PGADMIN_CONFIG_SERVER_MODE=False",
"PGADMIN_CONFIG_MASTER_PASSWORD_REQUIRED=False",
"PGADMIN_LISTEN_PORT=80"
"PGADMIN_LISTEN_PORT=5050"
]

networks_advanced {
@@ -214,7 +214,7 @@ resource "docker_container" "pgadmin" {
}

healthcheck {
test = ["CMD-SHELL", "nc -z localhost 80 || exit 1"]
test = ["CMD-SHELL", "nc -z localhost 5050 || exit 1"]
interval = "30s"
timeout = "10s"
retries = 3
@@ -234,63 +234,7 @@ resource "docker_container" "pgadmin" {
}
}

resource "docker_volume" "jupyter_data" {
count = data.coder_parameter.enable_jupyter.value ? 1 : 0
name = "jupyter-data-${local.workspace_id}"
# Jupyter now runs inside workspace container via startup_script

labels {
label = "coder.service"
value = "jupyter"
}

labels {
label = "coder.workspace_id"
value = local.workspace_id
}
}

resource "docker_container" "jupyter" {
count = data.coder_parameter.enable_jupyter.value ? 1 : 0
image = "jupyter/scipy-notebook:latest"
name = "jupyter-${local.workspace_id}"

env = [
"JUPYTER_ENABLE_LAB=yes",
"JUPYTER_TOKEN=",
"RESTARTABLE=yes",
"JUPYTER_PORT=8888"
]

networks_advanced {
name = docker_network.workspace.name
}

volumes {
volume_name = docker_volume.jupyter_data[0].name
container_path = "/home/jovyan/work"
}

volumes {
volume_name = docker_volume.workspaces.name
container_path = "/home/jovyan/workspaces"
}

healthcheck {
test = ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8888"]
interval = "30s"
timeout = "10s"
retries = 5
}

restart = "unless-stopped"

labels {
label = "coder.service"
value = "jupyter"
}

labels {
label = "coder.workspace_id"
value = local.workspace_id
}
}
# Services now run inside the workspace container via startup_script
# No separate containers needed for code-server or jupyter

@@ -114,3 +114,9 @@ variable "install_codex_support" {
type = bool
default = true
}

variable "install_gemini_support" {
description = "Install Google Gemini CLI when AI tooling is enabled."
type = bool
default = true
}

@@ -8,45 +8,404 @@ resource "coder_agent" "main" {
"GIT_AUTHOR_EMAIL" = local.git_author_email
"CODER_WORKSPACE_ID" = local.workspace_id
"CODER_WORKSPACE_REPO" = local.project_repo_url
"POSTGRES_URL" = local.services_enabled ? local.postgres_url : ""
"REDIS_URL" = local.services_enabled ? local.redis_url : ""
"QDRANT_URL" = local.services_enabled ? local.qdrant_url : ""
"ENABLE_PGADMIN" = tostring(local.pgadmin_enabled)
"ENABLE_JUPYTER" = tostring(local.jupyter_enabled)
"ENABLE_MARIMO" = tostring(local.marimo_enabled)
"ENABLE_SERVICES" = tostring(local.services_enabled)
"CODER_AGENT_BLOCK_FILE_TRANSFER" = var.block_file_transfer ? "1" : ""
}

startup_script = local.agent_startup
startup_script = <<-EOT
#!/bin/bash
set -euo pipefail

# Basic setup
${local.agent_startup}

# Switch to coder user for service startup
sudo -u coder bash << 'CODER_SETUP'
set -euo pipefail

# Fix RVM environment variables to suppress warnings
export GEM_HOME="$HOME/.gem"
export GEM_PATH="$HOME/.gem"

# Ensure gem directory exists
mkdir -p "$HOME/.gem"

# Add RVM environment to bashrc for persistence
if ! grep -q 'export GEM_HOME="$HOME/.gem"' ~/.bashrc; then
echo '# RVM environment fix' >> ~/.bashrc
echo 'export GEM_HOME="$HOME/.gem"' >> ~/.bashrc
echo 'export GEM_PATH="$HOME/.gem"' >> ~/.bashrc
echo 'export PATH="$GEM_HOME/bin:$PATH"' >> ~/.bashrc
fi

# Also add to profile for login shells
if ! grep -q 'export GEM_HOME="$HOME/.gem"' ~/.profile 2>/dev/null; then
echo '# RVM environment fix' >> ~/.profile
echo 'export GEM_HOME="$HOME/.gem"' >> ~/.profile
echo 'export GEM_PATH="$HOME/.gem"' >> ~/.profile
echo 'export PATH="$GEM_HOME/bin:$PATH"' >> ~/.profile
fi

# Add coder user to docker group for LazyDocker access
if ! groups | grep -q docker; then
echo "Adding coder user to docker group..."
sudo usermod -aG docker coder
fi

# Install terminal tools if not already installed
echo "Installing terminal tools..."

# Install superfile (terminal file manager)
if ! command -v spf >/dev/null 2>&1; then
echo "Installing superfile..."
curl -sL https://superfile.netlify.app/install.sh | bash
# Add to PATH if not already there
if ! echo "$PATH" | grep -q "$HOME/.local/bin"; then
echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc
export PATH="$HOME/.local/bin:$PATH"
fi
else
echo "superfile already installed"
fi

# Install lazygit (terminal git manager)
if ! command -v lazygit >/dev/null 2>&1; then
echo "Installing lazygit..."
LAZYGIT_VERSION=$(curl -s "https://api.github.com/repos/jesseduffield/lazygit/releases/latest" | grep -Po '"tag_name": "v\K[^"]*')
curl -Lo lazygit.tar.gz "https://github.com/jesseduffield/lazygit/releases/latest/download/lazygit_$${LAZYGIT_VERSION}_Linux_x86_64.tar.gz"
tar xf lazygit.tar.gz lazygit
# Install to user's local bin directory to avoid sudo
mkdir -p $HOME/.local/bin
cp lazygit $HOME/.local/bin/
chmod +x $HOME/.local/bin/lazygit
rm lazygit lazygit.tar.gz
else
echo "lazygit already installed"
fi

# Install lazydocker (terminal docker manager)
if ! command -v lazydocker >/dev/null 2>&1; then
echo "Installing lazydocker..."
# Install lazydocker to user's local bin directory to avoid sudo
LAZYDOCKER_VERSION=$(curl -s "https://api.github.com/repos/jesseduffield/lazydocker/releases/latest" | grep -Po '"tag_name": "v\K[^"]*')
curl -Lo lazydocker.tar.gz "https://github.com/jesseduffield/lazydocker/releases/latest/download/lazydocker_$${LAZYDOCKER_VERSION}_Linux_x86_64.tar.gz"
tar xf lazydocker.tar.gz lazydocker
mkdir -p $HOME/.local/bin
cp lazydocker $HOME/.local/bin/
chmod +x $HOME/.local/bin/lazydocker
rm lazydocker lazydocker.tar.gz
else
echo "lazydocker already installed"
fi

# Install btop (better htop alternative for system monitoring)
if ! command -v btop >/dev/null 2>&1; then
echo "Installing btop from GitHub releases..."
BTOP_VERSION=$(curl -s "https://api.github.com/repos/aristocratos/btop/releases/latest" | grep -Po '"tag_name": "v\K[^"]*')
curl -Lo btop.tbz "https://github.com/aristocratos/btop/releases/latest/download/btop-x86_64-linux-musl.tbz"
tar -xjf btop.tbz
# Install to user's local bin directory to avoid sudo
mkdir -p $HOME/.local/bin
cp btop/bin/btop $HOME/.local/bin/
chmod +x $HOME/.local/bin/btop
rm -rf btop btop.tbz
else
echo "btop already installed"
fi

echo "Terminal tools installation completed!"

# Install and start code-server
echo "Setting up code-server..."
if [ ! -f /tmp/code-server/bin/code-server ]; then
echo "Installing code-server..."
curl -fsSL https://code-server.dev/install.sh | sh -s -- --method=standalone --prefix=/tmp/code-server
if [ $? -ne 0 ]; then
echo "Failed to install code-server"
exit 1
fi
fi

# Verify installation
if [ ! -f /tmp/code-server/bin/code-server ]; then
echo "ERROR: code-server binary not found after installation"
exit 1
fi

# Create config directory
mkdir -p $HOME/.config/code-server
cat > $HOME/.config/code-server/config.yaml << 'CONFIG'
bind-addr: 127.0.0.1:13337
auth: none
cert: false
CONFIG

echo "Starting code-server..."
/tmp/code-server/bin/code-server --config $HOME/.config/code-server/config.yaml /workspaces > /tmp/code-server.log 2>&1 &

# Wait a moment and check if it started
sleep 2
if pgrep -f "code-server" > /dev/null; then
echo "✅ code-server started successfully"
else
echo "❌ code-server failed to start, check /tmp/code-server.log"
fi

# Install and start Marimo if enabled
if [ "${tostring(local.marimo_enabled)}" = "true" ]; then
echo "Installing latest Marimo with uv..."

# Use uv to create venv and install marimo
export HOME=/home/coder
export USER=coder
cd /home/coder

# Install Python 3.12 if not available
if ! command -v python3.12 >/dev/null 2>&1; then
echo "Installing Python 3.12..."
apt-get update -qq
apt-get install -y software-properties-common
add-apt-repository ppa:deadsnakes/ppa -y
apt-get update -qq
apt-get install -y python3.12 python3.12-venv python3.12-pip
fi

# Find uv binary
UV_BIN=""
if command -v uv >/dev/null 2>&1; then
UV_BIN="uv"
elif [ -f "/home/coder/.local/bin/uv" ]; then
UV_BIN="/home/coder/.local/bin/uv"
elif [ -f "/usr/local/bin/uv" ]; then
UV_BIN="/usr/local/bin/uv"
fi

if [ -n "$UV_BIN" ]; then
# Create virtual environment if it doesn't exist
if [ ! -d "/home/coder/.venv" ]; then
$UV_BIN venv -p python3.12 /home/coder/workspaces/.venv
fi
# Install marimo in the venv
mkdir -p /home/coder/workspaces && cd /home/coder/workspaces && $UV_BIN pip install --upgrade marimo
else
echo "uv not found, falling back to pip"
pip install --user --upgrade marimo
fi

if [ $? -ne 0 ]; then
echo "Failed to install Marimo"
exit 1
fi

# Ensure .local/bin is in PATH
export PATH="$HOME/.local/bin:$PATH"

# Add to bashrc for persistence
if ! grep -q 'export PATH="$HOME/.local/bin:$PATH"' ~/.bashrc; then
echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc
fi

# Verify installation
if [ ! -f "$HOME/.local/bin/marimo" ]; then
echo "ERROR: marimo binary not found after installation"
exit 1
fi

# Create a simple marimo notebook directory structure
mkdir -p ~/workspaces/notebooks

# Create a basic marimo app if none exists
if [ ! -f "~/workspaces/notebooks/welcome.py" ]; then
cat > ~/workspaces/notebooks/welcome.py << 'MARIMO_APP'
import marimo

__generated_with = "0.16.0"
app = marimo.App()

@app.cell
def __():
import marimo as mo
return mo,

@app.cell
def __(mo):
mo.md("# Welcome to Marimo!")
return

@app.cell
def __(mo):
mo.md("This is your interactive notebook environment.")
return

if __name__ == "__main__":
app.run()
MARIMO_APP
fi

echo "Starting Marimo..."
# Kill any existing marimo processes first
pkill -f marimo || true

# Start marimo with proper environment and activate venv
export HOME=/home/coder
export USER=coder
export PATH="/home/coder/.venv/bin:/home/coder/.local/bin:$PATH"
cd /home/coder
# Activate virtual environment if it exists
if [ -f "/home/coder/.venv/bin/activate" ]; then
source /home/coder/.venv/bin/activate
fi
nohup marimo edit --headless --host 0.0.0.0 --port 8888 > /tmp/marimo.log 2>&1 &

# Wait a moment and check if it started
sleep 3
if pgrep -f "marimo" > /dev/null; then
echo "✅ Marimo started successfully"
else
echo "❌ Marimo failed to start, check /tmp/marimo.log"
cat /tmp/marimo.log
fi
else
echo "Marimo disabled (enable_marimo = ${tostring(local.marimo_enabled)})"
fi

echo "Services started successfully"
CODER_SETUP

echo "Agent startup completed"
EOT

metadata {
display_name = "CPU Usage"
key = "0_cpu_usage"
script = "coder stat cpu 2>/dev/null || echo 'n/a'"
script = <<-EOT
# Get comprehensive CPU usage breakdown
container_cpu="n/a"
vm_cpu="n/a"
device_cores="n/a"

# Container CPU (if available)
if command -v docker >/dev/null 2>&1; then
container_cpu=$(docker stats --no-stream --format "{{.CPUPerc}}" $(hostname) 2>/dev/null || echo "n/a")
fi

# VM CPU from /proc/stat
vm_cpu=$(awk '/cpu /{u=$2+$4; t=$2+$3+$4+$5; if (NR==1){u1=u; t1=t;} else printf "%.1f%%", (u-u1) * 100 / (t-t1); }' \
<(grep 'cpu ' /proc/stat) <(sleep 1; grep 'cpu ' /proc/stat) 2>/dev/null || echo "n/a")

# Device cores
device_cores=$(nproc 2>/dev/null || echo "n/a")

# Format output on single line
if [ "$container_cpu" != "n/a" ]; then
echo "C:$container_cpu | Device:$${device_cores}c"
else
echo "VM:$vm_cpu | Device:$${device_cores}c"
fi
EOT
interval = 30
timeout = 5
timeout = 15
}

metadata {
display_name = "Memory Usage"
key = "1_memory_usage"
script = "coder stat mem 2>/dev/null || echo 'n/a'"
script = <<-EOT
# Get comprehensive memory usage breakdown
container_mem="n/a"
vm_mem="n/a"
device_total="128GB"
device_vram="64GB"

# Container memory (if available)
if command -v docker >/dev/null 2>&1; then
container_mem=$(docker stats --no-stream --format "{{.MemUsage}}" $(hostname) 2>/dev/null || echo "n/a")
fi

# VM memory from /proc/meminfo (should show ~32GB allocated to VM)
vm_mem=$(awk '/^MemTotal:/{total=$2} /^MemAvailable:/{avail=$2} END{used=total-avail; printf "%.2f/%.2f GB", used/1024/1024, total/1024/1024}' /proc/meminfo 2>/dev/null || echo "n/a")

# Format output on single line
if [ "$container_mem" != "n/a" ]; then
echo "C:$container_mem | Device:$device_total ($device_vram VRAM)"
else
echo "VM:$vm_mem | Device:$device_total ($device_vram VRAM)"
fi
EOT
interval = 30
timeout = 5
timeout = 15
}

metadata {
display_name = "Disk Usage"
key = "2_disk_usage"
script = "df -h /workspaces 2>/dev/null | awk 'NR==2 {print $5}' || echo 'n/a'"
script = <<-EOT
# Get comprehensive disk usage breakdown
workspace_disk="n/a"
home_disk="n/a"
root_disk="n/a"

# Workspace volume (/workspaces)
if [ -d "/workspaces" ]; then
workspace_disk=$(df -BG /workspaces 2>/dev/null | awk 'NR==2 {
used = $3; gsub(/G/, "", used);
total = $2; gsub(/G/, "", total);
printf "%dGB/%dGB", used, total
}' || echo "n/a")
fi

# Home volume (/home) - should be ~1TB
if [ -d "/home" ]; then
home_disk=$(df -BG /home 2>/dev/null | awk 'NR==2 {
used = $3; gsub(/G/, "", used);
total = $2; gsub(/G/, "", total);
printf "%dGB/%dTB", used, int(total/1000)
}' || echo "n/a")
fi

# Root filesystem (/) - should be ~98GB
root_disk=$(df -BG / 2>/dev/null | awk 'NR==2 {
used = $3; gsub(/G/, "", used);
total = $2; gsub(/G/, "", total);
printf "%dGB/%dGB", used, total
}' || echo "n/a")

# Format output on single line
if [ "$workspace_disk" != "n/a" ] && [ "$home_disk" != "n/a" ]; then
echo "W:$workspace_disk | H:$home_disk"
elif [ "$workspace_disk" != "n/a" ]; then
echo "W:$workspace_disk | Root:$root_disk"
elif [ "$home_disk" != "n/a" ]; then
echo "Root:$root_disk | H:$home_disk"
else
echo "Root:$root_disk"
fi
EOT
interval = 300
timeout = 10
timeout = 15
}

metadata {
display_name = "Git Branch"
key = "3_git_branch"
script = "cd /workspaces && git branch --show-current 2>/dev/null || echo 'no-repo'"
script = <<-EOT
# Check for git repository dynamically
if [ -n "$CODER_WORKSPACE_REPO" ] && [ "$CODER_WORKSPACE_REPO" != "" ]; then
# Extract repo name from URL and look for it
repo_name=$(basename "$CODER_WORKSPACE_REPO" .git)
if [ -d "/workspaces/$repo_name/.git" ]; then
cd "/workspaces/$repo_name" && git branch --show-current 2>/dev/null || echo 'detached'
elif [ -d "/workspaces/.git" ]; then
cd "/workspaces" && git branch --show-current 2>/dev/null || echo 'detached'
else
echo 'no-repo'
fi
else
# Fallback to checking workspace root
cd /workspaces && git branch --show-current 2>/dev/null || echo 'no-repo'
fi
EOT
interval = 300
timeout = 5
}

@@ -84,12 +443,20 @@ resource "docker_container" "workspace" {

memory = var.workspace_memory_limit > 0 ? var.workspace_memory_limit * 1024 * 1024 : null

# Enable privileged mode for full system access
privileged = true

# Run as root to avoid permission issues
user = "root"

env = compact([
"GIT_AUTHOR_NAME=${local.git_author_name}",
"GIT_AUTHOR_EMAIL=${local.git_author_email}",
"CODER_AGENT_TOKEN=${coder_agent.main.token}",
"CODER_AGENT_DEVCONTAINERS_ENABLE=true",
local.project_repo_url != "" ? "CODER_WORKSPACE_REPO=${local.project_repo_url}" : "",
"HOME=/home/coder",
"USER=coder",
])

networks_advanced {
@@ -125,7 +492,15 @@ resource "docker_container" "workspace" {
}

working_dir = "/workspaces"
command = ["/bin/bash", "-c", "${coder_agent.main.init_script} && sleep infinity"]
command = ["/bin/bash", "-c", <<-EOT
# Run init script as root to handle permissions
${coder_agent.main.init_script}

# Switch to coder user for ongoing operations
echo "Switching to coder user for services..."
exec sudo -u coder -i bash -c 'cd /workspaces && sleep infinity'
EOT
]

labels {
label = "coder.owner"
@@ -175,74 +550,135 @@ module "pycharm_desktop" {
slug = "pycharm-gateway"
}

module "claude_code" {
count = data.coder_workspace.me.start_count > 0 && data.coder_parameter.enable_ai_tools.value ? 1 : 0
source = "registry.coder.com/coder/claude-code/coder"
agent_id = coder_agent.main.id
workdir = "/workspaces"
group = "AI Tools"
order = 30
install_claude_code = false
install_agentapi = false
pre_install_script = <<-EOT
#!/bin/bash
set -euo pipefail
# module "claude_code" {
#   count = data.coder_workspace.me.start_count > 0 && data.coder_parameter.enable_ai_tools.value ? 1 : 0
#   source = "registry.coder.com/coder/claude-code/coder"
#   agent_id = coder_agent.main.id
#   workdir = "/workspaces"
#   group = "AI Tools"
#   order = 30
#   install_claude_code = false
#   install_agentapi = true
#   pre_install_script = <<-EOT
#     #!/usr/bin/env bash
#     set -euo pipefail
#
#     # We're running as root, so set up the coder user properly
#     target_user="coder"
#     target_home="/home/coder"
#
#     # Ensure coder user exists
#     if ! id "$target_user" >/dev/null 2>&1; then
#       useradd -m -s /bin/bash "$target_user" || true
#     fi
#
#     # Create home directory if it doesn't exist
#     mkdir -p "$target_home"
#     chown "$target_user:$target_user" "$target_home"
#
#     # Setup passwordless sudo for coder user
#     echo "$target_user ALL=(ALL) NOPASSWD:ALL" > "/etc/sudoers.d/$target_user"
#     chmod 440 "/etc/sudoers.d/$target_user"
#     echo "Passwordless sudo configured for $target_user"
#
#     # Ensure proper ownership of important directories
#     mkdir -p "$target_home/.local/bin"
#     chown -R "$target_user:$target_user" "$target_home/.local"
#
#     real_curl="$(command -v curl || true)"
#     wrapper=/usr/local/bin/curl
#     if [ -n "$real_curl" ]; then
#       if ! curl --help 2>/dev/null | grep -q -- "--retry-all-errors"; then
#         real_curl="$(readlink -f "$real_curl" 2>/dev/null || echo "$real_curl")"
#         if [ "$real_curl" != "$wrapper" ]; then
#           python3 - <<'PY' "$real_curl" "$wrapper"
# import os, sys, stat
# real=sys.argv[1]; wrapper=sys.argv[2]
# code=f"""#!/usr/bin/env python3
# import os, sys
# real={real!r}
# args=[a for a in sys.argv[1:] if a != "--retry-all-errors"]
# os.execv(real,[real]+args)
# """
# with open(wrapper,'w', encoding='utf-8') as f:
#     f.write(code)
# os.chmod(wrapper, 0o755)
# PY
#         fi
#       fi
#     fi
#
#     agentapi_version="v0.7.1"
#     bin_dir="$HOME/.local/bin"
#     mkdir -p "$bin_dir"
#
#     # Ensure agentapi scripts directory exists
#     agentapi_scripts_dir="$HOME/code-tools/tf/scripts/agentapi"
#     if [ ! -d "$agentapi_scripts_dir" ]; then
#       echo "Creating agentapi scripts directory: $agentapi_scripts_dir"
#       mkdir -p "$agentapi_scripts_dir"
#       # Create minimal placeholder scripts if they don't exist
#       if [ ! -f "$agentapi_scripts_dir/agentapi-start.sh" ]; then
#         cat > "$agentapi_scripts_dir/agentapi-start.sh" << 'SCRIPT_EOF'
# #!/bin/bash
# echo "AgentAPI start script placeholder"
# SCRIPT_EOF
#         chmod +x "$agentapi_scripts_dir/agentapi-start.sh"
#       fi
#     fi
#
#     ensure_agentapi() {
#       if command -v agentapi >/dev/null 2>&1; then
#         return 0
#       fi
#
#       arch=$(uname -m)
#       case "$arch" in
#         x86_64|amd64)
#           asset="agentapi-linux-amd64"
#           ;;
#         aarch64|arm64)
#           asset="agentapi-linux-arm64"
#           ;;
#         *)
#           echo "warning: unsupported architecture $arch; skipping agentapi bootstrap" >&2
#           return 1
#           ;;
#       esac
#
#       if [ "$agentapi_version" = "latest" ]; then
#         url="https://github.com/coder/agentapi/releases/latest/download/$${asset}"
#       else
#         url="https://github.com/coder/agentapi/releases/download/$${agentapi_version}/$${asset}"
#       fi
#
#       tmp_file=$(mktemp)
#       if ! curl -fsSL "$url" -o "$tmp_file"; then
#         echo "warning: failed to download agentapi from $url" >&2
#         rm -f "$tmp_file"
#         return 1
#       fi
#
#       if command -v install >/dev/null 2>&1; then
#         install -m 0755 "$tmp_file" "$bin_dir/agentapi"
#       else
#         mv "$tmp_file" "$bin_dir/agentapi"
#         chmod 0755 "$bin_dir/agentapi"
#       fi
#       rm -f "$tmp_file"
#       echo "Installed agentapi CLI into $bin_dir/agentapi"
#     }
#
#     ensure_agentapi
#
#     if ! command -v claude >/dev/null 2>&1; then
#       echo "warning: claude CLI not found; expected pre-installed in base image." >&2
|
||||
# fi
|
||||
#
|
||||
# case ":$PATH:" in
|
||||
# *:"$bin_dir":*) ;;
|
||||
# *) echo "PATH does not include $bin_dir; adding for current session." >&2; export PATH="$bin_dir:$PATH" ;;
|
||||
# esac
|
||||
# EOT
|
||||
# }

    agentapi_version="v0.7.1"
    bin_dir="$HOME/.local/bin"
    mkdir -p "$bin_dir"

    ensure_agentapi() {
      if command -v agentapi >/dev/null 2>&1; then
        return 0
      fi

      arch=$(uname -m)
      case "$arch" in
        x86_64|amd64)
          asset="agentapi-linux-amd64"
          ;;
        aarch64|arm64)
          asset="agentapi-linux-arm64"
          ;;
        *)
          echo "warning: unsupported architecture $arch; skipping agentapi bootstrap" >&2
          return 1
          ;;
      esac

      if [ "$agentapi_version" = "latest" ]; then
        url="https://github.com/coder/agentapi/releases/latest/download/$${asset}"
      else
        url="https://github.com/coder/agentapi/releases/download/$${agentapi_version}/$${asset}"
      fi

      tmp_file=$(mktemp)
      if ! curl -fsSL "$url" -o "$tmp_file"; then
        echo "warning: failed to download agentapi from $url" >&2
        rm -f "$tmp_file"
        return 1
      fi

      if command -v install >/dev/null 2>&1; then
        install -m 0755 "$tmp_file" "$bin_dir/agentapi"
      else
        mv "$tmp_file" "$bin_dir/agentapi"
        chmod 0755 "$bin_dir/agentapi"
      fi
      rm -f "$tmp_file"
      echo "Installed agentapi CLI into $bin_dir/agentapi"
    }

    ensure_agentapi

    if ! command -v claude >/dev/null 2>&1; then
      echo "warning: claude CLI not found; expected pre-installed in base image." >&2
    fi

    case ":$PATH:" in
      *:"$bin_dir":*) ;;
      *) echo "PATH does not include $bin_dir; adding for current session." >&2; export PATH="$bin_dir:$PATH" ;;
    esac
  EOT
}

67 tf/.terraform.lock.hcl generated
@@ -1,67 +0,0 @@
|
||||
# This file is maintained automatically by "terraform init".
|
||||
# Manual edits may be lost in future updates.
|
||||
|
||||
provider "registry.terraform.io/coder/coder" {
|
||||
version = "2.10.1"
|
||||
constraints = ">= 0.17.0, ~> 2.0"
|
||||
hashes = [
|
||||
"h1:X/5i3/1fFLc/WbWHRhJERBVGUd1oGCrXPKjIwP4qMFY=",
|
||||
"zh:037ef9d6b5dfebb2aa1f81ab87d07946ba2bb8fe4819ae02b1046b70c42bed38",
|
||||
"zh:17125eb205a4341eedbea00e53a9c2c9e3396799e1b5d7865dd4d3261369707b",
|
||||
"zh:1c482c03c47817a02b3829caeb7f3c757d82aedbd0f200c71b946a23d14b3bb0",
|
||||
"zh:1de27897786d81528ed321ba84ec4cde05b381ba192613b78aefc39019c1adca",
|
||||
"zh:4667e4c64961fe03b502b27b9f15c2360b1f0f213626b6b473f49f2ef65070ea",
|
||||
"zh:4788fd6930f7ea984daaf244f8206faabf4caede21a05701802bfabf73cf4298",
|
||||
"zh:6572294a08a3bc9b12294b406173f63e9927252cb2795f43ece8dd9e9cfefe2c",
|
||||
"zh:70f33963c020ebe571354a40f33332c904faa5c3452eeb6a2794b8012ec558cc",
|
||||
"zh:7bf3587a24bddce84d619a111583a728cf35f0d792459f3051a1ec71d9bf8f0f",
|
||||
"zh:a42c73f2e21e935cd95430ae1eb770c6b3a17e2f4dce1379d7e2ca98b099c2fa",
|
||||
"zh:a82a6899f425ab5196f2a7732044f3b05ffafc6c26a03e6c4899d1caef56f9dd",
|
||||
"zh:d3f0e48d5dc7a415cb6a355f8ff985ed126a490690ced29bdb54fc0eab3d7bea",
|
||||
"zh:e45312311dd7371d2ebb3d7113016fa6e00e94877bbcef34bab53db94b49937d",
|
||||
"zh:f3b64f9c840e92c3950d31e7b34a5aa27d10b65e39185da17b268928047426f7",
|
||||
"zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/hashicorp/http" {
|
||||
version = "3.5.0"
|
||||
constraints = ">= 3.0.0"
|
||||
hashes = [
|
||||
"h1:8bUoPwS4hahOvzCBj6b04ObLVFXCEmEN8T/5eOHmWOM=",
|
||||
"zh:047c5b4920751b13425efe0d011b3a23a3be97d02d9c0e3c60985521c9c456b7",
|
||||
"zh:157866f700470207561f6d032d344916b82268ecd0cf8174fb11c0674c8d0736",
|
||||
"zh:1973eb9383b0d83dd4fd5e662f0f16de837d072b64a6b7cd703410d730499476",
|
||||
"zh:212f833a4e6d020840672f6f88273d62a564f44acb0c857b5961cdb3bbc14c90",
|
||||
"zh:2c8034bc039fffaa1d4965ca02a8c6d57301e5fa9fff4773e684b46e3f78e76a",
|
||||
"zh:5df353fc5b2dd31577def9cc1a4ebf0c9a9c2699d223c6b02087a3089c74a1c6",
|
||||
"zh:672083810d4185076c81b16ad13d1224b9e6ea7f4850951d2ab8d30fa6e41f08",
|
||||
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
|
||||
"zh:7b4200f18abdbe39904b03537e1a78f21ebafe60f1c861a44387d314fda69da6",
|
||||
"zh:843feacacd86baed820f81a6c9f7bd32cf302db3d7a0f39e87976ebc7a7cc2ee",
|
||||
"zh:a9ea5096ab91aab260b22e4251c05f08dad2ed77e43e5e4fadcdfd87f2c78926",
|
||||
"zh:d02b288922811739059e90184c7f76d45d07d3a77cc48d0b15fd3db14e928623",
|
||||
]
|
||||
}
|
||||
|
||||
provider "registry.terraform.io/kreuzwerker/docker" {
|
||||
version = "2.25.0"
|
||||
constraints = "~> 2.25"
|
||||
hashes = [
|
||||
"h1:nB2atWOMNrq3tfVH216oFFCQ/TNjAXXno6ZyZhlGdQs=",
|
||||
"zh:02ca00d987b2e56195d2e97d82349f680d4b94a6a0d514dc6c0031317aec4f11",
|
||||
"zh:432d333412f01b7547b3b264ec85a2627869fdf5f75df9d237b0dc6a6848b292",
|
||||
"zh:4709e81fea2b9132020d6c786a1d1d02c77254fc0e299ea1bb636892b6cadac6",
|
||||
"zh:53c4a4ab59a1e0671d2292d74f14e060489482d430ad811016bf7cb95503c5de",
|
||||
"zh:6c0865e514ceffbf19ace806fb4595bf05d0a165dd9c8664f8768da385ccc091",
|
||||
"zh:6d72716d58b8c18cd0b223265b2a190648a14973223cc198a019b300ede07570",
|
||||
"zh:a710ce90557c54396dfc27b282452a8f5373eb112a10e9fd77043ca05d30e72f",
|
||||
"zh:e0868c7ac58af596edfa578473013bd550e40c0a1f6adc2c717445ebf9fd694e",
|
||||
"zh:e2ab2c40631f100130e7b525e07be7a9b8d8fcb8f57f21dca235a3e15818636b",
|
||||
"zh:e40c93b1d99660f92dd0c75611bcb9e68ae706d4c0bc6fac32f672e19e6f05bf",
|
||||
"zh:e480501b2dd1399135ec7eb820e1be88f9381d32c4df093f2f4645863f8c48f4",
|
||||
"zh:f1a71e90aa388d34691595883f6526543063f8e338792b7c2c003b2c8c63d108",
|
||||
"zh:f346cd5d25a31991487ca5dc7a05e104776c3917482bc2a24ec6a90bb697b22e",
|
||||
"zh:fa822a4eb4e6385e88fbb133fd63d3a953693712a7adeb371913a2d477c0148c",
|
||||
]
|
||||
}
|
||||
@@ -1,133 +0,0 @@
|
||||
---
|
||||
display_name: JetBrains Gateway
|
||||
description: Add a one-click button to launch JetBrains Gateway IDEs in the dashboard.
|
||||
icon: ../.icons/gateway.svg
|
||||
maintainer_github: coder
|
||||
verified: true
|
||||
tags: [ide, jetbrains, helper, parameter]
|
||||
---
|
||||
|
||||
# JetBrains Gateway
|
||||
|
||||
This module adds a JetBrains Gateway Button to open any workspace with a single click.
|
||||
|
||||
JetBrains recommends a minimum of 4 CPU cores and 8GB of RAM.
|
||||
Consult the [JetBrains documentation](https://www.jetbrains.com/help/idea/prerequisites.html#min_requirements) to confirm other system requirements.
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.28"
|
||||
agent_id = coder_agent.example.id
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["CL", "GO", "IU", "PY", "WS"]
|
||||
default = "GO"
|
||||
}
|
||||
```
|
||||
|
||||

|
||||
|
||||
## Examples
|
||||
|
||||
### Add GoLand and WebStorm as options with the default set to GoLand
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.28"
|
||||
agent_id = coder_agent.example.id
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["GO", "WS"]
|
||||
default = "GO"
|
||||
}
|
||||
```
|
||||
|
||||
### Use the latest version of each IDE
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.28"
|
||||
agent_id = coder_agent.example.id
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["IU", "PY"]
|
||||
default = "IU"
|
||||
latest = true
|
||||
}
|
||||
```
|
||||
|
||||
### Use fixed versions set by `jetbrains_ide_versions`
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.28"
|
||||
agent_id = coder_agent.example.id
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["IU", "PY"]
|
||||
default = "IU"
|
||||
latest = false
|
||||
jetbrains_ide_versions = {
|
||||
"IU" = {
|
||||
build_number = "243.21565.193"
|
||||
version = "2024.3"
|
||||
}
|
||||
"PY" = {
|
||||
build_number = "243.21565.199"
|
||||
version = "2024.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Use the latest EAP version
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.28"
|
||||
agent_id = coder_agent.example.id
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["GO", "WS"]
|
||||
default = "GO"
|
||||
latest = true
|
||||
channel = "eap"
|
||||
}
|
||||
```
|
||||
|
||||
### Custom base link
|
||||
|
||||
Within IDEA, the `ide_download_link` parameter in the `jetbrains-gateway://` URL takes the highest priority, so the pre-configured download address is overridden when using [IDEA's offline mode](https://www.jetbrains.com/help/idea/fully-offline-mode.html). To change the value of `ide_download_link`, set the `download_base_link` parameter on the `jetbrains_gateway` module.
|
||||
|
||||
```tf
|
||||
module "jetbrains_gateway" {
|
||||
count = data.coder_workspace.me.start_count
|
||||
source = "registry.coder.com/modules/jetbrains-gateway/coder"
|
||||
version = "1.0.28"
|
||||
agent_id = coder_agent.example.id
|
||||
folder = "/home/coder/example"
|
||||
jetbrains_ides = ["GO", "WS"]
|
||||
releases_base_link = "https://releases.internal.site/"
|
||||
download_base_link = "https://download.internal.site/"
|
||||
default = "GO"
|
||||
}
|
||||
```
|
||||
|
||||
## Supported IDEs
|
||||
|
||||
This module and JetBrains Gateway support the following JetBrains IDEs:
|
||||
|
||||
- [GoLand (`GO`)](https://www.jetbrains.com/go/)
|
||||
- [WebStorm (`WS`)](https://www.jetbrains.com/webstorm/)
|
||||
- [IntelliJ IDEA Ultimate (`IU`)](https://www.jetbrains.com/idea/)
|
||||
- [PyCharm Professional (`PY`)](https://www.jetbrains.com/pycharm/)
|
||||
- [PhpStorm (`PS`)](https://www.jetbrains.com/phpstorm/)
|
||||
- [CLion (`CL`)](https://www.jetbrains.com/clion/)
|
||||
- [RubyMine (`RM`)](https://www.jetbrains.com/ruby/)
|
||||
- [Rider (`RD`)](https://www.jetbrains.com/rider/)
|
||||
- [RustRover (`RR`)](https://www.jetbrains.com/rust/)
|
||||
@@ -1,43 +0,0 @@
|
||||
import { it, expect, describe } from "bun:test";
|
||||
import {
|
||||
runTerraformInit,
|
||||
testRequiredVariables,
|
||||
runTerraformApply,
|
||||
} from "../test";
|
||||
|
||||
describe("jetbrains-gateway", async () => {
|
||||
await runTerraformInit(import.meta.dir);
|
||||
|
||||
await testRequiredVariables(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
folder: "/home/foo",
|
||||
});
|
||||
|
||||
it("should create a link with the default values", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
// These are all required.
|
||||
agent_id: "foo",
|
||||
folder: "/home/coder",
|
||||
});
|
||||
expect(state.outputs.url.value).toBe(
|
||||
"jetbrains-gateway://connect#type=coder&workspace=default&owner=default&folder=/home/coder&url=https://mydeployment.coder.com&token=$SESSION_TOKEN&ide_product_code=IU&ide_build_number=243.21565.193&ide_download_link=https://download.jetbrains.com/idea/ideaIU-2024.3.tar.gz",
|
||||
);
|
||||
|
||||
const coder_app = state.resources.find(
|
||||
(res) => res.type === "coder_app" && res.name === "gateway",
|
||||
);
|
||||
|
||||
expect(coder_app).not.toBeNull();
|
||||
expect(coder_app?.instances.length).toBe(1);
|
||||
expect(coder_app?.instances[0].attributes.order).toBeNull();
|
||||
});
|
||||
|
||||
it("default to first ide", async () => {
|
||||
const state = await runTerraformApply(import.meta.dir, {
|
||||
agent_id: "foo",
|
||||
folder: "/home/foo",
|
||||
jetbrains_ides: '["IU", "GO", "PY"]',
|
||||
});
|
||||
expect(state.outputs.identifier.value).toBe("IU");
|
||||
});
|
||||
});
|
||||
@@ -1,341 +0,0 @@
|
||||
terraform {
|
||||
required_version = ">= 1.0"
|
||||
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = ">= 0.17"
|
||||
}
|
||||
http = {
|
||||
source = "hashicorp/http"
|
||||
version = ">= 3.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable "agent_id" {
|
||||
type = string
|
||||
description = "The ID of a Coder agent."
|
||||
}
|
||||
|
||||
variable "slug" {
|
||||
type = string
|
||||
description = "The slug for the coder_app. Allows resuing the module with the same template."
|
||||
default = "gateway"
|
||||
}
|
||||
|
||||
variable "agent_name" {
|
||||
type = string
|
||||
description = "Agent name. (unused). Will be removed in a future version"
|
||||
|
||||
default = ""
|
||||
}
|
||||
|
||||
variable "folder" {
|
||||
type = string
|
||||
description = "The directory to open in the IDE. e.g. /home/coder/project"
|
||||
validation {
|
||||
condition = can(regex("^(?:/[^/]+)+$", var.folder))
|
||||
error_message = "The folder must be a full path and must not start with a ~."
|
||||
}
|
||||
}
|
||||
|
||||
variable "default" {
|
||||
default = ""
|
||||
type = string
|
||||
description = "Default IDE"
|
||||
}
|
||||
|
||||
variable "order" {
|
||||
type = number
|
||||
description = "The order determines the position of app in the UI presentation. The lowest order is shown first and apps with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "coder_parameter_order" {
|
||||
type = number
|
||||
description = "The order determines the position of a template parameter in the UI/CLI presentation. The lowest order is shown first and parameters with equal order are sorted by name (ascending order)."
|
||||
default = null
|
||||
}
|
||||
|
||||
variable "latest" {
|
||||
type = bool
|
||||
description = "Whether to fetch the latest version of the IDE."
|
||||
default = false
|
||||
}
|
||||
|
||||
variable "channel" {
|
||||
type = string
|
||||
description = "JetBrains IDE release channel. Valid values are release and eap."
|
||||
default = "release"
|
||||
validation {
|
||||
condition = can(regex("^(release|eap)$", var.channel))
|
||||
error_message = "The channel must be either release or eap."
|
||||
}
|
||||
}
|
||||
|
||||
variable "jetbrains_ide_versions" {
|
||||
type = map(object({
|
||||
build_number = string
|
||||
version = string
|
||||
}))
|
||||
description = "The set of versions for each jetbrains IDE"
|
||||
default = {
|
||||
"IU" = {
|
||||
build_number = "243.21565.193"
|
||||
version = "2024.3"
|
||||
}
|
||||
"PS" = {
|
||||
build_number = "243.21565.202"
|
||||
version = "2024.3"
|
||||
}
|
||||
"WS" = {
|
||||
build_number = "243.21565.180"
|
||||
version = "2024.3"
|
||||
}
|
||||
"PY" = {
|
||||
build_number = "243.21565.199"
|
||||
version = "2024.3"
|
||||
}
|
||||
"CL" = {
|
||||
build_number = "243.21565.238"
|
||||
version = "2024.1"
|
||||
}
|
||||
"GO" = {
|
||||
build_number = "243.21565.208"
|
||||
version = "2024.3"
|
||||
}
|
||||
"RM" = {
|
||||
build_number = "243.21565.197"
|
||||
version = "2024.3"
|
||||
}
|
||||
"RD" = {
|
||||
build_number = "243.21565.191"
|
||||
version = "2024.3"
|
||||
}
|
||||
"RR" = {
|
||||
build_number = "243.22562.230"
|
||||
version = "2024.3"
|
||||
}
|
||||
}
|
||||
validation {
|
||||
condition = (
|
||||
alltrue([
|
||||
for code in keys(var.jetbrains_ide_versions) : contains(["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD", "RR"], code)
|
||||
])
|
||||
)
|
||||
error_message = "The jetbrains_ide_versions must contain a map of valid product codes. Valid product codes are ${join(",", ["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD", "RR"])}."
|
||||
}
|
||||
}
|
||||
|
||||
variable "jetbrains_ides" {
|
||||
type = list(string)
|
||||
description = "The list of IDE product codes."
|
||||
default = ["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD", "RR"]
|
||||
validation {
|
||||
condition = (
|
||||
alltrue([
|
||||
for code in var.jetbrains_ides : contains(["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD", "RR"], code)
|
||||
])
|
||||
)
|
||||
error_message = "The jetbrains_ides must be a list of valid product codes. Valid product codes are ${join(",", ["IU", "PS", "WS", "PY", "CL", "GO", "RM", "RD", "RR"])}."
|
||||
}
|
||||
# check if the list is empty
|
||||
validation {
|
||||
condition = length(var.jetbrains_ides) > 0
|
||||
error_message = "The jetbrains_ides must not be empty."
|
||||
}
|
||||
# check if the list contains duplicates
|
||||
validation {
|
||||
condition = length(var.jetbrains_ides) == length(toset(var.jetbrains_ides))
|
||||
error_message = "The jetbrains_ides must not contain duplicates."
|
||||
}
|
||||
}
|
||||
|
||||
variable "releases_base_link" {
|
||||
type = string
|
||||
description = ""
|
||||
default = "https://data.services.jetbrains.com"
|
||||
validation {
|
||||
condition = can(regex("^https?://.+$", var.releases_base_link))
|
||||
error_message = "The releases_base_link must be a valid HTTP/S address."
|
||||
}
|
||||
}
|
||||
|
||||
variable "download_base_link" {
|
||||
type = string
|
||||
description = ""
|
||||
default = "https://download.jetbrains.com"
|
||||
validation {
|
||||
condition = can(regex("^https?://.+$", var.download_base_link))
|
||||
error_message = "The download_base_link must be a valid HTTP/S address."
|
||||
}
|
||||
}
|
||||
|
||||
data "http" "jetbrains_ide_versions" {
|
||||
for_each = var.latest ? toset(var.jetbrains_ides) : toset([])
|
||||
url = "${var.releases_base_link}/products/releases?code=${each.key}&latest=true&type=${var.channel}"
|
||||
}
|
||||
|
||||
locals {
|
||||
jetbrains_ides = {
|
||||
"GO" = {
|
||||
icon = "/icon/goland.svg",
|
||||
name = "GoLand",
|
||||
identifier = "GO",
|
||||
build_number = var.jetbrains_ide_versions["GO"].build_number,
|
||||
download_link = "${var.download_base_link}/go/goland-${var.jetbrains_ide_versions["GO"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["GO"].version
|
||||
},
|
||||
"WS" = {
|
||||
icon = "/icon/webstorm.svg",
|
||||
name = "WebStorm",
|
||||
identifier = "WS",
|
||||
build_number = var.jetbrains_ide_versions["WS"].build_number,
|
||||
download_link = "${var.download_base_link}/webstorm/WebStorm-${var.jetbrains_ide_versions["WS"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["WS"].version
|
||||
},
|
||||
"IU" = {
|
||||
icon = "/icon/intellij.svg",
|
||||
name = "IntelliJ IDEA Ultimate",
|
||||
identifier = "IU",
|
||||
build_number = var.jetbrains_ide_versions["IU"].build_number,
|
||||
download_link = "${var.download_base_link}/idea/ideaIU-${var.jetbrains_ide_versions["IU"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["IU"].version
|
||||
},
|
||||
"PY" = {
|
||||
icon = "/icon/pycharm.svg",
|
||||
name = "PyCharm Professional",
|
||||
identifier = "PY",
|
||||
build_number = var.jetbrains_ide_versions["PY"].build_number,
|
||||
download_link = "${var.download_base_link}/python/pycharm-professional-${var.jetbrains_ide_versions["PY"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["PY"].version
|
||||
},
|
||||
"CL" = {
|
||||
icon = "/icon/clion.svg",
|
||||
name = "CLion",
|
||||
identifier = "CL",
|
||||
build_number = var.jetbrains_ide_versions["CL"].build_number,
|
||||
download_link = "${var.download_base_link}/cpp/CLion-${var.jetbrains_ide_versions["CL"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["CL"].version
|
||||
},
|
||||
"PS" = {
|
||||
icon = "/icon/phpstorm.svg",
|
||||
name = "PhpStorm",
|
||||
identifier = "PS",
|
||||
build_number = var.jetbrains_ide_versions["PS"].build_number,
|
||||
download_link = "${var.download_base_link}/webide/PhpStorm-${var.jetbrains_ide_versions["PS"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["PS"].version
|
||||
},
|
||||
"RM" = {
|
||||
icon = "/icon/rubymine.svg",
|
||||
name = "RubyMine",
|
||||
identifier = "RM",
|
||||
build_number = var.jetbrains_ide_versions["RM"].build_number,
|
||||
download_link = "${var.download_base_link}/ruby/RubyMine-${var.jetbrains_ide_versions["RM"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["RM"].version
|
||||
},
|
||||
"RD" = {
|
||||
icon = "/icon/rider.svg",
|
||||
name = "Rider",
|
||||
identifier = "RD",
|
||||
build_number = var.jetbrains_ide_versions["RD"].build_number,
|
||||
download_link = "${var.download_base_link}/rider/JetBrains.Rider-${var.jetbrains_ide_versions["RD"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["RD"].version
|
||||
},
|
||||
"RR" = {
|
||||
icon = "/icon/rustrover.svg",
|
||||
name = "RustRover",
|
||||
identifier = "RR",
|
||||
build_number = var.jetbrains_ide_versions["RR"].build_number,
|
||||
download_link = "${var.download_base_link}/rustrover/RustRover-${var.jetbrains_ide_versions["RR"].version}.tar.gz"
|
||||
version = var.jetbrains_ide_versions["RR"].version
|
||||
}
|
||||
}
|
||||
|
||||
icon = local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].icon
|
||||
json_data = var.latest ? jsondecode(data.http.jetbrains_ide_versions[data.coder_parameter.jetbrains_ide.value].response_body) : {}
|
||||
key = var.latest ? keys(local.json_data)[0] : ""
|
||||
display_name = local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].name
|
||||
identifier = data.coder_parameter.jetbrains_ide.value
|
||||
download_link = var.latest ? local.json_data[local.key][0].downloads.linux.link : local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].download_link
|
||||
build_number = var.latest ? local.json_data[local.key][0].build : local.jetbrains_ides[data.coder_parameter.jetbrains_ide.value].build_number
|
||||
version = var.latest ? local.json_data[local.key][0].version : var.jetbrains_ide_versions[data.coder_parameter.jetbrains_ide.value].version
|
||||
}
|
||||
|
||||
data "coder_parameter" "jetbrains_ide" {
|
||||
type = "string"
|
||||
name = "jetbrains_ide"
|
||||
display_name = "JetBrains IDE"
|
||||
icon = "/icon/gateway.svg"
|
||||
mutable = true
|
||||
default = var.default == "" ? var.jetbrains_ides[0] : var.default
|
||||
order = var.coder_parameter_order
|
||||
|
||||
dynamic "option" {
|
||||
for_each = var.jetbrains_ides
|
||||
content {
|
||||
icon = local.jetbrains_ides[option.value].icon
|
||||
name = local.jetbrains_ides[option.value].name
|
||||
value = option.value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
resource "coder_app" "gateway" {
|
||||
agent_id = var.agent_id
|
||||
slug = var.slug
|
||||
display_name = local.display_name
|
||||
icon = local.icon
|
||||
external = true
|
||||
order = var.order
|
||||
url = join("", [
|
||||
"jetbrains-gateway://connect#type=coder&workspace=",
|
||||
data.coder_workspace.me.name,
|
||||
"&owner=",
|
||||
data.coder_workspace_owner.me.name,
|
||||
"&folder=",
|
||||
var.folder,
|
||||
"&url=",
|
||||
data.coder_workspace.me.access_url,
|
||||
"&token=",
|
||||
"$SESSION_TOKEN",
|
||||
"&ide_product_code=",
|
||||
data.coder_parameter.jetbrains_ide.value,
|
||||
"&ide_build_number=",
|
||||
local.build_number,
|
||||
"&ide_download_link=",
|
||||
local.download_link,
|
||||
])
|
||||
}
|
||||
|
||||
output "identifier" {
|
||||
value = local.identifier
|
||||
}
|
||||
|
||||
output "display_name" {
|
||||
value = local.display_name
|
||||
}
|
||||
|
||||
output "icon" {
|
||||
value = local.icon
|
||||
}
|
||||
|
||||
output "download_link" {
|
||||
value = local.download_link
|
||||
}
|
||||
|
||||
output "build_number" {
|
||||
value = local.build_number
|
||||
}
|
||||
|
||||
output "version" {
|
||||
value = local.version
|
||||
}
|
||||
|
||||
output "url" {
|
||||
value = coder_app.gateway.url
|
||||
}
|
||||
@@ -1 +0,0 @@
{"Modules":[{"Key":"","Source":"","Dir":"."},{"Key":"claude_code","Source":"registry.coder.com/coder/claude-code/coder","Version":"3.0.0","Dir":".terraform/modules/claude_code"},{"Key":"claude_code.agentapi","Source":"registry.coder.com/coder/agentapi/coder","Version":"1.1.1","Dir":".terraform/modules/claude_code.agentapi"},{"Key":"cursor_desktop","Source":"registry.coder.com/coder/cursor/coder","Version":"1.3.2","Dir":".terraform/modules/cursor_desktop"},{"Key":"jetbrains_gateway","Source":"registry.coder.com/modules/jetbrains-gateway/coder","Version":"1.0.29","Dir":".terraform/modules/jetbrains_gateway"},{"Key":"pycharm_desktop","Source":"registry.coder.com/coder/jetbrains-gateway/coder","Version":"1.2.2","Dir":".terraform/modules/pycharm_desktop"},{"Key":"windsurf_desktop","Source":"registry.coder.com/coder/windsurf/coder","Version":"1.2.0","Dir":".terraform/modules/windsurf_desktop"}]}
@@ -1,375 +0,0 @@
|
||||
Copyright (c) 2017 HashiCorp, Inc.
|
||||
|
||||
Mozilla Public License Version 2.0
|
||||
==================================
|
||||
|
||||
1. Definitions
|
||||
--------------
|
||||
|
||||
1.1. "Contributor"
|
||||
means each individual or legal entity that creates, contributes to
|
||||
the creation of, or owns Covered Software.
|
||||
|
||||
1.2. "Contributor Version"
|
||||
means the combination of the Contributions of others (if any) used
|
||||
by a Contributor and that particular Contributor's Contribution.
|
||||
|
||||
1.3. "Contribution"
|
||||
means Covered Software of a particular Contributor.
|
||||
|
||||
1.4. "Covered Software"
|
||||
means Source Code Form to which the initial Contributor has attached
|
||||
the notice in Exhibit A, the Executable Form of such Source Code
|
||||
Form, and Modifications of such Source Code Form, in each case
|
||||
including portions thereof.
|
||||
|
||||
1.5. "Incompatible With Secondary Licenses"
|
||||
means
|
||||
|
||||
(a) that the initial Contributor has attached the notice described
|
||||
in Exhibit B to the Covered Software; or
|
||||
|
||||
(b) that the Covered Software was made available under the terms of
|
||||
version 1.1 or earlier of the License, but not also under the
|
||||
terms of a Secondary License.
|
||||
|
||||
1.6. "Executable Form"
|
||||
means any form of the work other than Source Code Form.
|
||||
|
||||
1.7. "Larger Work"
|
||||
means a work that combines Covered Software with other material, in
|
||||
a separate file or files, that is not Covered Software.
|
||||
|
||||
1.8. "License"
|
||||
means this document.
|
||||
|
||||
1.9. "Licensable"
|
||||
means having the right to grant, to the maximum extent possible,
|
||||
whether at the time of the initial grant or subsequently, any and
|
||||
all of the rights conveyed by this License.
|
||||
|
||||
1.10. "Modifications"
|
||||
means any of the following:
|
||||
|
||||
(a) any file in Source Code Form that results from an addition to,
|
||||
deletion from, or modification of the contents of Covered
|
||||
Software; or
|
||||
|
||||
(b) any new file in Source Code Form that contains any Covered
|
||||
Software.
|
||||
|
||||
1.11. "Patent Claims" of a Contributor
|
||||
means any patent claim(s), including without limitation, method,
|
||||
process, and apparatus claims, in any patent Licensable by such
|
||||
Contributor that would be infringed, but for the grant of the
|
||||
License, by the making, using, selling, offering for sale, having
|
||||
made, import, or transfer of either its Contributions or its
|
||||
Contributor Version.
|
||||
|
||||
1.12. "Secondary License"
|
||||
means either the GNU General Public License, Version 2.0, the GNU
|
||||
Lesser General Public License, Version 2.1, the GNU Affero General
|
||||
Public License, Version 3.0, or any later versions of those
|
||||
licenses.
|
||||
|
||||
1.13. "Source Code Form"
|
||||
means the form of the work preferred for making modifications.
|
||||
|
||||
1.14. "You" (or "Your")
|
||||
means an individual or a legal entity exercising rights under this
|
||||
License. For legal entities, "You" includes any entity that
|
||||
controls, is controlled by, or is under common control with You. For
|
||||
purposes of this definition, "control" means (a) the power, direct
|
||||
or indirect, to cause the direction or management of such entity,
|
||||
whether by contract or otherwise, or (b) ownership of more than
|
||||
fifty percent (50%) of the outstanding shares or beneficial
|
||||
ownership of such entity.
|
||||
|
||||
2. License Grants and Conditions
|
||||
--------------------------------
|
||||
|
||||
2.1. Grants
|
||||
|
||||
Each Contributor hereby grants You a world-wide, royalty-free,
|
||||
non-exclusive license:
|
||||
|
||||
(a) under intellectual property rights (other than patent or trademark)
|
||||
Licensable by such Contributor to use, reproduce, make available,
|
||||
modify, display, perform, distribute, and otherwise exploit its
|
||||
Contributions, either on an unmodified basis, with Modifications, or
|
||||
as part of a Larger Work; and
|
||||
|
||||
(b) under Patent Claims of such Contributor to make, use, sell, offer
|
||||
for sale, have made, import, and otherwise transfer either its
|
||||
Contributions or its Contributor Version.
|
||||
|
||||
2.2. Effective Date
|
||||
|
||||
The licenses granted in Section 2.1 with respect to any Contribution
|
||||
become effective for each Contribution on the date the Contributor first
|
||||
distributes such Contribution.
|
||||
|
||||
2.3. Limitations on Grant Scope
|
||||
|
||||
The licenses granted in this Section 2 are the only rights granted under
|
||||
this License. No additional rights or licenses will be implied from the
|
||||
distribution or licensing of Covered Software under this License.
|
||||
Notwithstanding Section 2.1(b) above, no patent license is granted by a
|
||||
Contributor:
|
||||
|
||||
(a) for any code that a Contributor has removed from Covered Software;
|
||||
or
|
||||
|
||||
(b) for infringements caused by: (i) Your and any other third party's
|
||||
modifications of Covered Software, or (ii) the combination of its
|
||||
Contributions with other software (except as part of its Contributor
|
||||
Version); or
|
||||
|
||||
(c) under Patent Claims infringed by Covered Software in the absence of
|
||||
its Contributions.
|
||||
|
||||
This License does not grant any rights in the trademarks, service marks,
|
||||
or logos of any Contributor (except as may be necessary to comply with
|
||||
the notice requirements in Section 3.4).
|
||||
|
||||
2.4. Subsequent Licenses
|
||||
|
||||
No Contributor makes additional grants as a result of Your choice to
|
||||
distribute the Covered Software under a subsequent version of this
|
||||
License (see Section 10.2) or under the terms of a Secondary License (if
|
||||
permitted under the terms of Section 3.3).
|
||||
|
||||
2.5. Representation
|
||||
|
||||
Each Contributor represents that the Contributor believes its
|
||||
Contributions are its original creation(s) or it has sufficient rights
|
||||
to grant the rights to its Contributions conveyed by this License.
|
||||
|
||||
2.6. Fair Use
|
||||
|
||||
This License is not intended to limit any rights You have under
|
||||
applicable copyright doctrines of fair use, fair dealing, or other
|
||||
equivalents.
|
||||
|
||||
2.7. Conditions
|
||||
|
||||
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
|
||||
in Section 2.1.
|
||||
|
||||
3. Responsibilities
|
||||
-------------------
|
||||
|
||||
3.1. Distribution of Source Form
|
||||
|
||||
All distribution of Covered Software in Source Code Form, including any
|
||||
Modifications that You create or to which You contribute, must be under
|
||||
the terms of this License. You must inform recipients that the Source
|
||||
Code Form of the Covered Software is governed by the terms of this
|
||||
License, and how they can obtain a copy of this License. You may not
|
||||
attempt to alter or restrict the recipients' rights in the Source Code
|
||||
Form.
|
||||
|
||||
3.2. Distribution of Executable Form
|
||||
|
||||
If You distribute Covered Software in Executable Form then:
|
||||
|
||||
(a) such Covered Software must also be made available in Source Code
|
||||
Form, as described in Section 3.1, and You must inform recipients of
|
||||
the Executable Form how they can obtain a copy of such Source Code
|
||||
Form by reasonable means in a timely manner, at a charge no more
|
||||
than the cost of distribution to the recipient; and
|
||||
|
||||
(b) You may distribute such Executable Form under the terms of this
|
||||
License, or sublicense it under different terms, provided that the
|
||||
license for the Executable Form does not attempt to limit or alter
|
||||
the recipients' rights in the Source Code Form under this License.
|
||||
|
||||
3.3. Distribution of a Larger Work
|
||||
|
||||
You may create and distribute a Larger Work under terms of Your choice,
|
||||
provided that You also comply with the requirements of this License for
|
||||
the Covered Software. If the Larger Work is a combination of Covered
|
||||
Software with a work governed by one or more Secondary Licenses, and the
|
||||
Covered Software is not Incompatible With Secondary Licenses, this
|
||||
License permits You to additionally distribute such Covered Software
|
||||
under the terms of such Secondary License(s), so that the recipient of
|
||||
the Larger Work may, at their option, further distribute the Covered
|
||||
Software under the terms of either this License or such Secondary
|
||||
License(s).
|
||||
|
||||
3.4. Notices
|
||||
|
||||
You may not remove or alter the substance of any license notices
|
||||
(including copyright notices, patent notices, disclaimers of warranty,
|
||||
or limitations of liability) contained within the Source Code Form of
|
||||
the Covered Software, except that You may alter any license notices to
|
||||
the extent required to remedy known factual inaccuracies.
|
||||
|
||||
3.5. Application of Additional Terms
|
||||
|
||||
You may choose to offer, and to charge a fee for, warranty, support,
|
||||
indemnity or liability obligations to one or more recipients of Covered
|
||||
Software. However, You may do so only on Your own behalf, and not on
|
||||
behalf of any Contributor. You must make it absolutely clear that any
|
||||
such warranty, support, indemnity, or liability obligation is offered by
|
||||
You alone, and You hereby agree to indemnify every Contributor for any
|
||||
liability incurred by such Contributor as a result of warranty, support,
|
||||
indemnity or liability terms You offer. You may include additional
|
||||
disclaimers of warranty and limitations of liability specific to any
|
||||
jurisdiction.
|
||||
|
||||
4. Inability to Comply Due to Statute or Regulation
|
||||
---------------------------------------------------
|
||||
|
||||
If it is impossible for You to comply with any of the terms of this
|
||||
License with respect to some or all of the Covered Software due to
|
||||
statute, judicial order, or regulation then You must: (a) comply with
|
||||
the terms of this License to the maximum extent possible; and (b)
|
||||
describe the limitations and the code they affect. Such description must
|
||||
be placed in a text file included with all distributions of the Covered
|
||||
Software under this License. Except to the extent prohibited by statute
|
||||
or regulation, such description must be sufficiently detailed for a
|
||||
recipient of ordinary skill to be able to understand it.
|
||||
|
||||
5. Termination
|
||||
--------------
|
||||
|
||||
5.1. The rights granted under this License will terminate automatically
|
||||
if You fail to comply with any of its terms. However, if You become
|
||||
compliant, then the rights granted under this License from a particular
|
||||
Contributor are reinstated (a) provisionally, unless and until such
|
||||
Contributor explicitly and finally terminates Your grants, and (b) on an
|
||||
ongoing basis, if such Contributor fails to notify You of the
|
||||
non-compliance by some reasonable means prior to 60 days after You have
|
||||
come back into compliance. Moreover, Your grants from a particular
|
||||
Contributor are reinstated on an ongoing basis if such Contributor
|
||||
notifies You of the non-compliance by some reasonable means, this is the
|
||||
first time You have received notice of non-compliance with this License
|
||||
from such Contributor, and You become compliant prior to 30 days after
|
||||
Your receipt of the notice.
|
||||
|
||||
5.2. If You initiate litigation against any entity by asserting a patent
|
||||
infringement claim (excluding declaratory judgment actions,
|
||||
counter-claims, and cross-claims) alleging that a Contributor Version
|
||||
directly or indirectly infringes any patent, then the rights granted to
|
||||
You by any and all Contributors for the Covered Software under Section
|
||||
2.1 of this License shall terminate.
|
||||
|
||||
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
|
||||
end user license agreements (excluding distributors and resellers) which
|
||||
have been validly granted by You or Your distributors under this License
|
||||
prior to termination shall survive termination.
|
||||
|
||||
************************************************************************
|
||||
* *
|
||||
* 6. Disclaimer of Warranty *
|
||||
* ------------------------- *
|
||||
* *
|
||||
* Covered Software is provided under this License on an "as is" *
|
||||
* basis, without warranty of any kind, either expressed, implied, or *
|
||||
* statutory, including, without limitation, warranties that the *
|
||||
* Covered Software is free of defects, merchantable, fit for a *
|
||||
* particular purpose or non-infringing. The entire risk as to the *
|
||||
* quality and performance of the Covered Software is with You. *
|
||||
* Should any Covered Software prove defective in any respect, You *
|
||||
* (not any Contributor) assume the cost of any necessary servicing, *
|
||||
* repair, or correction. This disclaimer of warranty constitutes an *
|
||||
* essential part of this License. No use of any Covered Software is *
|
||||
* authorized under this License except under this disclaimer. *
|
||||
* *
|
||||
************************************************************************
|
||||
|
||||
************************************************************************
|
||||
* *
|
||||
* 7. Limitation of Liability *
|
||||
* -------------------------- *
|
||||
* *
|
||||
* Under no circumstances and under no legal theory, whether tort *
|
||||
* (including negligence), contract, or otherwise, shall any *
|
||||
* Contributor, or anyone who distributes Covered Software as *
|
||||
* permitted above, be liable to You for any direct, indirect, *
|
||||
* special, incidental, or consequential damages of any character *
|
||||
* including, without limitation, damages for lost profits, loss of *
|
||||
* goodwill, work stoppage, computer failure or malfunction, or any *
|
||||
* and all other commercial damages or losses, even if such party *
|
||||
* shall have been informed of the possibility of such damages. This *
|
||||
* limitation of liability shall not apply to liability for death or *
|
||||
* personal injury resulting from such party's negligence to the *
|
||||
* extent applicable law prohibits such limitation. Some *
|
||||
* jurisdictions do not allow the exclusion or limitation of *
|
||||
* incidental or consequential damages, so this exclusion and *
|
||||
* limitation may not apply to You. *
|
||||
* *
|
||||
************************************************************************
|
||||
|
||||
8. Litigation
|
||||
-------------
|
||||
|
||||
Any litigation relating to this License may be brought only in the
|
||||
courts of a jurisdiction where the defendant maintains its principal
|
||||
place of business and such litigation shall be governed by laws of that
|
||||
jurisdiction, without reference to its conflict-of-law provisions.
|
||||
Nothing in this Section shall prevent a party's ability to bring
|
||||
cross-claims or counter-claims.
|
||||
|
||||
9. Miscellaneous
|
||||
----------------
|
||||
|
||||
This License represents the complete agreement concerning the subject
|
||||
matter hereof. If any provision of this License is held to be
|
||||
unenforceable, such provision shall be reformed only to the extent
|
||||
necessary to make it enforceable. Any law or regulation which provides
|
||||
that the language of a contract shall be construed against the drafter
|
||||
shall not be used to construe this License against a Contributor.
|
||||
|
||||
10. Versions of the License
|
||||
---------------------------
|
||||
|
||||
10.1. New Versions
|
||||
|
||||
Mozilla Foundation is the license steward. Except as provided in Section
|
||||
10.3, no one other than the license steward has the right to modify or
|
||||
publish new versions of this License. Each version will be given a
|
||||
distinguishing version number.
|
||||
|
||||
10.2. Effect of New Versions
|
||||
|
||||
You may distribute the Covered Software under the terms of the version
|
||||
of the License under which You originally received the Covered Software,
|
||||
or under the terms of any subsequent version published by the license
|
||||
steward.
|
||||
|
||||
10.3. Modified Versions
|
||||
|
||||
If you create software not governed by this License, and you want to
|
||||
create a new license for such software, you may create and use a
|
||||
modified version of this License if you rename the license and remove
|
||||
any references to the name of the license steward (except to note that
|
||||
such modified license differs from this License).
|
||||
|
||||
10.4. Distributing Source Code Form that is Incompatible With Secondary
|
||||
Licenses
|
||||
|
||||
If You choose to distribute Source Code Form that is Incompatible With
|
||||
Secondary Licenses under the terms of this version of the License, the
|
||||
notice described in Exhibit B of this License must be attached.
|
||||
|
||||
Exhibit A - Source Code Form License Notice
|
||||
-------------------------------------------
|
||||
|
||||
This Source Code Form is subject to the terms of the Mozilla Public
|
||||
License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
If it is not possible or desirable to put the notice in a particular
|
||||
file, then You may include the notice in a location (such as a LICENSE
|
||||
file in a relevant directory) where a recipient would be likely to look
|
||||
for such a notice.
|
||||
|
||||
You may add additional accurate notices of copyright ownership.
|
||||
|
||||
Exhibit B - "Incompatible With Secondary Licenses" Notice
|
||||
---------------------------------------------------------
|
||||
|
||||
This Source Code Form is "Incompatible With Secondary Licenses", as
|
||||
defined by the Mozilla Public License, v. 2.0.
|
||||
Binary file not shown.
50 tf/README.md
@@ -1,50 +0,0 @@
# Terraform Workspace Template

This Terraform module provisions a Coder workspace that mirrors the devcontainer experience defined in this repository. The files in `tf/` are mounted into the workspace at `/home/coder/code-tools`, so the helper scripts referenced below are always available.

## What You Get

- One Docker workspace container built from `var.devcontainer_image` (defaults to the universal Dev Container image).
- Optional PostgreSQL, Redis, and Qdrant services running on the same Docker network, plus pgAdmin and Jupyter toggles.
- Startup scripts that install core tooling and (optionally) AI helpers for Claude, Cursor, and Windsurf.
- A trimmed Coder application list (VS Code, Terminal, pgAdmin, Qdrant, Jupyter, and a few common dev ports).
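When the bundled services are enabled, they run on the per-workspace Docker network and are addressed by container name. As a sketch (the referenced `var.*` values and `local.workspace_id` come from `variables.tf` and `main.tf`), the connection strings surfaced through `outputs.tf` follow this pattern:

```tf
locals {
  # Connection-string pattern used by the template; container names include the workspace ID.
  postgres_url = "postgresql://postgres:${var.postgres_password}@postgres-${local.workspace_id}:5432/postgres"
  redis_url    = "redis://:${var.redis_password}@redis-${local.workspace_id}:6379"
  qdrant_url   = "http://qdrant-${local.workspace_id}:6333"
}
```
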
## Key Inputs

| Name | Description | Default |
| --- | --- | --- |
| `devcontainer_image` | Workspace container image | `mcr.microsoft.com/devcontainers/universal:2-linux` |
| `workspace_memory_limit` | Memory limit in MB (0 = image default) | `8192` |
| `enable_docker_in_docker` | Mount `/var/run/docker.sock` | `true` |
| `postgres_password` / `redis_password` | Service credentials | `devpassword` |
| `postgres_max_connections` | PostgreSQL connection cap | `100` |
| `redis_max_memory` | Redis maxmemory setting | `512mb` |
| `pgadmin_email` / `pgadmin_password` | pgAdmin login | `admin@dev.local` / `adminpassword` |
| `install_*` flags | Control which AI helpers run when enabled | all `true` |
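A minimal `terraform.tfvars` sketch for overriding these inputs might look like the following; the specific values shown are illustrative assumptions, and anything omitted keeps the default listed above.

```tf
# terraform.tfvars -- illustrative overrides only; omit anything you want left at its default.
devcontainer_image      = "mcr.microsoft.com/devcontainers/universal:2-linux"
workspace_memory_limit  = 16384 # MB; set 0 to keep the image default
enable_docker_in_docker = true
postgres_password       = "devpassword"
redis_max_memory        = "1gb"
pgadmin_email           = "admin@dev.local"
pgadmin_password        = "adminpassword"
```
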
Workspace creators see only a handful of parameters:
1. Optional repository URL to clone into `/workspaces`.
2. Toggles for data services, AI tooling, pgAdmin, Jupyter, and JetBrains Gateway.

## Files

```
main.tf          # Providers, parameters, locals, Docker primitives
workspace.tf     # Coder agent and workspace container
services.tf      # PostgreSQL / Redis / Qdrant (+ pgAdmin & Jupyter)
apps.tf          # Essential Coder apps and dev-port helpers
scripts.tf       # Core + AI scripts wired to the agent
variables.tf     # Minimal variable surface area
terraform.tfvars # Opinionated defaults you can override
outputs.tf       # Helpful connection strings and metadata
scripts/         # Shell scripts invoked by Terraform resources
```

## Usage

1. From the Coder deployment (mounted at `/home/coder/code-tools/tf`), run `terraform init` and `terraform apply`.
2. When prompted for the **Project repository**, supply any Git URL to clone into `/workspaces` or leave it blank for an empty workspace.
3. Toggle services and AI tools to suit the workspace. If services are enabled, the bundled `port-forward.sh` script exposes pgAdmin on `localhost:5050` and Qdrant on `localhost:6333`.
4. The devcontainer image should install language toolchains; the `workspace-setup.sh` and `dev-tools.sh` scripts simply finish configuration inside the workspace.
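The **Project repository** prompt in step 2 is backed by the `project_repository` parameter declared in `main.tf`; a trimmed sketch of that declaration:

```tf
# Mirrors the declaration in main.tf; requires the coder provider to be configured.
data "coder_parameter" "project_repository" {
  name         = "project_repository"
  display_name = "Project repository"
  description  = "Optional Git URL cloned into /workspaces on first startup."
  default      = ""
  mutable      = true
  order        = 1
}
```

Leaving the prompt blank falls through to the `repo_clone_command` default in `main.tf`, which simply reports that no repository was requested.
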
Refer to [Coder’s devcontainer template guide](https://coder.com/docs/@v2.26.0/admin/templates/managing-templates/devcontainers/add-devcontainer) for broader context on how this Terraform fits into your deployment.
134 tf/apps.tf
@@ -1,134 +0,0 @@
|
||||
resource "coder_app" "code_server" {
|
||||
agent_id = coder_agent.main.id
|
||||
slug = "code-server"
|
||||
display_name = "VS Code"
|
||||
url = "http://localhost:8080"
|
||||
icon = "/icon/code.svg"
|
||||
subdomain = true
|
||||
share = "owner"
|
||||
|
||||
healthcheck {
|
||||
url = "http://localhost:8080/healthz"
|
||||
interval = 10
|
||||
threshold = 5
|
||||
}
|
||||
}
|
||||
|
||||
resource "coder_app" "terminal" {
|
||||
agent_id = coder_agent.main.id
|
||||
slug = "terminal"
|
||||
display_name = "Terminal"
|
||||
icon = "/icon/terminal.svg"
|
||||
command = "bash"
|
||||
}
|
||||
|
||||
resource "coder_app" "pgadmin" {
|
||||
count = local.services_enabled && data.coder_parameter.enable_pgadmin.value ? 1 : 0
|
||||
agent_id = coder_agent.main.id
|
||||
slug = "pgadmin"
|
||||
display_name = "pgAdmin"
|
||||
url = "http://localhost:5050"
|
||||
icon = "/icon/postgres.svg"
|
||||
subdomain = true
|
||||
share = "owner"
|
||||
|
||||
healthcheck {
|
||||
url = "http://localhost:5050"
|
||||
interval = 15
|
||||
threshold = 5
|
||||
}
|
||||
}
|
||||
|
||||
resource "coder_app" "qdrant" {
|
||||
count = local.services_enabled ? 1 : 0
|
||||
agent_id = coder_agent.main.id
|
||||
slug = "qdrant"
|
||||
display_name = "Qdrant"
|
||||
url = "http://localhost:6333"
|
||||
icon = "/icon/database.svg"
|
||||
subdomain = false
|
||||
share = "owner"
|
||||
|
||||
healthcheck {
|
||||
url = "http://localhost:6333/health"
|
||||
interval = 30
|
||||
threshold = 10
|
||||
}
|
||||
}
|
||||
|
||||
resource "coder_app" "jupyter" {
|
||||
count = data.coder_parameter.enable_jupyter.value ? 1 : 0
|
||||
agent_id = coder_agent.main.id
|
||||
slug = "jupyter"
|
||||
display_name = "JupyterLab"
|
||||
url = "http://localhost:8888"
|
||||
icon = "/icon/jupyter.svg"
|
||||
subdomain = true
|
||||
share = "owner"
|
||||
|
||||
healthcheck {
|
||||
url = "http://localhost:8888"
|
||||
interval = 20
|
||||
threshold = 10
|
||||
}
|
||||
}
|
||||
|
||||
locals {
|
||||
dev_ports = {
|
||||
"dev-3000" = {
|
||||
display = "Web Dev (3000)"
|
||||
url = "http://localhost:3000"
|
||||
icon = "/icon/javascript.svg"
|
||||
}
|
||||
"api-8000" = {
|
||||
display = "API (8000)"
|
||||
url = "http://localhost:8000"
|
||||
icon = "/icon/node.svg"
|
||||
}
|
||||
"vite-5173" = {
|
||||
display = "Vite (5173)"
|
||||
url = "http://localhost:5173"
|
||||
icon = "/icon/typescript.svg"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
resource "coder_app" "dev_ports" {
|
||||
for_each = local.dev_ports
|
||||
|
||||
agent_id = coder_agent.main.id
|
||||
slug = each.key
|
||||
display_name = each.value.display
|
||||
url = each.value.url
|
||||
icon = each.value.icon
|
||||
subdomain = true
|
||||
share = "owner"
|
||||
|
||||
healthcheck {
|
||||
url = each.value.url
|
||||
interval = 10
|
||||
threshold = 10
|
||||
}
|
||||
}
|
||||
|
||||
resource "coder_app" "claude_cli" {
|
||||
count = data.coder_parameter.enable_ai_tools.value ? 1 : 0
|
||||
agent_id = coder_agent.main.id
|
||||
slug = "claude-cli"
|
||||
display_name = "Claude CLI"
|
||||
icon = "/icon/claude.svg"
|
||||
command = "bash -lc 'claude --dangerously-skip-permissions'"
|
||||
group = "AI Tools"
|
||||
order = 10
|
||||
}
|
||||
|
||||
resource "coder_app" "codex_cli" {
|
||||
count = data.coder_parameter.enable_ai_tools.value ? 1 : 0
|
||||
agent_id = coder_agent.main.id
|
||||
slug = "codex-cli"
|
||||
display_name = "Codex CLI"
|
||||
icon = "/icon/code.svg"
|
||||
command = "bash -lc 'codex --dangerously-bypass-approvals-and-sandbox'"
|
||||
group = "AI Tools"
|
||||
order = 20
|
||||
}
|
||||
193 tf/main.tf
@@ -1,193 +0,0 @@
terraform {
  required_version = ">= 1.3.0"

  required_providers {
    coder = {
      source = "coder/coder"
      version = ">= 2.7"
    }
    docker = {
      source = "kreuzwerker/docker"
      version = "~> 2.25"
    }
    http = {
      source = "hashicorp/http"
      version = ">= 3.0"
    }
  }
}

provider "coder" {}

provider "docker" {
  host = var.docker_socket != "" ? var.docker_socket : null
}

provider "http" {}

# Workspace context

data "coder_provisioner" "me" {}
data "coder_workspace" "me" {}
data "coder_workspace_owner" "me" {}

# User inputs kept intentionally small so the template is easy to launch.

data "coder_parameter" "project_repository" {
  name = "project_repository"
  display_name = "Project repository"
  description = "Optional Git URL cloned into /workspaces on first startup."
  default = ""
  mutable = true
  order = 1
}

data "coder_parameter" "enable_services" {
  name = "enable_services"
  display_name = "Enable PostgreSQL / Redis / Qdrant"
  description = "Provision bundled data services inside the workspace network."
  type = "bool"
  default = "true"
  mutable = true
  order = 2
}

data "coder_parameter" "enable_ai_tools" {
  name = "enable_ai_tools"
  display_name = "Install AI tooling"
  description = "Run the bundled AI helper scripts (Claude, Cursor, Windsurf)."
  type = "bool"
  default = "true"
  mutable = true
  order = 3
}

data "coder_parameter" "enable_pgadmin" {
  name = "enable_pgadmin"
  display_name = "Expose pgAdmin"
  description = "Start the pgAdmin container when database services are enabled."
  type = "bool"
  default = "true"
  mutable = true
  order = 4
}

data "coder_parameter" "enable_jupyter" {
  name = "enable_jupyter"
  display_name = "Expose JupyterLab"
  description = "Start the optional JupyterLab container."
  type = "bool"
  default = "false"
  mutable = true
  order = 5
}

data "coder_parameter" "enable_jetbrains" {
  name = "enable_jetbrains"
  display_name = "JetBrains Gateway"
  description = "Install JetBrains Gateway integration for this workspace."
  type = "bool"
  default = "true"
  mutable = true
  order = 6
}

data "coder_parameter" "ai_prompt" {
  name = "AI Prompt"
  display_name = "AI Task Prompt"
  description = "Optional pre-filled prompt shown when starting a Claude Code task."
  type = "string"
  default = ""
  mutable = true
  order = 7
  form_type = "textarea"
}

locals {
  bind_mounts = [
    { host = "${var.host_home_path}/.gitconfig", container = "/home/coder/.gitconfig" },
    { host = "${var.host_home_path}/.git-credentials", container = "/home/coder/.git-credentials" },
    { host = "${var.host_home_path}/.ssh", container = "/home/coder/.ssh" },
    { host = "${var.host_home_path}/.zshrc", container = "/home/coder/.zshrc" },
    { host = "${var.host_home_path}/.oh-my-zsh", container = "/home/coder/.oh-my-zsh" },
    { host = "${var.host_home_path}/.zsh_history", container = "/home/coder/.zsh_history" },
    { host = "${var.host_home_path}/.p10k.zsh", container = "/home/coder/.p10k.zsh" },
    { host = "${var.host_home_path}/.claude", container = "/home/coder/.claude" },
    { host = "${var.host_home_path}/.codex", container = "/home/coder/.codex" },
    { host = "${var.host_home_path}/.1password", container = "/home/coder/.1password" },
    { host = "${var.host_home_path}/.config", container = "/home/coder/.config" },
    { host = "${var.host_home_path}", container = "/home/coder", read_only = false },
    { host = "${var.host_home_path}/.local", container = "/home/coder/.local" },
    { host = "${var.host_home_path}/.cache", container = "/home/coder/.cache" },
    { host = "${var.host_home_path}/.docker/config.json", container = "/home/coder/.docker/config.json" },
    { host = "${var.host_home_path}/code-tools", container = "/home/coder/code-tools" },
    { host = "${var.host_home_path}/claude-scripts", container = "/home/coder/claude-scripts" },
  ]

  workspace_id = data.coder_workspace.me.id
  container_name = "coder-${local.workspace_id}"

  git_author_name = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
  git_author_email = data.coder_workspace_owner.me.email

  project_repo_url = trimspace(data.coder_parameter.project_repository.value)
  repo_clone_command = local.project_repo_url != "" ? "git clone ${local.project_repo_url} /workspaces" : "echo 'No repository requested'"

  services_enabled = data.coder_parameter.enable_services.value
  pgadmin_enabled = data.coder_parameter.enable_pgadmin.value
  jupyter_enabled = data.coder_parameter.enable_jupyter.value
  port_forwarding = local.services_enabled || local.jupyter_enabled

  postgres_url = "postgresql://postgres:${var.postgres_password}@postgres-${local.workspace_id}:5432/postgres"
  redis_url = "redis://:${var.redis_password}@redis-${local.workspace_id}:6379"
  qdrant_url = "http://qdrant-${local.workspace_id}:6333"

  agent_startup = join("\n", compact([
    "set -eu",
    "export CODER_WORKSPACE_ID=${local.workspace_id}",
    "git config --global user.name \"${local.git_author_name}\"",
    "git config --global user.email \"${local.git_author_email}\"",
    local.project_repo_url != "" ? "if [ ! -d /workspaces/.git ]; then ${local.repo_clone_command}; fi" : "",
    "export ENABLE_PGADMIN=${tostring(local.pgadmin_enabled)}",
    "export ENABLE_JUPYTER=${tostring(local.jupyter_enabled)}",
    local.port_forwarding ? "bash /home/coder/code-tools/tf/scripts/port-forward.sh" : "echo 'No service port forwarding requested'"
  ]))
}

# Workspace network keeps the workspace stack isolated from the host.
resource "docker_network" "workspace" {
  name = "coder-${local.workspace_id}"
  driver = "bridge"

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }

  labels {
    label = "coder.owner"
    value = data.coder_workspace_owner.me.name
  }
}

# Persistent workspace data volume mounted at /workspaces inside the container.
resource "docker_volume" "workspaces" {
  name = "workspaces-${local.workspace_id}"

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }

  labels {
    label = "coder.type"
    value = "workspace-data"
  }
}

# Separate persistent home directory for the coder user.
# Base development container image (customise via terraform.tfvars).
resource "docker_image" "devcontainer" {
  name = var.devcontainer_image
  keep_locally = true
}
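For reference, when services, pgAdmin and a repository URL are all set, the agent_startup local above renders to a short shell script along these lines (a sketch only; the workspace ID, author identity and repository URL here are illustrative placeholders filled in at plan time):

  set -eu
  export CODER_WORKSPACE_ID=<workspace-uuid>
  git config --global user.name "Jane Developer"
  git config --global user.email "jane@example.com"
  if [ ! -d /workspaces/.git ]; then git clone https://example.com/project.git /workspaces; fi
  export ENABLE_PGADMIN=true
  export ENABLE_JUPYTER=false
  bash /home/coder/code-tools/tf/scripts/port-forward.sh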
@@ -1,46 +0,0 @@
output "workspace_id" {
  description = "Coder workspace ID"
  value = local.workspace_id
}

output "workspace_name" {
  description = "Coder workspace name"
  value = data.coder_workspace.me.name
}

output "container_name" {
  description = "Name of the workspace Docker container"
  value = local.container_name
}

output "project_repository" {
  description = "Repository cloned into /workspaces on first startup"
  value = local.project_repo_url
}

output "postgres_url" {
  description = "Internal PostgreSQL connection string"
  value = local.services_enabled ? local.postgres_url : null
  sensitive = true
}

output "redis_url" {
  description = "Internal Redis connection string"
  value = local.services_enabled ? local.redis_url : null
  sensitive = true
}

output "qdrant_url" {
  description = "Internal Qdrant endpoint"
  value = local.services_enabled ? local.qdrant_url : null
}

output "docker_network_name" {
  description = "Docker network assigned to this workspace"
  value = docker_network.workspace.name
}

output "workspace_volume_name" {
  description = "Docker volume used for /workspaces"
  value = docker_volume.workspaces.name
}
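If the template is ever applied directly with Terraform for debugging (outside Coder's provisioner), these outputs can be read back with the standard CLI; -raw is needed to see the sensitive connection strings:

  terraform output workspace_id
  terraform output docker_network_name
  terraform output -raw postgres_url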
@@ -1,68 +0,0 @@
locals {
  core_scripts = {
    workspace = {
      display = "Setup Development Workspace"
      icon = "/icon/container.svg"
      path = "${path.module}/scripts/workspace-setup.sh"
    }
    dev_tools = {
      display = "Install Development Tools"
      icon = "/icon/code.svg"
      path = "${path.module}/scripts/dev-tools.sh"
    }
    git_hooks = {
      display = "Configure Git Hooks"
      icon = "/icon/git.svg"
      path = "${path.module}/scripts/git-hooks.sh"
    }
  }

  ai_scripts = {
    claude = {
      enabled = data.coder_parameter.enable_ai_tools.value && var.install_claude_code
      display = "Install Claude CLI"
      icon = "/icon/claude.svg"
      path = "${path.module}/scripts/claude-install.sh"
    }
    codex = {
      enabled = data.coder_parameter.enable_ai_tools.value && var.install_codex_support
      display = "Install Codex CLI"
      icon = "/icon/code.svg"
      path = "${path.module}/scripts/codex-setup.sh"
    }
    cursor = {
      enabled = data.coder_parameter.enable_ai_tools.value && var.install_cursor_support
      display = "Configure Cursor"
      icon = "/icon/cursor.svg"
      path = "${path.module}/scripts/cursor-setup.sh"
    }
    windsurf = {
      enabled = data.coder_parameter.enable_ai_tools.value && var.install_windsurf_support
      display = "Configure Windsurf"
      icon = "/icon/windsurf.svg"
      path = "${path.module}/scripts/windsurf-setup.sh"
    }
  }
}

resource "coder_script" "core" {
  for_each = local.core_scripts

  agent_id = coder_agent.main.id
  display_name = each.value.display
  icon = each.value.icon
  run_on_start = true

  script = "echo '${base64encode(file(each.value.path))}' | base64 -d | tr -d '\\r' | bash"
}

resource "coder_script" "ai" {
  for_each = { for key, value in local.ai_scripts : key => value if value.enabled }

  agent_id = coder_agent.main.id
  display_name = each.value.display
  icon = each.value.icon
  run_on_start = true

  script = "echo '${base64encode(file(each.value.path))}' | base64 -d | tr -d '\\r' | bash"
}
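Both coder_script resources above ship their script body base64-encoded so quoting and stray CRLFs survive the trip through the agent. What each one executes at start-up is conceptually just the following (a sketch with a stand-in payload; the real encoded string is produced by base64encode(file(...)) at plan time):

  encoded="$(base64 <<'EOF'
  #!/usr/bin/env bash
  echo "hello from a coder_script"
  EOF
  )"
  echo "$encoded" | base64 -d | tr -d '\r' | bash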
@@ -1,31 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

if command -v claude >/dev/null 2>&1; then
  echo "Claude CLI already installed ($(claude --version))"
  exit 0
fi

mkdir -p "$HOME/bin"

if ! command -v npm >/dev/null 2>&1; then
  echo "npm is required to install the Claude CLI" >&2
  exit 0
fi

npm install -g @anthropic-ai/claude-code >/dev/null

cat <<'SCRIPT' > "$HOME/bin/claude-help"
#!/usr/bin/env bash
cat <<'TXT'
Claude CLI quick start
----------------------
claude auth login    # authenticate
claude chat          # open an interactive chat
claude edit <file>   # AI assisted editing
claude analyze .     # Review the current directory
TXT
SCRIPT
chmod +x "$HOME/bin/claude-help"

echo "Claude CLI installed. Run 'claude-help' for usage tips."
@@ -1,15 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

if ! command -v npm >/dev/null 2>&1; then
  echo 'npm not found; skipping Codex CLI install.' >&2
  exit 0
fi

if command -v sudo >/dev/null 2>&1; then
  sudo npm install -g @openai/codex >/dev/null 2>&1 || sudo npm install -g @openai/codex
else
  npm install -g @openai/codex >/dev/null 2>&1 || npm install -g @openai/codex
fi

echo 'OpenAI Codex CLI installed or already present.'
@@ -1,27 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

CURSOR_DIR="$HOME/.cursor-server"
mkdir -p "$CURSOR_DIR/data/User" "$CURSOR_DIR/extensions"

cat <<'JSON' > "$CURSOR_DIR/data/User/settings.json"
{
  "editor.formatOnSave": true,
  "editor.tabSize": 2,
  "files.autoSave": "afterDelay",
  "files.autoSaveDelay": 1000,
  "git.enableSmartCommit": true,
  "terminal.integrated.defaultProfile.linux": "bash"
}
JSON

cat <<'JSON' > "$CURSOR_DIR/data/User/keybindings.json"
[
  {
    "key": "ctrl+shift+;",
    "command": "workbench.action.terminal.toggleTerminal"
  }
]
JSON

echo "Cursor configuration refreshed in $CURSOR_DIR"
@@ -1,44 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

if command -v apt-get >/dev/null 2>&1; then
  wait_for_apt() {
    if command -v fuser >/dev/null 2>&1; then
      while \
        fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1 ||
        fuser /var/lib/apt/lists/lock >/dev/null 2>&1 ||
        fuser /var/lib/dpkg/lock >/dev/null 2>&1; do
        echo "Waiting for apt locks to clear..."
        sleep 2
      done
    else
      # Fallback when fuser is unavailable
      sleep 2
    fi
  }

  apt_exec() {
    if command -v sudo >/dev/null 2>&1; then
      sudo "$@"
    else
      "$@"
    fi
  }

  export DEBIAN_FRONTEND=noninteractive
  wait_for_apt
  apt_exec apt-get update -qq
  wait_for_apt
  apt_exec apt-get install -y \
    make tree jq curl wget unzip git ripgrep fd-find htop >/dev/null
  # fd binary name differs on Debian
  if [[ ! -e /usr/local/bin/fd && -e /usr/bin/fdfind ]]; then
    apt_exec ln -sf /usr/bin/fdfind /usr/local/bin/fd
  fi
fi

if command -v npm >/dev/null 2>&1; then
  npm install -g tldr fkill-cli >/dev/null 2>&1 || true
fi

echo "Development tooling refreshed."
@@ -1,27 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

REPO_DIR="/workspaces"
HOOK_DIR="$REPO_DIR/.git/hooks"
META_DIR="/tmp/git-metadata"

if [[ ! -d "$REPO_DIR/.git" ]]; then
  echo "No Git repository found in $REPO_DIR; skipping hook install"
  exit 0
fi

mkdir -p "$HOOK_DIR" "$META_DIR"

cat <<'HOOK' > "$HOOK_DIR/post-commit"
#!/usr/bin/env bash
set -e
META_DIR=/tmp/git-metadata
mkdir -p "$META_DIR"

git branch --show-current > "$META_DIR/current-branch" 2>/dev/null || echo "main" > "$META_DIR/current-branch"
git rev-parse HEAD > "$META_DIR/commit-hash" 2>/dev/null || echo "unknown" > "$META_DIR/commit-hash"
git remote get-url origin > "$META_DIR/remote-url" 2>/dev/null || echo "no-remote" > "$META_DIR/remote-url"
HOOK
chmod +x "$HOOK_DIR/post-commit"

echo "Git post-commit hook installed for metadata capture."
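Once a commit lands in /workspaces, the hook above leaves the captured metadata in plain files that other tooling (or a quick manual check) can read back:

  cat /tmp/git-metadata/current-branch
  cat /tmp/git-metadata/commit-hash
  cat /tmp/git-metadata/remote-url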
@@ -1,57 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

WORKSPACE_ID="${CODER_WORKSPACE_ID:-}"
if [[ -z "${WORKSPACE_ID}" && -f /tmp/git-metadata/workspace-id ]]; then
  WORKSPACE_ID="$(cat /tmp/git-metadata/workspace-id)"
fi

if [[ -z "${WORKSPACE_ID}" ]]; then
  echo "Unable to determine CODER_WORKSPACE_ID; skipping port forwarding" >&2
  exit 0
fi

SERVICES_ENABLED="${ENABLE_SERVICES:-false}"
PGADMIN_ENABLED="${ENABLE_PGADMIN:-false}"
JUPYTER_ENABLED="${ENABLE_JUPYTER:-false}"

if ! command -v socat >/dev/null 2>&1; then
  if command -v apt-get >/dev/null 2>&1; then
    sudo apt-get update -qq
    sudo apt-get install -y socat >/dev/null
  elif command -v apk >/dev/null 2>&1; then
    sudo apk add --no-cache socat >/dev/null
  else
    echo "socat is required for port forwarding but could not be installed automatically" >&2
    exit 0
  fi
fi

# stop previous forwards if they exist
pkill -f "socat.*pgadmin" >/dev/null 2>&1 || true
pkill -f "socat.*qdrant" >/dev/null 2>&1 || true
pkill -f "socat.*jupyter" >/dev/null 2>&1 || true

if [[ "${SERVICES_ENABLED}" == "true" ]]; then
  if [[ "${PGADMIN_ENABLED}" == "true" ]]; then
    echo "Forwarding pgAdmin to localhost:5050"
    nohup socat TCP-LISTEN:5050,reuseaddr,fork TCP:pgadmin-${WORKSPACE_ID}:80 >/tmp/socat-pgadmin.log 2>&1 &
  else
    echo "pgAdmin disabled; skipping port forward"
  fi

  echo "Forwarding Qdrant to localhost:6333"
  nohup socat TCP-LISTEN:6333,reuseaddr,fork TCP:qdrant-${WORKSPACE_ID}:6333 >/tmp/socat-qdrant.log 2>&1 &
else
  echo "Database services disabled; skipping pgAdmin/Qdrant forwards"
fi

if [[ "${JUPYTER_ENABLED}" == "true" ]]; then
  echo "Forwarding JupyterLab to localhost:8888"
  nohup socat TCP-LISTEN:8888,reuseaddr,fork TCP:jupyter-${WORKSPACE_ID}:8888 >/tmp/socat-jupyter.log 2>&1 &
else
  echo "JupyterLab disabled; skipping port forward"
fi

sleep 2
ps -o pid,cmd -C socat || true
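A quick way to confirm the forwards from inside the workspace, assuming services and pgAdmin were enabled (the ports match the socat listeners above):

  curl -s http://localhost:6333/health          # Qdrant via the socat forward
  curl -sI http://localhost:5050 | head -n 1    # pgAdmin login page
  tail -n 5 /tmp/socat-qdrant.log               # listener log if something is down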
@@ -1,15 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

WIND_DIR="$HOME/.windsurf"
mkdir -p "$WIND_DIR/User"

cat <<'JSON' > "$WIND_DIR/User/settings.json"
{
  "editor.formatOnSave": true,
  "editor.tabSize": 2,
  "files.autoSave": "onFocusChange"
}
JSON

echo "Windsurf settings written to $WIND_DIR"
@@ -1,78 +0,0 @@
#!/bin/bash
# Workspace Metrics Display Script
# Formats CPU, Memory, and Disk usage with proper units and precision

set -e

echo "=== Workspace Metrics ==="

# CPU Usage - convert cores to percentage (assuming 4 cores total, adjust as needed)
if command -v coder >/dev/null 2>&1; then
  # Use coder stat if available (more accurate for container metrics)
  cpu_usage=$(coder stat cpu 2>/dev/null || echo "N/A")
  if [[ "$cpu_usage" != "N/A" ]]; then
    echo "CPU Usage: $cpu_usage"
  else
    # Fallback to top-based calculation
    cpu_percent=$(top -bn1 | awk 'FNR==3 {printf "%.2f%%", $2+$4}' 2>/dev/null || echo "N/A")
    echo "CPU Usage: $cpu_percent"
  fi
else
  # Calculate CPU percentage using top
  cpu_percent=$(top -bn1 | awk 'FNR==3 {printf "%.2f%%", $2+$4}' 2>/dev/null || echo "N/A")
  echo "CPU Usage: $cpu_percent"
fi

# Memory Usage - show both absolute values in GB and percentage
if command -v coder >/dev/null 2>&1; then
  # Try coder stat first
  mem_usage=$(coder stat mem --prefix Gi 2>/dev/null || echo "")
  if [[ -n "$mem_usage" ]]; then
    echo "Memory Usage: $mem_usage"
  else
    # Fallback to manual calculation
    mem_info=$(free -g | awk 'NR==2{
      used=$3;
      total=$2;
      percent=(used/total)*100;
      printf "%.2f/%.2f GB (%.1f%%)", used, total, percent
    }' 2>/dev/null || echo "N/A")
    echo "Memory Usage: $mem_info"
  fi
else
  # Manual memory calculation in GB
  mem_info=$(free -g | awk 'NR==2{
    used=$3;
    total=$2;
    percent=(used/total)*100;
    printf "%.2f/%.2f GB (%.1f%%)", used, total, percent
  }' 2>/dev/null || echo "N/A")
  echo "Memory Usage: $mem_info"
fi

# Disk Usage - focus on /home or workspace directory instead of root
# First try to find the actual workspace mount point
workspace_path="/home"
if [[ -d "/workspaces" ]]; then
  workspace_path="/workspaces"
elif [[ -d "/home/coder" ]]; then
  workspace_path="/home/coder"
fi

# Get disk usage for the workspace path
disk_usage=$(df -h "$workspace_path" 2>/dev/null | awk 'NR==2 {
  used=$3;
  total=$2;
  percent=$5;
  printf "%s/%s (%s)", used, total, percent
}' || echo "N/A")

echo "Disk Usage: $disk_usage"

# Load Average (optional additional metric)
if [[ -f /proc/loadavg ]]; then
  load_avg=$(awk '{printf "%.2f %.2f %.2f", $1, $2, $3}' /proc/loadavg 2>/dev/null || echo "N/A")
  echo "Load Average: $load_avg"
fi

echo "========================="
@@ -1,70 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

USER_NAME="${USER:-coder}"
HOME_DIR="${HOME:-/home/${USER_NAME}}"
WORKSPACES_DIR="/workspaces"
BIN_DIR="${HOME_DIR}/bin"
META_DIR="/tmp/git-metadata"

mkdir -p "${BIN_DIR}" "${HOME_DIR}/.config" "${META_DIR}" "${WORKSPACES_DIR}"

if [[ -n "${CODER_WORKSPACE_ID:-}" ]]; then
  echo "${CODER_WORKSPACE_ID}" > "${META_DIR}/workspace-id"
fi

# ensure the dev workspace has sensible permissions
if command -v chown >/dev/null 2>&1 && [[ "${EUID}" -eq 0 ]]; then
  chown -R "${USER_NAME}:${USER_NAME}" "${HOME_DIR}" "${WORKSPACES_DIR}" || true
fi

# Configure git user (startup script already sets git config, but we guard here too)
if command -v git >/dev/null 2>&1; then
  if [[ -n "${GIT_AUTHOR_NAME:-}" ]]; then
    git config --global user.name "${GIT_AUTHOR_NAME}"
  fi
  if [[ -n "${GIT_AUTHOR_EMAIL:-}" ]]; then
    git config --global user.email "${GIT_AUTHOR_EMAIL}"
  fi
fi

# Build a lightweight devinfo helper for quick diagnostics.
cat <<'SCRIPT' > "${BIN_DIR}/devinfo"
#!/usr/bin/env bash
set -euo pipefail

echo "Workspace diagnostics"
echo "----------------------"
echo "User: $(whoami)"
echo "Home: ${HOME}"
echo "Workspace: /workspaces"

if command -v node >/dev/null 2>&1; then
  echo "Node: $(node --version)"
fi
if command -v npm >/dev/null 2>&1; then
  echo "npm: $(npm --version)"
fi
if command -v python3 >/dev/null 2>&1; then
  echo "Python: $(python3 --version | awk '{print $2}')"
fi
if command -v rustc >/dev/null 2>&1; then
  echo "Rust: $(rustc --version | awk '{print $2}')"
fi
if command -v cargo >/dev/null 2>&1; then
  echo "Cargo: $(cargo --version | awk '{print $2}')"
fi

if [[ -n "${POSTGRES_URL:-}" ]]; then
  echo "PostgreSQL: ${POSTGRES_URL}"
fi
if [[ -n "${REDIS_URL:-}" ]]; then
  echo "Redis: ${REDIS_URL}"
fi
if [[ -n "${QDRANT_URL:-}" ]]; then
  echo "Qdrant: ${QDRANT_URL}"
fi
SCRIPT
chmod +x "${BIN_DIR}/devinfo"

echo "Workspace initialisation complete. Run 'devinfo' for a quick status summary."
tf/services.tf
@@ -1,296 +0,0 @@
# Data services run inside the per-workspace Docker network. They stay optional
# so light-weight workspaces can skip all of them.

resource "docker_volume" "postgres_data" {
  count = local.services_enabled ? 1 : 0
  name = "postgres-data-${local.workspace_id}"

  labels {
    label = "coder.service"
    value = "postgres"
  }

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }
}

resource "docker_container" "postgres" {
  count = local.services_enabled ? 1 : 0
  image = "postgres:${var.postgres_version}-alpine"
  name = "postgres-${local.workspace_id}"

  env = [
    "POSTGRES_DB=postgres",
    "POSTGRES_USER=postgres",
    "POSTGRES_PASSWORD=${var.postgres_password}",
    "POSTGRES_INITDB_ARGS=--auth-local=trust --auth-host=md5",
    "POSTGRES_SHARED_PRELOAD_LIBRARIES=pg_stat_statements",
    "POSTGRES_MAX_CONNECTIONS=${var.postgres_max_connections}"
  ]

  networks_advanced {
    name = docker_network.workspace.name
  }

  volumes {
    volume_name = docker_volume.postgres_data[0].name
    container_path = "/var/lib/postgresql/data"
  }

  healthcheck {
    test = ["CMD-SHELL", "pg_isready -U postgres"]
    interval = "15s"
    timeout = "5s"
    retries = 5
    start_period = "30s"
  }

  restart = "unless-stopped"

  labels {
    label = "coder.service"
    value = "postgres"
  }

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }
}

resource "docker_volume" "redis_data" {
  count = local.services_enabled ? 1 : 0
  name = "redis-data-${local.workspace_id}"

  labels {
    label = "coder.service"
    value = "redis"
  }

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }
}

resource "docker_container" "redis" {
  count = local.services_enabled ? 1 : 0
  image = "redis:${var.redis_version}-alpine"
  name = "redis-${local.workspace_id}"

  command = [
    "redis-server",
    "--requirepass", var.redis_password,
    "--appendonly", "yes",
    "--appendfsync", "everysec",
    "--maxmemory", var.redis_max_memory,
    "--maxmemory-policy", "allkeys-lru"
  ]

  networks_advanced {
    name = docker_network.workspace.name
  }

  volumes {
    volume_name = docker_volume.redis_data[0].name
    container_path = "/data"
  }

  healthcheck {
    test = ["CMD", "redis-cli", "-a", var.redis_password, "ping"]
    interval = "15s"
    timeout = "3s"
    retries = 5
    start_period = "10s"
  }

  restart = "unless-stopped"

  labels {
    label = "coder.service"
    value = "redis"
  }

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }
}

resource "docker_volume" "qdrant_data" {
  count = local.services_enabled ? 1 : 0
  name = "qdrant-data-${local.workspace_id}"

  labels {
    label = "coder.service"
    value = "qdrant"
  }

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }
}

resource "docker_container" "qdrant" {
  count = local.services_enabled ? 1 : 0
  image = "qdrant/qdrant:${var.qdrant_version}"
  name = "qdrant-${local.workspace_id}"

  env = [
    "QDRANT__SERVICE__HTTP_PORT=6333",
    "QDRANT__SERVICE__GRPC_PORT=6334",
    "QDRANT__SERVICE__HOST=0.0.0.0",
    "QDRANT__LOG_LEVEL=INFO"
  ]

  networks_advanced {
    name = docker_network.workspace.name
  }

  volumes {
    volume_name = docker_volume.qdrant_data[0].name
    container_path = "/qdrant/storage"
  }

  healthcheck {
    test = ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:6333/health || exit 1"]
    interval = "20s"
    timeout = "5s"
    retries = 5
    start_period = "40s"
  }

  restart = "unless-stopped"

  labels {
    label = "coder.service"
    value = "qdrant"
  }

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }
}

resource "docker_volume" "pgadmin_data" {
  count = local.services_enabled && data.coder_parameter.enable_pgadmin.value ? 1 : 0
  name = "pgadmin-data-${local.workspace_id}"

  labels {
    label = "coder.service"
    value = "pgadmin"
  }

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }
}

resource "docker_container" "pgadmin" {
  count = local.services_enabled && data.coder_parameter.enable_pgadmin.value ? 1 : 0
  image = "dpage/pgadmin4:latest"
  name = "pgadmin-${local.workspace_id}"

  env = [
    "PGADMIN_DEFAULT_EMAIL=${var.pgadmin_email}",
    "PGADMIN_DEFAULT_PASSWORD=${var.pgadmin_password}",
    "PGADMIN_CONFIG_SERVER_MODE=False",
    "PGADMIN_CONFIG_MASTER_PASSWORD_REQUIRED=False",
    "PGADMIN_LISTEN_PORT=80"
  ]

  networks_advanced {
    name = docker_network.workspace.name
  }

  volumes {
    volume_name = docker_volume.pgadmin_data[0].name
    container_path = "/var/lib/pgadmin"
  }

  healthcheck {
    test = ["CMD-SHELL", "nc -z localhost 80 || exit 1"]
    interval = "30s"
    timeout = "10s"
    retries = 3
    start_period = "60s"
  }

  restart = "unless-stopped"

  labels {
    label = "coder.service"
    value = "pgadmin"
  }

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }
}

resource "docker_volume" "jupyter_data" {
  count = data.coder_parameter.enable_jupyter.value ? 1 : 0
  name = "jupyter-data-${local.workspace_id}"

  labels {
    label = "coder.service"
    value = "jupyter"
  }

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }
}

resource "docker_container" "jupyter" {
  count = data.coder_parameter.enable_jupyter.value ? 1 : 0
  image = "jupyter/scipy-notebook:latest"
  name = "jupyter-${local.workspace_id}"

  env = [
    "JUPYTER_ENABLE_LAB=yes",
    "JUPYTER_TOKEN=",
    "RESTARTABLE=yes",
    "JUPYTER_PORT=8888"
  ]

  networks_advanced {
    name = docker_network.workspace.name
  }

  volumes {
    volume_name = docker_volume.jupyter_data[0].name
    container_path = "/home/jovyan/work"
  }

  volumes {
    volume_name = docker_volume.workspaces.name
    container_path = "/home/jovyan/workspaces"
  }

  healthcheck {
    test = ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8888"]
    interval = "30s"
    timeout = "10s"
    retries = 5
  }

  restart = "unless-stopped"

  labels {
    label = "coder.service"
    value = "jupyter"
  }

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }
}
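From inside the workspace container these services resolve by their per-workspace hostnames, and the agent already exports the connection strings, so a minimal smoke test (assuming services are enabled and psql/redis-cli are installed) looks roughly like:

  psql "$POSTGRES_URL" -c 'select version();'
  redis-cli -u "$REDIS_URL" ping
  curl -s "$QDRANT_URL/collections"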
@@ -1,8 +0,0 @@
devcontainer_image = "mcr.microsoft.com/devcontainers/universal:2-linux"
workspace_memory_limit = 8192
postgres_password = "devpassword"
redis_password = "devpassword"
postgres_max_connections = 100
redis_max_memory = "512mb"
pgadmin_email = "admin@dev.local"
pgadmin_password = "adminpassword"
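These are development defaults only; when iterating on the template directly with Terraform they can be overridden per run without editing the file, e.g. (override.tfvars being whatever local, unversioned file you keep your own values in):

  terraform plan -var 'postgres_password=something-stronger' -var 'redis_max_memory=1gb'
  terraform apply -var-file=override.tfvars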
tf/variables.tf
@@ -1,116 +0,0 @@
variable "host_home_path" {
  description = "Absolute path to the host home directory for bind mounts."
  type = string
  default = "/home/trav"
}

variable "docker_socket" {
  description = "Docker daemon socket URI (leave blank for default)."
  type = string
  default = ""
}

variable "devcontainer_image" {
  description = "Container image used for the main workspace."
  type = string
  default = "mcr.microsoft.com/devcontainers/universal:2-linux"
}

variable "workspace_memory_limit" {
  description = "Workspace memory limit in MB. Use 0 to inherit the image defaults."
  type = number
  default = 8192
}

variable "enable_docker_in_docker" {
  description = "Mount /var/run/docker.sock into the workspace container."
  type = bool
  default = true
}

variable "block_file_transfer" {
  description = "Set CODER_AGENT_BLOCK_FILE_TRANSFER=1 to disable file transfer tooling."
  type = bool
  default = false
}

variable "postgres_version" {
  description = "PostgreSQL image tag."
  type = string
  default = "17"
}

variable "postgres_password" {
  description = "PostgreSQL password for the postgres user."
  type = string
  default = "devpassword"
  sensitive = true
}

variable "postgres_max_connections" {
  description = "Maximum PostgreSQL connections."
  type = number
  default = 100
}

variable "redis_version" {
  description = "Redis image tag."
  type = string
  default = "7"
}

variable "redis_password" {
  description = "Redis AUTH password."
  type = string
  default = "devpassword"
  sensitive = true
}

variable "redis_max_memory" {
  description = "Redis maxmemory value (e.g. 256mb)."
  type = string
  default = "512mb"
}

variable "qdrant_version" {
  description = "Qdrant image tag."
  type = string
  default = "latest"
}

variable "pgadmin_email" {
  description = "pgAdmin login email."
  type = string
  default = "admin@dev.local"
}

variable "pgadmin_password" {
  description = "pgAdmin login password."
  type = string
  default = "adminpassword"
  sensitive = true
}

variable "install_claude_code" {
  description = "Install the Claude CLI helper when AI tooling is enabled."
  type = bool
  default = true
}

variable "install_cursor_support" {
  description = "Install Cursor configuration when AI tooling is enabled."
  type = bool
  default = true
}

variable "install_windsurf_support" {
  description = "Install Windsurf configuration when AI tooling is enabled."
  type = bool
  default = true
}

variable "install_codex_support" {
  description = "Install OpenAI Codex CLI when AI tooling is enabled."
  type = bool
  default = true
}
tf/workspace.tf
@@ -1,232 +0,0 @@
resource "coder_agent" "main" {
  arch = data.coder_provisioner.me.arch
  os = "linux"
  dir = "/workspaces"

  env = {
    "GIT_AUTHOR_NAME" = local.git_author_name
    "GIT_AUTHOR_EMAIL" = local.git_author_email
    "CODER_WORKSPACE_ID" = local.workspace_id
    "CODER_WORKSPACE_REPO" = local.project_repo_url
    "POSTGRES_URL" = local.services_enabled ? local.postgres_url : ""
    "REDIS_URL" = local.services_enabled ? local.redis_url : ""
    "QDRANT_URL" = local.services_enabled ? local.qdrant_url : ""
    "ENABLE_PGADMIN" = tostring(local.pgadmin_enabled)
    "ENABLE_JUPYTER" = tostring(local.jupyter_enabled)
    "ENABLE_SERVICES" = tostring(local.services_enabled)
    "CODER_AGENT_BLOCK_FILE_TRANSFER" = var.block_file_transfer ? "1" : ""
  }

  startup_script = local.agent_startup

  metadata {
    display_name = "CPU Usage"
    key = "0_cpu_usage"
    script = "coder stat cpu 2>/dev/null || echo 'n/a'"
    interval = 30
    timeout = 5
  }

  metadata {
    display_name = "Memory Usage"
    key = "1_memory_usage"
    script = "coder stat mem 2>/dev/null || echo 'n/a'"
    interval = 30
    timeout = 5
  }

  metadata {
    display_name = "Disk Usage"
    key = "2_disk_usage"
    script = "df -h /workspaces 2>/dev/null | awk 'NR==2 {print $5}' || echo 'n/a'"
    interval = 300
    timeout = 10
  }

  metadata {
    display_name = "Git Branch"
    key = "3_git_branch"
    script = "cd /workspaces && git branch --show-current 2>/dev/null || echo 'no-repo'"
    interval = 300
    timeout = 5
  }

  metadata {
    display_name = "PostgreSQL"
    key = "4_postgres"
    script = local.services_enabled ? format("pg_isready -h postgres-%s -p 5432 -U postgres >/dev/null && echo healthy || echo down", local.workspace_id) : "echo 'disabled'"
    interval = 60
    timeout = 5
  }

  metadata {
    display_name = "Redis"
    key = "5_redis"
    script = local.services_enabled ? format("redis-cli -h redis-%s -a %s ping 2>/dev/null | grep -qi pong && echo healthy || echo down", local.workspace_id, var.redis_password) : "echo 'disabled'"
    interval = 60
    timeout = 5
  }

  metadata {
    display_name = "Qdrant"
    key = "6_qdrant"
    script = local.services_enabled ? format("wget --no-verbose --tries=1 --spider http://qdrant-%s:6333/health 2>/dev/null && echo healthy || echo down", local.workspace_id) : "echo 'disabled'"
    interval = 60
    timeout = 5
  }
}

resource "docker_container" "workspace" {
  count = data.coder_workspace.me.start_count
  image = docker_image.devcontainer.image_id
  name = local.container_name
  hostname = data.coder_workspace.me.name

  memory = var.workspace_memory_limit > 0 ? var.workspace_memory_limit * 1024 * 1024 : null

  env = compact([
    "GIT_AUTHOR_NAME=${local.git_author_name}",
    "GIT_AUTHOR_EMAIL=${local.git_author_email}",
    "CODER_AGENT_TOKEN=${coder_agent.main.token}",
    "CODER_AGENT_DEVCONTAINERS_ENABLE=true",
    local.project_repo_url != "" ? "CODER_WORKSPACE_REPO=${local.project_repo_url}" : "",
  ])

  networks_advanced {
    name = docker_network.workspace.name
  }

  host {
    host = "host.docker.internal"
    ip = "host-gateway"
  }

  volumes {
    container_path = "/workspaces"
    volume_name = docker_volume.workspaces.name
  }

  dynamic "volumes" {
    for_each = local.bind_mounts
    content {
      host_path = volumes.value.host
      container_path = volumes.value.container
      read_only = try(volumes.value.read_only, false)
    }
  }

  dynamic "volumes" {
    for_each = var.enable_docker_in_docker ? [1] : []
    content {
      host_path = "/var/run/docker.sock"
      container_path = "/var/run/docker.sock"
    }
  }

  working_dir = "/workspaces"
  command = ["/bin/bash", "-c", "${coder_agent.main.init_script} && sleep infinity"]

  labels {
    label = "coder.owner"
    value = data.coder_workspace_owner.me.name
  }

  labels {
    label = "coder.workspace_id"
    value = local.workspace_id
  }

  depends_on = [
    docker_network.workspace,
    docker_volume.workspaces,
    docker_image.devcontainer
  ]
}

module "cursor_desktop" {
  count = data.coder_workspace.me.start_count > 0 && data.coder_parameter.enable_ai_tools.value ? 1 : 0
  source = "registry.coder.com/coder/cursor/coder"
  agent_id = coder_agent.main.id
  folder = "/workspaces"
  group = "Desktop IDEs"
  order = 40
}

module "windsurf_desktop" {
  count = data.coder_workspace.me.start_count > 0 && data.coder_parameter.enable_ai_tools.value ? 1 : 0
  source = "registry.coder.com/coder/windsurf/coder"
  agent_id = coder_agent.main.id
  folder = "/workspaces"
  group = "Desktop IDEs"
  order = 50
}

module "pycharm_desktop" {
  count = data.coder_workspace.me.start_count > 0 && data.coder_parameter.enable_jetbrains.value ? 1 : 0
  source = "registry.coder.com/coder/jetbrains-gateway/coder"
  agent_id = coder_agent.main.id
  folder = "/workspaces"
  jetbrains_ides = ["PY"]
  default = "PY"
  group = "Desktop IDEs"
  order = 60
  coder_parameter_order = 6
  slug = "pycharm-gateway"
}

module "claude_code" {
  count = data.coder_workspace.me.start_count > 0 && data.coder_parameter.enable_ai_tools.value ? 1 : 0
  source = "registry.coder.com/coder/claude-code/coder"
  agent_id = coder_agent.main.id
  workdir = "/workspaces"
  group = "AI Tools"
  order = 30
  pre_install_script = <<-EOT
#!/bin/bash
set -euo pipefail

if command -v curl >/dev/null 2>&1 && curl --help 2>&1 | grep -q -- "--retry-all-errors"; then
  exit 0
fi

real_curl="$(command -v curl || true)"
if [[ -z "$real_curl" ]]; then
  exit 0
fi

wrapper=/usr/local/bin/curl
if [[ "$real_curl" == "$wrapper" ]]; then
  exit 0
fi

real_curl="$(readlink -f "$real_curl")"

python3 - <<'PY' "$real_curl" "$wrapper"
import os
import stat
import sys

real = sys.argv[1]
wrapper = sys.argv[2]
script = """#!/usr/bin/env python3
import os
import sys

real_curl = {real!r}
args = [arg for arg in sys.argv[1:] if arg != "--retry-all-errors"]
os.execv(real_curl, [real_curl] + args)
""".format(real=real)

with open(wrapper, "w", encoding="utf-8") as fh:
    fh.write(script)

os.chmod(
    wrapper,
    stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
    stat.S_IRGRP | stat.S_IXGRP |
    stat.S_IROTH | stat.S_IXOTH,
)
PY
EOT
}
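The pre_install_script above guards against base images whose curl is too old to know --retry-all-errors, a flag the module's install step apparently relies on; the generated wrapper simply drops that flag before exec'ing the real binary. After it runs, a call like the following behaves as if the flag were never given (a sketch, assuming the wrapper landed at /usr/local/bin/curl and example.com stands in for the real install URL):

  curl --retry-all-errors -fsSL https://example.com/install.sh
  # the wrapper strips --retry-all-errors and execs: curl -fsSL https://example.com/install.sh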