yee
@@ -36,7 +36,7 @@
},
// Python
"ghcr.io/devcontainers/features/python:1": {
"version": "3.11",
"version": "3.12",
"installTools": true,
"installJupyterlab": true
},
@@ -87,6 +87,9 @@
"remoteUser": "coder",
"workspaceFolder": "/workspaces",
"workspaceMount": "source=${localWorkspaceFolder},target=/workspaces,type=bind",
"mounts": [
"source=${localWorkspaceFolder},target=/home/coder/code-tools,type=bind"
],
"runArgs": [
"--cap-add=SYS_PTRACE",
"--security-opt",
@@ -11,15 +11,30 @@ services:
CODER_HTTP_ADDRESS: '0.0.0.0:7080'
CODER_ACCESS_URL: '${CODER_ACCESS_URL}'
CODER_WILDCARD_ACCESS_URL: '${CODER_WILDCARD_ACCESS_URL}'
CODER_ADDRESS: '${CODER_ADDRESS}'
COOLIFY_RESOURCE_UUID: bwk8ckcok8o84cc0o4os4sso
COOLIFY_CONTAINER_NAME: coder-bwk8ckcok8o84cc0o4os4sso
COOLIFY_URL: 'http://dev.lab'
COOLIFY_FQDN: dev.lab
CODER_HTTP_ADDRESS: '${CODER_HTTP_ADDRESS}'
GITEA_HOST: '${GITEA_HOST}'
GITEA_PAT: '${GITEA_PAT}'
GITHUB_PAT: '${GITHUB_PAT}'
volumes:
- '/home/trav/.gitconfig:/home/coder/.gitconfig'
- '/home/trav/.git-credentials:/home/coder/.git-credentials'
- '/home/trav/.ssh:/home/coder/.ssh'
- '/home/trav/.zshrc:/home/coder/.zshrc'
- '/home/trav/.oh-my-zsh:/home/coder/.oh-my-zsh'
- '/home/trav/.zsh_history:/home/coder/.zsh_history'
- '/home/trav/.p10k.zsh:/home/coder/.p10k.zsh'
- '/home/trav/.claude:/home/coder/.claude'
- '/home/trav/.codex:/home/coder/.codex'
- '/home/trav/.1password:/home/coder/.1password'
- '/home/trav/.config:/home/coder/.config'
- '/home/trav/:/home/coder/'
- '/home/trav/.local/:/home/coder/.local'
- '/home/trav/.cache:/home/coder/.cache'
- '/home/trav/.docker/config.json:/home/coder/.docker/config.json'
- '/home/trav/code-tools:/home/coder/code-tools'
- '/home/trav/claude-scripts:/home/coder/claude-scripts'
- '/var/run/docker.sock:/var/run/docker.sock'
- 'bwk8ckcok8o84cc0o4os4sso_coder-home:/home/coder'
- /home/trav/code-tools:/home/coder/resources
- 'coder_home:/home/coder'
depends_on:
database:
condition: service_healthy
@@ -39,41 +54,14 @@ services:
- glance.id=coder
- glance.category=dev
- glance.hide=false
- coolify.managed=true
- coolify.version=4.0.0-beta.420.6
- coolify.serviceId=41
- coolify.type=service
- coolify.name=coder-bwk8ckcok8o84cc0o4os4sso
- coolify.resourceName=coder
- coolify.projectName=development
- coolify.serviceName=coder
- coolify.environmentName=production
- coolify.pullRequestId=0
- coolify.service.subId=308
- coolify.service.subType=application
- coolify.service.subName=coder
- traefik.enable=true
- traefik.http.middlewares.gzip.compress=true
- traefik.http.middlewares.redirect-to-https.redirectscheme.scheme=https
- traefik.http.routers.http-0-bwk8ckcok8o84cc0o4os4sso-coder.entryPoints=http
- traefik.http.routers.http-0-bwk8ckcok8o84cc0o4os4sso-coder.middlewares=gzip
- 'traefik.http.routers.http-0-bwk8ckcok8o84cc0o4os4sso-coder.rule=Host(`dev.lab`) && PathPrefix(`/`)'
- traefik.http.routers.http-0-bwk8ckcok8o84cc0o4os4sso-coder.service=http-0-bwk8ckcok8o84cc0o4os4sso-coder
- traefik.http.services.http-0-bwk8ckcok8o84cc0o4os4sso-coder.loadbalancer.server.port=7080
container_name: coder-bwk8ckcok8o84cc0o4os4sso
restart: unless-stopped
networks:
bwk8ckcok8o84cc0o4os4sso: null
database:
image: 'postgres:17'
environment:
POSTGRES_USER: '${POSTGRES_USER:-username}'
POSTGRES_PASSWORD: '${POSTGRES_PASSWORD:-password}'
POSTGRES_DB: '${POSTGRES_DB:-coder}'
COOLIFY_RESOURCE_UUID: bwk8ckcok8o84cc0o4os4sso
COOLIFY_CONTAINER_NAME: database-bwk8ckcok8o84cc0o4os4sso
volumes:
- 'bwk8ckcok8o84cc0o4os4sso_coder-data:/var/lib/postgresql/data'
- 'coder_data:/var/lib/postgresql/data'
healthcheck:
test:
- CMD-SHELL
@@ -85,31 +73,6 @@ services:
- glance.name=Postgres-Coder
- glance.parent=coder
- glance.hide=false
- coolify.managed=true
- coolify.version=4.0.0-beta.420.6
- coolify.serviceId=41
- coolify.type=service
- coolify.name=database-bwk8ckcok8o84cc0o4os4sso
- coolify.resourceName=coder
- coolify.projectName=development
- coolify.serviceName=database
- coolify.environmentName=production
- coolify.pullRequestId=0
- coolify.service.subId=38
- coolify.service.subType=database
- coolify.service.subName=database
container_name: database-bwk8ckcok8o84cc0o4os4sso
restart: unless-stopped
networks:
bwk8ckcok8o84cc0o4os4sso: null
volumes:
bwk8ckcok8o84cc0o4os4sso_coder-home:
name: bwk8ckcok8o84cc0o4os4sso_coder-home
bwk8ckcok8o84cc0o4os4sso_coder-data:
name: bwk8ckcok8o84cc0o4os4sso_coder-data
networks:
bwk8ckcok8o84cc0o4os4sso:
name: bwk8ckcok8o84cc0o4os4sso
external: true
configs: { }
secrets: { }
coder_data: null
coder_home: null
12 tf/.terraform.lock.hcl generated
@@ -24,18 +24,6 @@ provider "registry.terraform.io/coder/coder" {
]
}

provider "registry.terraform.io/coder/envbuilder" {
version = "1.0.0"
constraints = "~> 1.0"
hashes = [
"h1:EijMsTkZ+GM+0gSaTR2Rw9FO9vJE7i6w3U5/Z76zBsQ=",
"zh:638f85855a86dd9f783ac667d302a2fe072ff6570e866dabed8082d74a246c09",
"zh:8316dd29b5015d178cb6f8ecd4b10e4df766a82496d06883ba4c91ef410ce719",
"zh:890df766e9b839623b1f0437355032a3c006226a6c200cd911e15ee1a9014e9f",
"zh:d91bc816e66c01ef552b04413bd0d83f35a217eb578148da31ba54c0fe0aca31",
]
}

provider "registry.terraform.io/hashicorp/http" {
version = "3.5.0"
constraints = ">= 3.0.0"
@@ -1 +1 @@
{"Modules":[{"Key":"","Source":"","Dir":"."},{"Key":"jetbrains_gateway","Source":"registry.coder.com/modules/jetbrains-gateway/coder","Version":"1.0.29","Dir":".terraform/modules/jetbrains_gateway"}]}
{"Modules":[{"Key":"","Source":"","Dir":"."},{"Key":"claude_code","Source":"registry.coder.com/coder/claude-code/coder","Version":"3.0.0","Dir":".terraform/modules/claude_code"},{"Key":"claude_code.agentapi","Source":"registry.coder.com/coder/agentapi/coder","Version":"1.1.1","Dir":".terraform/modules/claude_code.agentapi"},{"Key":"cursor_desktop","Source":"registry.coder.com/coder/cursor/coder","Version":"1.3.2","Dir":".terraform/modules/cursor_desktop"},{"Key":"jetbrains_gateway","Source":"registry.coder.com/modules/jetbrains-gateway/coder","Version":"1.0.29","Dir":".terraform/modules/jetbrains_gateway"},{"Key":"pycharm_desktop","Source":"registry.coder.com/coder/jetbrains-gateway/coder","Version":"1.2.2","Dir":".terraform/modules/pycharm_desktop"},{"Key":"windsurf_desktop","Source":"registry.coder.com/coder/windsurf/coder","Version":"1.2.0","Dir":".terraform/modules/windsurf_desktop"}]}
474 tf/README.md
@@ -1,446 +1,50 @@
# Terraform Coder Development Environment
# Terraform Workspace Template

A comprehensive development environment deployment using Terraform and Coder, providing isolated workspaces with integrated development tools, databases, and AI-powered coding assistants.
This Terraform module provisions a Coder workspace that mirrors the devcontainer experience defined in this repository. The files in `tf/` are mounted into the workspace at `/home/coder/code-tools`, so the helper scripts referenced below are always available.

## 🏗️ Architecture Overview
## What You Get

This configuration deploys self-contained development workspaces using Docker containers orchestrated by Coder. Each workspace includes:
- One Docker workspace container built from `var.devcontainer_image` (defaults to the universal Dev Container image).
- Optional PostgreSQL, Redis, and Qdrant services running on the same Docker network, plus pgAdmin and Jupyter toggles.
- Startup scripts that install core tooling and (optionally) AI helpers for Claude, Cursor, and Windsurf.
- A trimmed Coder application list (VS Code, Terminal, pgAdmin, Qdrant, Jupyter, and a few common dev ports).

- **Isolated Development Container** with VS Code, terminal access, and full development toolchain
- **Database Services** (PostgreSQL, Redis, Qdrant) with persistent storage
- **Management Interfaces** (pgAdmin, Qdrant Dashboard)
- **AI Development Tools** (Claude Code, Cursor, Windsurf support)
- **Reverse Proxy Integration** for seamless web access
## Key Inputs

### Network Architecture
| Name | Description | Default |
| --- | --- | --- |
| `devcontainer_image` | Workspace container image | `mcr.microsoft.com/devcontainers/universal:2-linux` |
| `workspace_memory_limit` | Memory limit in MB (0 = image default) | `8192` |
| `enable_docker_in_docker` | Mount `/var/run/docker.sock` | `true` |
| `postgres_password` / `redis_password` | Service credentials | `devpassword` |
| `postgres_max_connections` | PostgreSQL connection cap | `100` |
| `redis_max_memory` | Redis maxmemory setting | `512mb` |
| `pgadmin_email` / `pgadmin_password` | pgAdmin login | `admin@dev.local` / `adminpassword` |
| `install_*` flags | Control which AI helpers run when enabled | all `true` |

Workspace creators see only a handful of parameters:
1. Optional repository URL to clone into `/workspaces`.
2. Toggles for data services, AI tooling, pgAdmin, Jupyter, and JetBrains Gateway.
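
The template-level defaults in the table above can be overridden in `terraform.tfvars`. A minimal sketch, using only the variables listed above (values are illustrative, not requirements):

```hcl
# Illustrative overrides; see variables.tf for the full variable surface.
devcontainer_image       = "mcr.microsoft.com/devcontainers/universal:2-linux"
workspace_memory_limit   = 8192          # MB; 0 keeps the image default
enable_docker_in_docker  = true          # mounts /var/run/docker.sock
postgres_password        = "devpassword"
redis_password           = "devpassword"
postgres_max_connections = 100
redis_max_memory         = "512mb"
pgadmin_email            = "admin@dev.local"
pgadmin_password         = "adminpassword"
```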

## Files

```
┌─────────────────────────────────────────────────────────────┐
│ Reverse Proxy │
│ http://dev.lab │
│ *.dev.lab wildcard │
└─────────────────────────┬───────────────────────────────────┘
│
┌─────────────────────┼─────────────────────┐
│ Workspace A │ Workspace B │
│ network-{id-a} │ network-{id-b} │
│ │ │
│ ┌─────────────────┐ │ ┌─────────────────┐ │
│ │ Dev Container │ │ │ Dev Container │ │
│ │ VS Code:8080 │ │ │ VS Code:8080 │ │
│ └─────────────────┘ │ └─────────────────┘ │
│ │ │
│ ┌─────────────────┐ │ ┌─────────────────┐ │
│ │ PostgreSQL:5432 │ │ │ PostgreSQL:5432 │ │
│ │ Redis:6379 │ │ │ Redis:6379 │ │
│ │ Qdrant:6333 │ │ │ Qdrant:6333 │ │
│ │ pgAdmin:5050 │ │ │ pgAdmin:5050 │ │
│ └─────────────────┘ │ └─────────────────┘ │
└─────────────────────┼─────────────────────┘
main.tf # Providers, parameters, locals, Docker primitives
workspace.tf # Coder agent and workspace container
services.tf # PostgreSQL / Redis / Qdrant (+ pgAdmin & Jupyter)
apps.tf # Essential Coder apps and dev-port helpers
scripts.tf # Core + AI scripts wired to the agent
variables.tf # Minimal variable surface area
terraform.tfvars# Opinionated defaults you can override
outputs.tf # Helpful connection strings and metadata
scripts/ # Shell scripts invoked by Terraform resources
```

**Key Benefits:**
- ✅ **Complete Network Isolation** between workspaces
- ✅ **No Port Conflicts** - same ports used in different networks
- ✅ **Scalable** - unlimited concurrent workspaces
- ✅ **Secure** - services only accessible through authenticated Coder session
## Usage

## 📁 File Structure
1. From the Coder deployment (mounted at `/home/coder/code-tools/tf`), run `terraform init` and `terraform apply`.
2. When prompted for the **Project repository**, supply any Git URL to clone into `/workspaces` or leave it blank for an empty workspace.
3. Toggle services and AI tools to suit the workspace. If services are enabled, the bundled `port-forward.sh` script exposes pgAdmin on `localhost:5050` and Qdrant on `localhost:6333`.
4. The devcontainer image should install language toolchains; the `workspace-setup.sh` and `dev-tools.sh` scripts simply finish configuration inside the workspace.
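
A typical run of step 1 from inside the Coder deployment might look like the following (a sketch assuming the mount path described above):

```bash
# The repository is mounted at /home/coder/code-tools as described above.
cd /home/coder/code-tools/tf
terraform init
terraform plan    # review the workspace container, services, and apps to be created
terraform apply
```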

```
tf/
├── README.md # This file
├── main.tf # Provider config, parameters, networks, volumes
├── variables.tf # All configurable parameters
├── terraform.tfvars # Variable assignments
├── workspace.tf # Main development container and Coder agent
├── services.tf # Database containers (PostgreSQL, Redis, Qdrant)
├── apps.tf # Coder applications for service access
├── scripts.tf # AI tools installation and configuration
└── outputs.tf # Workspace information exports
```

## 🛠️ Included Services & Tools

### Development Environment
- **Container Base**: Microsoft DevContainers Universal image
- **Languages**: Node.js 20, Python 3.12, Rust (latest stable)
- **Package Managers**: npm, uv (Python), Cargo (Rust)
- **System Tools**: make, tree, jq, curl, wget, build-essential

### Database Services
- **PostgreSQL 17** with Alpine Linux base
- Connection pooling and performance optimization
- pg_stat_statements enabled for query analysis
- PostgreSQL client tools included in workspace
- **Redis 7** with Alpine Linux base
- Authentication enabled with configurable password
- AOF persistence with everysec fsync
- LRU eviction policy with configurable memory limits
- **Qdrant Vector Database** (latest)
- HTTP API on port 6333, gRPC on port 6334
- Persistent storage for vector collections
- Web dashboard for collection management

### Management Interfaces
- **pgAdmin 4** - PostgreSQL administration interface
- **Qdrant Dashboard** - Vector database management
- **VS Code Server** - Browser-based IDE
- **Terminal Access** - Full bash shell access

### AI Development Tools
- **Claude Code CLI** - Anthropic's official CLI
- **Cursor Support** - AI-powered code editor integration
- **Windsurf Support** - Codeium's development environment

### Development Packages

#### Node.js (Global)
```javascript
repomix, create-next-app, nodemon, concurrently
@types/node, typescript, eslint, prettier
```

#### Python (via uv)
```python
fastapi, uvicorn, requests, pandas, numpy
psycopg2-binary, redis, qdrant-client, python-dotenv
```

## 🚀 Quick Start

### Prerequisites

1. **Coder Instance** running and accessible
2. **Reverse Proxy** configured with wildcard subdomain support
3. **Docker** daemon accessible to Coder
4. **Terraform** >= 1.0 installed

### Environment Setup

Set these environment variables in your Coder deployment:
```bash
CODER_ACCESS_URL=http://dev.lab
CODER_WILDCARD_ACCESS_URL=*.dev.lab
```

### Deployment Steps

1. **Clone and Navigate**
```bash
git clone <your-repo>
cd tf/
```

2. **Review Configuration**
```bash
# Edit terraform.tfvars to match your needs
vim terraform.tfvars
```

3. **Deploy Infrastructure**
```bash
terraform init
terraform plan
terraform apply
```

4. **Access Workspace**
- Navigate to your Coder instance
- Create new workspace using this template
- Select your preferred Git repository
- Choose whether to enable database services

## ⚙️ Configuration

### Key Variables (terraform.tfvars)

#### Resource Limits
```hcl
workspace_memory_limit = 16384 # 16GB RAM
workspace_cpu_limit = 4 # 4 CPU cores
```

#### Service Configuration
```hcl
# Database passwords (change in production!)
postgres_password = "devpassword"
redis_password = "devpassword"

# Database tuning
postgres_max_connections = 100
redis_max_memory = "512mb"
```

#### Feature Toggles
```hcl
enable_pgadmin = true # PostgreSQL admin interface
enable_monitoring = true # Resource monitoring
enable_jupyter = false # Jupyter Lab for data science
```

#### Tool Versions
```hcl
node_version = "20" # Node.js LTS
python_version = "3.12" # Python latest stable
postgres_version = "17" # PostgreSQL latest
redis_version = "7" # Redis latest stable
```

### Coder Parameters

When creating a workspace, you'll be prompted for:

1. **Git Repository** - Select from your available repositories
2. **Enable Services** - Toggle database services on/off
3. **Enable AI Tools** - Toggle AI development tool installation

## 🌐 Service Access

### Web Applications

All services are accessible through Coder's reverse proxy with subdomain routing:

| Service | URL Pattern | Description |
|---------|-------------|-------------|
| VS Code | `code-server-{workspace}.dev.lab` | Browser-based IDE |
| Terminal | Available in Coder dashboard | Full bash shell |
| pgAdmin | `pgadmin-{workspace}.dev.lab` | PostgreSQL management |
| Qdrant | `qdrant-dashboard-{workspace}.dev.lab` | Vector DB dashboard |
| Dev Server | `nextjs-3000-{workspace}.dev.lab` | Next.js dev server |
| API Server | `api-8000-{workspace}.dev.lab` | FastAPI/Flask server |
| Vite Dev | `vite-5173-{workspace}.dev.lab` | Vite development |

### Database Connections

From within your workspace container:

```bash
# PostgreSQL
export POSTGRES_URL="postgresql://postgres:devpassword@postgres-{workspace-id}:5432/postgres"
psql $POSTGRES_URL

# Redis
export REDIS_URL="redis://:devpassword@redis-{workspace-id}:6379"
redis-cli -u $REDIS_URL

# Qdrant
export QDRANT_URL="http://qdrant-{workspace-id}:6333"
curl $QDRANT_URL/health
```

### Environment Variables

These are automatically set in your workspace:

```bash
NODE_VERSION=20
PYTHON_VERSION=3.12
POSTGRES_URL=postgresql://postgres:***@postgres-{id}:5432/postgres
REDIS_URL=redis://:***@redis-{id}:6379
QDRANT_URL=http://qdrant-{id}:6333
```

## 🔧 Development Workflow

### Initial Setup

1. **Access Workspace**
```bash
# Run environment info script
devinfo
```

2. **Verify Services**
```bash
# Check PostgreSQL
pg_isready -h postgres-{workspace-id} -U postgres

# Check Redis
redis-cli -h redis-{workspace-id} ping

# Check Qdrant
curl http://qdrant-{workspace-id}:6333/health
```

### Common Tasks

#### Next.js Project
```bash
# Create new Next.js app
npx create-next-app@latest my-app
cd my-app
npm run dev # Accessible at nextjs-3000-{workspace}.dev.lab
```

#### Python FastAPI Project
```bash
# Activate Python environment
source /home/coder/.venv/bin/activate

# Create FastAPI app
uv add fastapi uvicorn
# Your app runs on port 8000, accessible via reverse proxy
```

#### Database Development
```bash
# Connect to PostgreSQL
psql $POSTGRES_URL

# Create tables, run migrations, etc.
# Access pgAdmin for GUI management
```

## 🔍 Monitoring & Debugging

### Built-in Monitoring

Coder automatically tracks:
- **CPU Usage** - Updated every 60 seconds
- **RAM Usage** - Updated every 60 seconds
- **Disk Usage** - Updated every 5 minutes
- **Git Branch** - Updated every 5 minutes

### Health Checks

All services include comprehensive health checks:
- **PostgreSQL**: `pg_isready` command
- **Redis**: Connection test with `redis-cli`
- **Qdrant**: HTTP health endpoint
- **Web Services**: HTTP response verification

### Logs and Debugging

```bash
# Container logs
docker logs postgres-{workspace-id}
docker logs redis-{workspace-id}
docker logs qdrant-{workspace-id}

# Service status
docker ps --filter label=coder.workspace_id={workspace-id}

# Network inspection
docker network inspect coder-{workspace-id}
```

## 🔐 Security Considerations

### Network Security
- **Isolated Networks** - Each workspace has its own Docker network
- **No Host Exposure** - Services only accessible through authenticated Coder session
- **Internal Communication** - Services communicate using internal DNS names

### Authentication
- **Database Passwords** - Configurable via terraform.tfvars
- **Coder Authentication** - All access requires Coder login
- **sudo Access** - Granted to `coder` user for development flexibility

### Data Persistence
- **Database Data** - Persistent Docker volumes per workspace
- **Workspace Files** - Persistent across container restarts
- **User Configuration** - Home directory persistence

## 🚨 Troubleshooting

### Common Issues

#### "Unable to find user coder"
**Solution**: Container automatically creates `coder` user during startup. If issues persist, check container logs.

#### Port Already in Use
**Solution**: This configuration uses no host port mappings. All routing is handled internally.

#### Services Not Accessible
**Solutions**:
1. Verify reverse proxy wildcard configuration
2. Check Coder environment variables:
```bash
echo $CODER_ACCESS_URL
echo $CODER_WILDCARD_ACCESS_URL
```
3. Confirm service health via Coder dashboard

#### Database Connection Issues
**Solutions**:
1. Verify service is enabled in workspace parameters
2. Check container status: `docker ps`
3. Test internal connectivity: `curl http://postgres-{id}:5432`

### Debug Commands

```bash
# Environment information
devinfo

# Network connectivity
docker network ls | grep coder
docker network inspect coder-{workspace-id}

# Service health
curl http://qdrant-{workspace-id}:6333/health
pg_isready -h postgres-{workspace-id} -U postgres
redis-cli -h redis-{workspace-id} ping

# Container status
docker ps --filter label=coder.workspace_id={workspace-id}
```

## 🔄 Updates & Maintenance

### Updating Tool Versions

1. **Modify terraform.tfvars**
```hcl
node_version = "22" # Update Node.js version
python_version = "3.13" # Update Python version
```

2. **Apply Changes**
```bash
terraform plan
terraform apply
```

3. **Restart Workspaces** - Changes apply to new workspace instances

### Adding New Services

1. **Add to services.tf** - Define new container resource
2. **Add to apps.tf** - Create Coder app for access
3. **Update variables.tf** - Add configuration options
4. **Update startup script** in workspace.tf if needed

## 🤝 Contributing

### Adding New Features

1. Follow the existing file structure and naming conventions
2. Add proper health checks for new services
3. Update this README with new service documentation
4. Test with multiple concurrent workspaces

### Best Practices

- **Resource Labels** - All resources should include `coder.workspace_id` label
- **Network Isolation** - New services should join workspace network
- **No Host Ports** - Use internal networking only
- **Health Checks** - All web services need health check endpoints
- **Persistent Data** - Use Docker volumes for data that should survive restarts

## 📚 References

- [Coder Documentation](https://coder.com/docs)
- [Terraform Docker Provider](https://registry.terraform.io/providers/kreuzwerker/docker/latest/docs)
- [PostgreSQL Docker Hub](https://hub.docker.com/_/postgres)
- [Redis Docker Hub](https://hub.docker.com/_/redis)
- [Qdrant Documentation](https://qdrant.tech/documentation/)

## 📝 License

This configuration is provided as-is for development purposes. Modify passwords and security settings for production use.

---

**🚀 Happy Coding!** Your isolated development environment is ready for productive development with full database support and AI-powered tools.
Refer to [Coder’s devcontainer template guide](https://coder.com/docs/@v2.26.0/admin/templates/managing-templates/devcontainers/add-devcontainer) for broader context on how this Terraform fits into your deployment.
305 tf/apps.tf
@@ -1,13 +1,3 @@
# =============================================================================
# Coder Applications - Service Access Points
# Web interfaces and tools for development services
# =============================================================================

# =============================================================================
# IDE and Code Editor Access
# =============================================================================

# VS Code Server
resource "coder_app" "code_server" {
agent_id = coder_agent.main.id
slug = "code-server"
@@ -16,7 +6,6 @@ resource "coder_app" "code_server" {
icon = "/icon/code.svg"
subdomain = true
share = "owner"
order = 1

healthcheck {
url = "http://localhost:8080/healthz"
@@ -25,36 +14,23 @@ resource "coder_app" "code_server" {
}
}

# Terminal Access
resource "coder_app" "terminal" {
agent_id = coder_agent.main.id
slug = "terminal"
display_name = "Terminal"
icon = "/icon/terminal.svg"
command = "bash"
order = 2
}

# =============================================================================
# Service Port Forwarding for Docker Container Access
# Note: Using direct container URLs since containers are on same Docker network
# =============================================================================

# =============================================================================
# Database Management Interfaces
# =============================================================================

# pgAdmin - PostgreSQL Administration
resource "coder_app" "pgadmin" {
count = data.coder_parameter.enable_services.value && data.coder_parameter.enable_pgadmin.value ? 1 : 0
count = local.services_enabled && data.coder_parameter.enable_pgadmin.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "pgadmin"
display_name = "pgAdmin"
url = "http://localhost:5050"
icon = "/icon/postgresql.svg"
icon = "/icon/postgres.svg"
subdomain = true
share = "owner"
order = 10

healthcheck {
url = "http://localhost:5050"
@@ -63,263 +39,96 @@ resource "coder_app" "pgadmin" {
}
}

# Qdrant Dashboard - Vector Database Management
resource "coder_app" "qdrant" {
count = data.coder_parameter.enable_services.value ? 1 : 0
count = local.services_enabled ? 1 : 0
agent_id = coder_agent.main.id
slug = "qdrant-dashboard"
display_name = "Qdrant Dashboard"
slug = "qdrant"
display_name = "Qdrant"
url = "http://localhost:6333"
icon = "/icon/database.svg"
subdomain = false # Changed from true to false
subdomain = false
share = "owner"
order = 11

healthcheck {
url = "http://localhost:6333/health" # Use proper health endpoint
url = "http://localhost:6333/health"
interval = 30
threshold = 10
}
}

# =============================================================================
# Development Server Ports
# =============================================================================

# Next.js Development Server (default port 3000)
resource "coder_app" "nextjs_dev" {
agent_id = coder_agent.main.id
slug = "nextjs-3000"
display_name = "Next.js Dev Server"
url = "http://localhost:3000"
icon = "/icon/react.svg"
subdomain = true
share = "owner"

healthcheck {
url = "http://localhost:3000"
interval = 10
threshold = 10
}
}

# Generic Development Server (port 3000)
resource "coder_app" "dev_server_3000" {
agent_id = coder_agent.main.id
slug = "dev-3000"
display_name = "Dev Server (3000)"
url = "http://localhost:3000"
icon = "/icon/web.svg"
subdomain = true
share = "owner"
order = 21

healthcheck {
url = "http://localhost:3000"
interval = 10
threshold = 10
}
}

# API Server - FastAPI/Flask (port 8000)
resource "coder_app" "api_server_8000" {
agent_id = coder_agent.main.id
slug = "api-8000"
display_name = "API Server (8000)"
url = "http://localhost:8000"
icon = "/icon/api.svg"
subdomain = true
share = "owner"
order = 20

healthcheck {
url = "http://localhost:8000/health"
interval = 10
threshold = 10
}
}

# Vite Development Server (port 5173)
resource "coder_app" "vite_dev" {
agent_id = coder_agent.main.id
slug = "vite-5173"
display_name = "Vite Dev Server"
url = "http://localhost:5173"
icon = "/icon/web.svg"
subdomain = true
share = "owner"

healthcheck {
url = "http://localhost:5173"
interval = 10
threshold = 10
}
}

# Rust Development Server (port 8080)
resource "coder_app" "rust_server" {
agent_id = coder_agent.main.id
slug = "rust-8080"
display_name = "Rust Server (8080)"
url = "http://localhost:8080"
icon = "/icon/code.svg"
subdomain = true
share = "owner"

healthcheck {
url = "http://localhost:8080/health"
interval = 10
threshold = 10
}
}

# =============================================================================
# Data Science and Analytics Tools
# =============================================================================

# Jupyter Lab (if enabled)
resource "coder_app" "jupyter" {
count = data.coder_parameter.enable_jupyter.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "jupyter"
display_name = "Jupyter Lab"
display_name = "JupyterLab"
url = "http://localhost:8888"
icon = "/icon/python.svg"
icon = "/icon/jupyter.svg"
subdomain = true
share = "owner"

healthcheck {
url = "http://localhost:8888"
interval = 15
interval = 20
threshold = 10
}
}

# =============================================================================
# Utility and Management Applications
# =============================================================================

# Environment Information
resource "coder_app" "env_info" {
agent_id = coder_agent.main.id
slug = "env-info"
display_name = "Environment Info"
icon = "/icon/info.svg"
command = "bash -c 'if [ -x /home/coder/bin/devinfo ]; then /home/coder/bin/devinfo; elif [ -x $HOME/bin/devinfo ]; then $HOME/bin/devinfo; else echo \"devinfo script not found\"; echo \"Run workspace-setup.sh to install it\"; fi'"
locals {
dev_ports = {
"dev-3000" = {
display = "Web Dev (3000)"
url = "http://localhost:3000"
icon = "/icon/javascript.svg"
}
"api-8000" = {
display = "API (8000)"
url = "http://localhost:8000"
icon = "/icon/node.svg"
}
"vite-5173" = {
display = "Vite (5173)"
url = "http://localhost:5173"
icon = "/icon/typescript.svg"
}
}
}

# Database Connection Tester
resource "coder_app" "db_tester" {
count = data.coder_parameter.enable_services.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "db-tester"
display_name = "Database Tester"
icon = "/icon/terminal.svg"
command = "bash -c 'echo \"=== Database Connection Test ===\"; echo \"PostgreSQL: postgres-${local.workspace_id}:5432\"; echo \"Redis: redis-${local.workspace_id}:6379\"; echo \"Qdrant: qdrant-${local.workspace_id}:6333\"; echo; echo \"Test PostgreSQL:\"; if test -x /usr/bin/pg_isready; then /usr/bin/pg_isready -h postgres-${local.workspace_id} -p 5432 -U postgres || echo \"PostgreSQL not ready\"; else nc -zv postgres-${local.workspace_id} 5432 2>&1 | grep -q succeeded && echo \"PostgreSQL port open\" || echo \"PostgreSQL not accessible\"; fi; echo; echo \"Test Redis:\"; if test -x /usr/bin/redis-cli; then /usr/bin/redis-cli -h redis-${local.workspace_id} -p 6379 -a \"${var.redis_password}\" ping || echo \"Redis not ready\"; else nc -zv redis-${local.workspace_id} 6379 2>&1 | grep -q succeeded && echo \"Redis port open\" || echo \"Redis not accessible\"; fi; echo; echo \"Test Qdrant:\"; curl -f http://qdrant-${local.workspace_id}:6333 2>/dev/null && echo \"Qdrant ready\" || echo \"Qdrant not ready\"; echo; echo \"=== Port Forwarding Status ===\"; ps aux | grep -E \"socat.*5050|socat.*6333\" | grep -v grep || echo \"No port forwarding active\"; echo; read -p \"Press Enter to exit...\"'"
}
resource "coder_app" "dev_ports" {
for_each = local.dev_ports

# Port Forwarding Setup
resource "coder_app" "port_forward" {
count = data.coder_parameter.enable_services.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "port-forward"
display_name = "Start Port Forwarding"
icon = "/icon/terminal.svg"
command = "bash -c 'echo \"Checking port forwarding status...\"; ps aux | grep -E \"socat.*6333\" | grep -v grep || echo \"No Qdrant forwarding active\"; echo; echo \"Starting port forwarding...\"; killall socat 2>/dev/null || true; if command -v socat >/dev/null 2>&1; then nohup socat TCP6-LISTEN:6333,reuseaddr,fork TCP:qdrant-${local.workspace_id}:6333 >/tmp/socat-qdrant.log 2>&1 & echo \"Port forwarding started\"; else echo \"ERROR: socat not installed\"; fi; sleep 2; echo; echo \"Testing Qdrant connection...\"; curl -f http://localhost:6333 2>/dev/null && echo \"SUCCESS: Qdrant accessible\" || echo \"FAILED: Cannot reach Qdrant\"; echo; read -p \"Press Enter to exit...\"'"
}

# Qdrant Direct Access (Alternative)
resource "coder_app" "qdrant_direct" {
count = data.coder_parameter.enable_services.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "qdrant"
display_name = "Qdrant (Direct)"
url = "http://localhost:6333"
icon = "/icon/database.svg"
subdomain = false
slug = each.key
display_name = each.value.display
url = each.value.url
icon = each.value.icon
subdomain = true
share = "owner"
order = 12

healthcheck {
url = each.value.url
interval = 10
threshold = 10
}
}

# Qdrant Direct Test
resource "coder_app" "qdrant_test" {
count = data.coder_parameter.enable_services.value ? 1 : 0
resource "coder_app" "claude_cli" {
count = data.coder_parameter.enable_ai_tools.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "qdrant-test"
display_name = "Test Qdrant Access"
icon = "/icon/terminal.svg"
command = "bash -c 'echo \"Testing Qdrant access...\"; echo; echo \"1. Direct container access:\"; curl -f http://qdrant-${local.workspace_id}:6333 2>&1 | head -20; echo; echo \"2. Localhost forwarded access:\"; curl -f http://localhost:6333 2>&1 | head -20; echo; echo \"3. Socat process status:\"; ps aux | grep socat | grep -v grep || echo \"No socat processes running\"; echo; echo \"4. Socat logs:\"; tail -20 /tmp/socat-qdrant.log 2>/dev/null || echo \"No socat logs found\"; echo; read -p \"Press Enter to exit...\"'"
slug = "claude-cli"
display_name = "Claude CLI"
icon = "/icon/claude.svg"
command = "bash -lc 'claude --dangerously-skip-permissions'"
group = "AI Tools"
order = 10
}

# Development Logs Viewer
resource "coder_app" "dev_logs" {
resource "coder_app" "codex_cli" {
count = data.coder_parameter.enable_ai_tools.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "dev-logs"
display_name = "Development Logs"
icon = "/icon/terminal.svg"
command = "bash -c 'echo \"=== Coder Agent Logs ===\"; tail -f /tmp/coder-*.log 2>/dev/null || (echo \"No Coder agent logs found in /tmp\"; echo \"Press Ctrl+C to exit log viewer\"; sleep infinity)'"
slug = "codex-cli"
display_name = "Codex CLI"
icon = "/icon/code.svg"
command = "bash -lc 'codex --dangerously-bypass-approvals-and-sandbox'"
group = "AI Tools"
order = 20
}

# Install Socat Manually
resource "coder_app" "install_socat" {
count = data.coder_parameter.enable_services.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "install-socat"
display_name = "Install Socat"
icon = "/icon/terminal.svg"
command = "bash -c 'echo \"Installing socat for port forwarding...\"; if command -v apt-get >/dev/null 2>&1; then apt-get update && apt-get install -y socat && echo \"Socat installed successfully!\"; elif command -v apk >/dev/null 2>&1; then apk add --no-cache socat && echo \"Socat installed successfully!\"; else echo \"Cannot determine package manager\"; fi; echo; echo \"Starting port forwarding...\"; nohup socat TCP6-LISTEN:6333,reuseaddr,fork TCP:qdrant-${local.workspace_id}:6333 >/tmp/socat-qdrant.log 2>&1 & nohup socat TCP6-LISTEN:5050,reuseaddr,fork TCP:pgadmin-${local.workspace_id}:80 >/tmp/socat-pgadmin.log 2>&1 & echo \"Port forwarding started!\"; echo; read -p \"Press Enter to exit...\"'"
}

# Check Startup Script Logs
resource "coder_app" "startup_logs" {
agent_id = coder_agent.main.id
slug = "startup-logs"
display_name = "Startup Script Logs"
icon = "/icon/terminal.svg"
command = "bash -c 'echo \"=== Startup Script Logs ===\"; cat /tmp/startup-script.log 2>/dev/null || echo \"No startup script logs found\"; echo; echo \"=== Checking installed packages ===\"; which socat || echo \"socat: not found\"; which curl || echo \"curl: not found\"; which apt-get || echo \"apt-get: not found\"; echo; read -p \"Press Enter to exit...\"'"
}

# Git Repository Manager
resource "coder_app" "git_manager" {
agent_id = coder_agent.main.id
slug = "git"
display_name = "Git Repository"
icon = "/icon/git.svg"
command = "bash -c 'cd /workspaces && echo \"=== Git Repository Status ===\"; git status 2>/dev/null || echo \"Not a git repository\"; echo; echo \"=== Recent Commits ===\"; git log --oneline -10 2>/dev/null || echo \"No commits found\"; echo; bash'"
}

# =============================================================================
# AI Development Tools Access
# =============================================================================

# Claude Code CLI Access
resource "coder_app" "claude_code" {
count = data.coder_parameter.enable_ai_tools.value && data.coder_parameter.enable_claude_code.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "claude-code"
display_name = "Claude Code"
icon = "/icon/ai.svg"
command = "claude"
}

# JetBrains Gateway
resource "coder_app" "jetbrains_gateway" {
count = data.coder_parameter.enable_jetbrains.value ? 1 : 0
agent_id = coder_agent.main.id
slug = "jetbrains-gateway"
display_name = "JetBrains Gateway"
icon = "/icon/intellij.svg"
command = "bash -c 'echo \"🚀 JetBrains Gateway Integration\"; echo \"========================\"; echo \"\"; echo \"📍 Project Folder: /workspaces\"; echo \"🔧 Available IDEs: IntelliJ IDEA Ultimate, WebStorm, PyCharm Professional, GoLand\"; echo \"🌐 Default IDE: IntelliJ IDEA Ultimate\"; echo \"\"; echo \"💡 To connect:\"; echo \" 1. Install JetBrains Gateway on your local machine\"; echo \" 2. Connect to this workspace using Coder Gateway plugin\"; echo \" 3. Select your preferred IDE from the available options\"; echo \"\"; echo \"📚 Documentation: https://coder.com/docs/ides/gateway\"; echo \"\"; read -p \"Press Enter to continue...\";'"
order = 3
}

# File Manager
resource "coder_app" "file_manager" {
agent_id = coder_agent.main.id
slug = "files"
display_name = "File Manager"
icon = "/icon/folder.svg"
command = "bash -c 'export TERM=xterm-256color && cd /workspaces && (command -v ranger >/dev/null 2>&1 && ranger || (echo \"ranger not installed - using ls instead\"; echo; ls -la; echo; bash))'"
order = 5
}
317 tf/main.tf
@@ -1,271 +1,160 @@
|
||||
terraform {
|
||||
required_version = ">= 1.0"
|
||||
required_version = ">= 1.3.0"
|
||||
|
||||
required_providers {
|
||||
coder = {
|
||||
source = "coder/coder"
|
||||
version = "~> 2.0"
|
||||
version = ">= 2.7"
|
||||
}
|
||||
docker = {
|
||||
source = "kreuzwerker/docker"
|
||||
version = "~> 2.25"
|
||||
}
|
||||
envbuilder = {
|
||||
source = "coder/envbuilder"
|
||||
version = "~> 1.0"
|
||||
http = {
|
||||
source = "hashicorp/http"
|
||||
version = ">= 3.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
provider "coder" {}
|
||||
|
||||
provider "docker" {
|
||||
host = var.docker_socket != "" ? var.docker_socket : null
|
||||
}
|
||||
provider "envbuilder" {}
|
||||
|
||||
# Data Sources
|
||||
provider "http" {}
|
||||
|
||||
# Workspace context
|
||||
|
||||
data "coder_provisioner" "me" {}
|
||||
data "coder_workspace" "me" {}
|
||||
data "coder_workspace_owner" "me" {}
|
||||
|
||||
# Parameters
|
||||
data "coder_parameter" "repo" {
|
||||
name = "repo"
|
||||
display_name = "Repository"
|
||||
description = "Select a repository to clone"
|
||||
mutable = true
|
||||
order = 1
|
||||
# User inputs kept intentionally small so the template is easy to launch.
|
||||
|
||||
option {
|
||||
name = "Custom Development Environment"
|
||||
description = "Full-stack development with all services"
|
||||
value = "custom"
|
||||
}
|
||||
option {
|
||||
name = "vercel/next.js"
|
||||
description = "The React Framework"
|
||||
value = "https://github.com/vercel/next.js"
|
||||
}
|
||||
option {
|
||||
name = "Custom URL"
|
||||
description = "Specify a custom repo URL below"
|
||||
value = "custom-url"
|
||||
}
|
||||
}
|
||||
|
||||
data "coder_parameter" "custom_repo_url" {
|
||||
name = "custom_repo_url"
|
||||
display_name = "Custom Repository URL"
|
||||
description = "Enter a custom repository URL"
|
||||
data "coder_parameter" "project_repository" {
|
||||
name = "project_repository"
|
||||
display_name = "Project repository"
|
||||
description = "Optional Git URL cloned into /workspaces on first startup."
|
||||
default = ""
|
||||
mutable = true
|
||||
order = 2
|
||||
order = 1
|
||||
}
|
||||
|
||||
data "coder_parameter" "enable_services" {
|
||||
name = "enable_services"
|
||||
display_name = "Enable Database Services"
|
||||
description = "Enable PostgreSQL, Redis, Qdrant, and Docker Registry"
|
||||
display_name = "Enable PostgreSQL / Redis / Qdrant"
|
||||
description = "Provision bundled data services inside the workspace network."
|
||||
type = "bool"
|
||||
default = "true"
|
||||
mutable = true
|
||||
order = 2
|
||||
}
|
||||
|
||||
data "coder_parameter" "enable_ai_tools" {
|
||||
name = "enable_ai_tools"
|
||||
display_name = "Install AI tooling"
|
||||
description = "Run the bundled AI helper scripts (Claude, Cursor, Windsurf)."
|
||||
type = "bool"
|
||||
default = "true"
|
||||
mutable = true
|
||||
order = 3
|
||||
}
|
||||
|
||||
data "coder_parameter" "enable_ai_tools" {
|
||||
name = "enable_ai_tools"
|
||||
display_name = "Enable AI Assistant Tools"
|
||||
description = "Install Claude Code and AI development tools"
|
||||
data "coder_parameter" "enable_pgadmin" {
|
||||
name = "enable_pgadmin"
|
||||
display_name = "Expose pgAdmin"
|
||||
description = "Start the pgAdmin container when database services are enabled."
|
||||
type = "bool"
|
||||
default = "true"
|
||||
mutable = true
|
||||
order = 4
|
||||
}
|
||||
|
||||
data "coder_parameter" "enable_claude_code" {
|
||||
name = "enable_claude_code"
|
||||
display_name = "Enable Claude Code CLI"
|
||||
description = "Install Claude Code command-line interface"
|
||||
data "coder_parameter" "enable_jupyter" {
|
||||
name = "enable_jupyter"
|
||||
display_name = "Expose JupyterLab"
|
||||
description = "Start the optional JupyterLab container."
|
||||
type = "bool"
|
||||
default = "true"
|
||||
default = "false"
|
||||
mutable = true
|
||||
order = 5
|
||||
}
|
||||
|
||||
data "coder_parameter" "enable_cursor_support" {
|
||||
name = "enable_cursor_support"
|
||||
display_name = "Enable Cursor IDE Support"
|
||||
description = "Install Cursor IDE configuration and settings"
|
||||
data "coder_parameter" "enable_jetbrains" {
|
||||
name = "enable_jetbrains"
|
||||
display_name = "JetBrains Gateway"
|
||||
description = "Install JetBrains Gateway integration for this workspace."
|
||||
type = "bool"
|
||||
default = "true"
|
||||
mutable = true
|
||||
order = 6
|
||||
}
|
||||
|
||||
data "coder_parameter" "enable_windsurf_support" {
|
||||
name = "enable_windsurf_support"
|
||||
display_name = "Enable Windsurf IDE Support"
|
||||
description = "Install Windsurf IDE configuration and settings"
|
||||
type = "bool"
|
||||
default = "true"
|
||||
data "coder_parameter" "ai_prompt" {
|
||||
name = "AI Prompt"
|
||||
display_name = "AI Task Prompt"
|
||||
description = "Optional pre-filled prompt shown when starting a Claude Code task."
|
||||
type = "string"
|
||||
default = ""
|
||||
mutable = true
|
||||
order = 7
|
||||
form_type = "textarea"
|
||||
}
|
||||
|
||||
data "coder_parameter" "enable_jetbrains" {
|
||||
name = "enable_jetbrains"
|
||||
display_name = "Enable JetBrains Gateway"
|
||||
description = "Enable JetBrains Gateway integration for remote development"
|
||||
type = "bool"
|
||||
default = "true"
|
||||
mutable = true
|
||||
order = 8
|
||||
}
|
||||
|
||||
data "coder_parameter" "enable_jupyter" {
|
||||
name = "enable_jupyter"
|
||||
display_name = "Enable Jupyter Lab"
|
||||
description = "Enable Jupyter Lab for data science and notebook development"
|
||||
type = "bool"
|
||||
default = "false"
|
||||
mutable = true
|
||||
order = 9
|
||||
}
|
||||
|
||||
data "coder_parameter" "enable_pgadmin" {
|
||||
name = "enable_pgadmin"
|
||||
display_name = "Enable pgAdmin"
|
||||
description = "Enable pgAdmin web interface for PostgreSQL management"
|
||||
type = "bool"
|
||||
default = "true"
|
||||
mutable = true
|
||||
order = 10
|
||||
}
|
||||
|
||||
# Local Variables
|
||||
locals {
|
||||
# Container and workspace naming - use ID for immutability
|
||||
container_name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}"
|
||||
workspace_id = data.coder_workspace.me.id
|
||||
# Use workspace ID for volume naming to prevent destruction on rename
|
||||
volume_suffix = substr(data.coder_workspace.me.id, 0, 8)
|
||||
bind_mounts = [
|
||||
{ host = "${var.host_home_path}/.gitconfig", container = "/home/coder/.gitconfig" },
|
||||
{ host = "${var.host_home_path}/.git-credentials", container = "/home/coder/.git-credentials" },
|
||||
{ host = "${var.host_home_path}/.ssh", container = "/home/coder/.ssh" },
|
||||
{ host = "${var.host_home_path}/.zshrc", container = "/home/coder/.zshrc" },
|
||||
{ host = "${var.host_home_path}/.oh-my-zsh", container = "/home/coder/.oh-my-zsh" },
|
||||
{ host = "${var.host_home_path}/.zsh_history", container = "/home/coder/.zsh_history" },
|
||||
{ host = "${var.host_home_path}/.p10k.zsh", container = "/home/coder/.p10k.zsh" },
|
||||
{ host = "${var.host_home_path}/.claude", container = "/home/coder/.claude" },
|
||||
{ host = "${var.host_home_path}/.codex", container = "/home/coder/.codex" },
|
||||
{ host = "${var.host_home_path}/.1password", container = "/home/coder/.1password" },
|
||||
{ host = "${var.host_home_path}/.config", container = "/home/coder/.config" },
|
||||
{ host = "${var.host_home_path}", container = "/home/coder", read_only = false },
|
||||
{ host = "${var.host_home_path}/.local", container = "/home/coder/.local" },
|
||||
{ host = "${var.host_home_path}/.cache", container = "/home/coder/.cache" },
|
||||
{ host = "${var.host_home_path}/.docker/config.json", container = "/home/coder/.docker/config.json" },
|
||||
{ host = "${var.host_home_path}/code-tools", container = "/home/coder/code-tools" },
|
||||
{ host = "${var.host_home_path}/claude-scripts", container = "/home/coder/claude-scripts" },
|
||||
]
|
||||
|
||||
workspace_id = data.coder_workspace.me.id
|
||||
container_name = "coder-${local.workspace_id}"
|
||||
|
||||
# Git configuration
|
||||
git_author_name = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name)
|
||||
git_author_email = data.coder_workspace_owner.me.email
|
||||
|
||||
# Repository URL logic
|
||||
repo_url = (
|
||||
data.coder_parameter.repo.value == "custom" ? "https://github.com/coder/envbuilder" :
|
||||
data.coder_parameter.repo.value == "custom-url" ? data.coder_parameter.custom_repo_url.value :
|
||||
data.coder_parameter.repo.value
|
||||
)
|
||||
project_repo_url = trimspace(data.coder_parameter.project_repository.value)
|
||||
repo_clone_command = local.project_repo_url != "" ? "git clone ${local.project_repo_url} /workspaces" : "echo 'No repository requested'"
|
||||
|
||||
# Development container image is now built by envbuilder
|
||||
# devcontainer_image = var.devcontainer_image # Deprecated
|
||||
services_enabled = data.coder_parameter.enable_services.value
|
||||
pgadmin_enabled = data.coder_parameter.enable_pgadmin.value
|
||||
jupyter_enabled = data.coder_parameter.enable_jupyter.value
|
||||
port_forwarding = local.services_enabled || local.jupyter_enabled
|
||||
|
||||
# Environment variables for the development container
|
||||
dev_environment = {
|
||||
# Git configuration
|
||||
"GIT_AUTHOR_NAME" = local.git_author_name
|
||||
"GIT_AUTHOR_EMAIL" = local.git_author_email
|
||||
"GIT_COMMITTER_NAME" = local.git_author_name
|
||||
"GIT_COMMITTER_EMAIL" = local.git_author_email
|
||||
|
||||
# Development tools
|
||||
"NODE_VERSION" = var.node_version
|
||||
"PYTHON_VERSION" = var.python_version
|
||||
"RUST_VERSION" = "stable"
|
||||
|
||||
# Service URLs (when services are enabled)
|
||||
"POSTGRES_URL" = data.coder_parameter.enable_services.value ? "postgresql://postgres:${var.postgres_password}@postgres-${local.workspace_id}:5432/postgres" : ""
|
||||
"REDIS_URL" = data.coder_parameter.enable_services.value ? "redis://:${var.redis_password}@redis-${local.workspace_id}:6379" : ""
|
||||
"QDRANT_URL" = data.coder_parameter.enable_services.value ? "http://qdrant-${local.workspace_id}:6333" : ""
|
||||
|
||||
# Development configuration
|
||||
"EDITOR" = "code"
|
||||
"PYTHONPATH" = "/workspaces"
|
||||
"CARGO_HOME" = "/home/coder/.cargo"
|
||||
"RUSTUP_HOME" = "/home/coder/.rustup"
|
||||
}
|
||||
|
||||
# Legacy service URLs for backward compatibility
|
||||
postgres_url = "postgresql://postgres:${var.postgres_password}@postgres-${local.workspace_id}:5432/postgres"
|
||||
redis_url = "redis://:${var.redis_password}@redis-${local.workspace_id}:6379"
|
||||
qdrant_url = "http://qdrant-${local.workspace_id}:6333"
|
||||
|
||||
# Port forwarding script for services
|
||||
port_forward_script = <<-SCRIPT
|
||||
#!/bin/bash
|
||||
export NVM_SYMLINK_CURRENT=false
|
||||
export CODER_WORKSPACE_ID="${local.workspace_id}"
|
||||
echo 'Starting workspace with services enabled...'
|
||||
|
||||
# Ensure tools are in PATH
|
||||
export PATH=/usr/bin:/usr/local/bin:$$PATH
|
||||
|
||||
# Install essential tools - run synchronously to avoid conflicts
|
||||
echo "Installing essential tools for port forwarding..."
|
||||
|
||||
if command -v apt-get >/dev/null 2>&1; then
|
||||
# Wait for any existing APT operations to complete
|
||||
echo "Checking APT availability..."
|
||||
|
||||
# Try up to 12 times with 5 second delays (60 seconds total)
|
||||
for i in 1 2 3 4 5 6 7 8 9 10 11 12; do
|
||||
if ! fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1; then
|
||||
echo "APT is available"
|
||||
break
|
||||
fi
|
||||
echo "Waiting for APT to become available... attempt $$i of 12"
|
||||
sleep 5
|
||||
done
|
||||
|
||||
# Now install our tools
|
||||
echo "Updating package lists..."
|
||||
apt-get update -qq || true
|
||||
|
||||
echo "Installing socat for port forwarding..."
|
||||
# Retry socat installation if it fails
|
||||
for attempt in 1 2 3; do
|
||||
if apt-get install -y socat 2>/dev/null; then
|
||||
echo "Socat installed successfully"
|
||||
break
|
||||
else
|
||||
echo "Failed to install socat (attempt $$attempt), retrying..."
|
||||
sleep 5
|
||||
fi
|
||||
done
|
||||
|
||||
echo "Installing other tools..."
|
||||
apt-get install -y ranger postgresql-client redis-tools || echo "Some tools failed to install"
|
||||
|
||||
elif command -v apk >/dev/null 2>&1; then
|
||||
# Alpine Linux
|
||||
apk add --no-cache socat ranger postgresql-client redis || true
|
||||
fi
|
||||
|
||||
# Start port forwarding only after tools are installed
|
||||
echo "Setting up port forwarding..."
|
||||
|
||||
if command -v socat >/dev/null 2>&1; then
|
||||
echo "Starting port forwarding..."
|
||||
if [ "${data.coder_parameter.enable_pgadmin.value}" = "true" ]; then
|
||||
echo 'Forwarding pgAdmin (localhost:5050 -> pgadmin-${local.workspace_id}:80)...'
|
||||
nohup socat TCP-LISTEN:5050,reuseaddr,fork TCP:pgadmin-${local.workspace_id}:80 >/tmp/socat-pgadmin.log 2>&1 &
|
||||
fi
|
||||
echo 'Forwarding Qdrant (localhost:6333 -> qdrant-${local.workspace_id}:6333)...'
|
||||
nohup socat TCP6-LISTEN:6333,reuseaddr,fork TCP:qdrant-${local.workspace_id}:6333 >/tmp/socat-qdrant.log 2>&1 &
|
||||
echo "Port forwarding started"
|
||||
else
|
||||
echo "Warning: socat not available, port forwarding cannot start"
|
||||
echo "You may need to manually install socat and restart port forwarding"
|
||||
fi
|
||||
|
||||
echo 'Workspace startup initiated.'
|
||||
SCRIPT
|
||||
agent_startup = join("\n", compact([
|
||||
"set -eu",
|
||||
"export CODER_WORKSPACE_ID=${local.workspace_id}",
|
||||
"git config --global user.name \"${local.git_author_name}\"",
|
||||
"git config --global user.email \"${local.git_author_email}\"",
|
||||
local.project_repo_url != "" ? "if [ ! -d /workspaces/.git ]; then ${local.repo_clone_command}; fi" : "",
|
||||
"export ENABLE_PGADMIN=${tostring(local.pgadmin_enabled)}",
|
||||
"export ENABLE_JUPYTER=${tostring(local.jupyter_enabled)}",
|
||||
local.port_forwarding ? "bash /home/coder/code-tools/tf/scripts/port-forward.sh" : "echo 'No service port forwarding requested'"
|
||||
]))
|
||||
}
|
||||

# Docker Network
# Workspace network keeps the workspace stack isolated from the host.
resource "docker_network" "workspace" {
  name   = "coder-${local.workspace_id}"
  driver = "bridge"
@@ -274,17 +163,14 @@ resource "docker_network" "workspace" {
    label = "coder.workspace_id"
    value = local.workspace_id
  }

  labels {
    label = "coder.owner"
    value = data.coder_workspace_owner.me.name
  }
  labels {
    label = "coder.project"
    value = var.project_name
  }
}

# Workspace Volume
# Persistent workspace data volume mounted at /workspaces inside the container.
resource "docker_volume" "workspaces" {
  name = "workspaces-${local.workspace_id}"

@@ -292,31 +178,16 @@ resource "docker_volume" "workspaces" {
    label = "coder.workspace_id"
    value = local.workspace_id
  }
  labels {
    label = "coder.owner"
    value = data.coder_workspace_owner.me.name
  }

  labels {
    label = "coder.type"
    value = "workspace-data"
  }
}

# Temporarily use the base image until we can push the devcontainer files
# TODO: Uncomment the envbuilder block after pushing to git
# resource "envbuilder_cached_image" "devcontainer" {
#   builder_image = "ghcr.io/coder/envbuilder:latest"
#   git_url       = var.devcontainer_repo_url
#
#   # Use the devcontainer.json from our repository
#   devcontainer_dir = ".devcontainer"
#
#   # Cache settings for faster builds
#   cache_repo = var.envbuilder_cache_repo
# }

# Development Container Image - using base image for now
# Separate persistent home directory for the coder user.
# Base development container image (customise via terraform.tfvars).
resource "docker_image" "devcontainer" {
  name         = var.devcontainer_image
  keep_locally = true
}
}
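After terraform apply, the network and volume declared above can be sanity-checked from the Docker host. A minimal sketch, with a placeholder workspace ID standing in for local.workspace_id:

WS_ID="abc123"   # placeholder; the real value comes from local.workspace_id
docker network inspect "coder-${WS_ID}" --format '{{.Driver}}'            # expect: bridge
docker volume inspect "workspaces-${WS_ID}" --format '{{json .Labels}}'   # shows the coder.* labels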
tf/outputs.tf
@@ -1,119 +1,46 @@
# =============================================================================
# Terraform Outputs
# Expose important values for reference and external use
# =============================================================================

# =============================================================================
# Workspace Information
# =============================================================================

output "workspace_id" {
  description = "Unique identifier for the Coder workspace"
  description = "Coder workspace ID"
  value       = local.workspace_id
}

output "workspace_name" {
  description = "Name of the Coder workspace"
  description = "Coder workspace name"
  value       = data.coder_workspace.me.name
}

output "workspace_owner" {
  description = "Owner of the Coder workspace"
  value       = data.coder_workspace_owner.me.name
}

output "container_name" {
  description = "Name of the main development container"
  description = "Name of the workspace Docker container"
  value       = local.container_name
}

# =============================================================================
# Git Configuration
# =============================================================================

output "git_author_name" {
  description = "Git author name configured for the workspace"
  value       = local.git_author_name
output "project_repository" {
  description = "Repository cloned into /workspaces on first startup"
  value       = local.project_repo_url
}

output "git_author_email" {
  description = "Git author email configured for the workspace"
  value       = local.git_author_email
output "postgres_url" {
  description = "Internal PostgreSQL connection string"
  value       = local.services_enabled ? local.postgres_url : null
  sensitive   = true
}

output "repository_url" {
  description = "Repository URL that will be cloned"
  value       = local.repo_url
}

# =============================================================================
# Service Connection Information
# =============================================================================

output "postgres_connection_url" {
  description = "PostgreSQL connection URL for applications"
  value       = data.coder_parameter.enable_services.value ? local.postgres_url : null
output "redis_url" {
  description = "Internal Redis connection string"
  value       = local.services_enabled ? local.redis_url : null
  sensitive   = true
}

output "redis_connection_url" {
  description = "Redis connection URL for applications"
  value       = data.coder_parameter.enable_services.value ? local.redis_url : null
  sensitive   = true
output "qdrant_url" {
  description = "Internal Qdrant endpoint"
  value       = local.services_enabled ? local.qdrant_url : null
}

output "qdrant_api_url" {
  description = "Qdrant vector database API URL"
  value       = data.coder_parameter.enable_services.value ? local.qdrant_url : null
}


# =============================================================================
# Network Information
# =============================================================================

output "docker_network_name" {
  description = "Name of the Docker network for service communication"
  description = "Docker network assigned to this workspace"
  value       = docker_network.workspace.name
}

output "workspace_volume_name" {
  description = "Name of the persistent workspace volume"
  description = "Docker volume used for /workspaces"
  value       = docker_volume.workspaces.name
}

# =============================================================================
# Development Environment Information
# =============================================================================

output "development_tools" {
  description = "Information about installed development tools"
  value = {
    node_version    = var.node_version
    python_version  = var.python_version
    container_image = docker_image.devcontainer.name
    git_repo_url    = var.devcontainer_repo_url
  }
}

output "service_status" {
  description = "Status of optional development services"
  value = {
    services_enabled = data.coder_parameter.enable_services.value
    ai_tools_enabled = data.coder_parameter.enable_ai_tools.value
    pgadmin_enabled  = var.enable_pgadmin
    jupyter_enabled  = var.enable_jupyter
  }
}

# =============================================================================
# Access Information
# =============================================================================

output "external_ports" {
  description = "External ports exposed for services"
  value = data.coder_parameter.enable_services.value ? {
    pgadmin = var.enable_pgadmin ? var.pgadmin_port : null
  } : {}
}
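The outputs above can be read back once the workspace has been applied; the connection strings are marked sensitive, so they only print their raw value via -raw or -json (jq is assumed for pretty-printing):

terraform output workspace_id
terraform output -raw postgres_url      # sensitive: -raw prints the bare connection string
terraform output -json service_status | jq .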
tf/scripts.tf
@@ -1,86 +1,68 @@
# =============================================================================
# Provisioning Scripts - AI Development Tools and Extensions
# Installation scripts for Cursor, Claude Code, Windsurf support
# With inline line ending fixes for GUI paste issues
# =============================================================================
locals {
  core_scripts = {
    workspace = {
      display = "Setup Development Workspace"
      icon    = "/icon/container.svg"
      path    = "${path.module}/scripts/workspace-setup.sh"
    }
    dev_tools = {
      display = "Install Development Tools"
      icon    = "/icon/code.svg"
      path    = "${path.module}/scripts/dev-tools.sh"
    }
    git_hooks = {
      display = "Configure Git Hooks"
      icon    = "/icon/git.svg"
      path    = "${path.module}/scripts/git-hooks.sh"
    }
  }

# =============================================================================
# Claude Code CLI Installation
# =============================================================================

resource "coder_script" "claude_code_setup" {
  count        = data.coder_parameter.enable_ai_tools.value && data.coder_parameter.enable_claude_code.value ? 1 : 0
  agent_id     = coder_agent.main.id
  display_name = "Install Claude Code CLI"
  icon         = "/icon/ai.svg"
  run_on_start = true

  script = "echo '${base64encode(file("${path.module}/scripts/claude-install.sh"))}' | base64 -d | tr -d '\\r' | bash"
  ai_scripts = {
    claude = {
      enabled = data.coder_parameter.enable_ai_tools.value && var.install_claude_code
      display = "Install Claude CLI"
      icon    = "/icon/claude.svg"
      path    = "${path.module}/scripts/claude-install.sh"
    }
    codex = {
      enabled = data.coder_parameter.enable_ai_tools.value && var.install_codex_support
      display = "Install Codex CLI"
      icon    = "/icon/code.svg"
      path    = "${path.module}/scripts/codex-setup.sh"
    }
    cursor = {
      enabled = data.coder_parameter.enable_ai_tools.value && var.install_cursor_support
      display = "Configure Cursor"
      icon    = "/icon/cursor.svg"
      path    = "${path.module}/scripts/cursor-setup.sh"
    }
    windsurf = {
      enabled = data.coder_parameter.enable_ai_tools.value && var.install_windsurf_support
      display = "Configure Windsurf"
      icon    = "/icon/windsurf.svg"
      path    = "${path.module}/scripts/windsurf-setup.sh"
    }
  }
}

# =============================================================================
# Cursor IDE Support Setup
# =============================================================================
resource "coder_script" "core" {
  for_each = local.core_scripts

resource "coder_script" "cursor_setup" {
  count        = data.coder_parameter.enable_ai_tools.value && data.coder_parameter.enable_cursor_support.value ? 1 : 0
  agent_id     = coder_agent.main.id
  display_name = "Configure Cursor IDE Support"
  icon         = "/icon/code.svg"
  display_name = each.value.display
  icon         = each.value.icon
  run_on_start = true

  script = "echo '${base64encode(file("${path.module}/scripts/cursor-setup.sh"))}' | base64 -d | tr -d '\\r' | bash"
  script = "echo '${base64encode(file(each.value.path))}' | base64 -d | tr -d '\\r' | bash"
}

# =============================================================================
# Windsurf IDE Support Setup
# =============================================================================
resource "coder_script" "ai" {
  for_each = { for key, value in local.ai_scripts : key => value if value.enabled }

resource "coder_script" "windsurf_setup" {
  count        = data.coder_parameter.enable_ai_tools.value && data.coder_parameter.enable_windsurf_support.value ? 1 : 0
  agent_id     = coder_agent.main.id
  display_name = "Configure Windsurf IDE Support"
  icon         = "/icon/code.svg"
  display_name = each.value.display
  icon         = each.value.icon
  run_on_start = true

  script = "echo '${base64encode(file("${path.module}/scripts/windsurf-setup.sh"))}' | base64 -d | tr -d '\\r' | bash"
  script = "echo '${base64encode(file(each.value.path))}' | base64 -d | tr -d '\\r' | bash"
}

# =============================================================================
# Development Tools and Extensions
# =============================================================================

resource "coder_script" "dev_extensions" {
  agent_id     = coder_agent.main.id
  display_name = "Install Development Tools"
  icon         = "/icon/tools.svg"
  run_on_start = true

  script = "echo '${base64encode(file("${path.module}/scripts/dev-tools.sh"))}' | base64 -d | tr -d '\\r' | bash"
}

# =============================================================================
# Git Hooks and Metadata Capture Setup
# =============================================================================

resource "coder_script" "git_hooks_setup" {
  agent_id     = coder_agent.main.id
  display_name = "Setup Git Hooks"
  icon         = "/icon/git.svg"
  run_on_start = true

  script = "echo '${base64encode(file("${path.module}/scripts/git-hooks.sh"))}' | base64 -d | tr -d '\\r' | bash"
}

# =============================================================================
# Workspace Setup (Comprehensive Environment Configuration)
# =============================================================================

resource "coder_script" "workspace_setup" {
  agent_id     = coder_agent.main.id
  display_name = "Setup Development Workspace"
  icon         = "/icon/tools.svg"
  run_on_start = true

  script = "CODER_WORKSPACE_ID=${local.workspace_id} echo '${base64encode(file("${path.module}/scripts/workspace-setup.sh"))}' | base64 -d | tr -d '\\r' | bash"
}
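Every coder_script above embeds its shell file as a base64 payload and strips carriage returns before executing it, which avoids CRLF breakage when the scripts are edited on Windows. A standalone sketch of the same decode pipeline, using a throwaway payload rather than one of the real scripts:

payload=$(printf 'echo "hello from an embedded script"\r\n' | base64)
echo "$payload" | base64 -d | tr -d '\r' | bash   # prints: hello from an embedded script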
tf/scripts/claude-install.sh (Normal file → Executable file)
@@ -1,117 +1,31 @@
#!/bin/bash
|
||||
# Convert CRLF to LF if present (handles Windows line endings)
|
||||
if command -v dos2unix >/dev/null 2>&1; then
|
||||
dos2unix "$0" 2>/dev/null || true
|
||||
fi
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
set -e
|
||||
echo "🤖 Installing Claude Code CLI..."
|
||||
|
||||
# Check if already installed
|
||||
if command -v claude >/dev/null 2>&1; then
|
||||
echo "✅ Claude Code already installed"
|
||||
claude --version || echo "Claude Code version check failed"
|
||||
exit 0
|
||||
echo "Claude CLI already installed ($(claude --version))"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Cross-platform home directory detection
|
||||
if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then
|
||||
HOME_DIR="${USERPROFILE:-$HOME}"
|
||||
BIN_DIR="$HOME_DIR/bin"
|
||||
# Windows/WSL/Git Bash compatibility
|
||||
export NVM_DIR="$HOME_DIR/.nvm"
|
||||
else
|
||||
HOME_DIR="$HOME"
|
||||
BIN_DIR="/home/coder/bin"
|
||||
export NVM_DIR="$HOME/.nvm"
|
||||
mkdir -p "$HOME/bin"
|
||||
|
||||
if ! command -v npm >/dev/null 2>&1; then
|
||||
echo "npm is required to install the Claude CLI" >&2
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Ensure npm is available
|
||||
# First, try to find npm in common locations
|
||||
NPM_PATHS=(
|
||||
"/usr/bin/npm"
|
||||
"/usr/local/bin/npm"
|
||||
"$HOME/.nvm/versions/node/*/bin/npm"
|
||||
"/home/coder/.nvm/versions/node/*/bin/npm"
|
||||
"/opt/nodejs/bin/npm"
|
||||
)
|
||||
npm install -g @anthropic-ai/claude-code >/dev/null
|
||||
|
||||
NPM_CMD=""
|
||||
for npm_path in "${NPM_PATHS[@]}"; do
|
||||
if [[ -f "$npm_path" ]] || [[ -x "$npm_path" ]]; then
|
||||
NPM_CMD="$npm_path"
|
||||
break
|
||||
fi
|
||||
done
|
||||
cat <<'SCRIPT' > "$HOME/bin/claude-help"
|
||||
#!/usr/bin/env bash
|
||||
cat <<'TXT'
|
||||
Claude CLI quick start
|
||||
----------------------
|
||||
claude auth login # authenticate
|
||||
claude chat # open an interactive chat
|
||||
claude edit <file> # AI assisted editing
|
||||
claude analyze . # Review the current directory
|
||||
TXT
|
||||
SCRIPT
|
||||
chmod +x "$HOME/bin/claude-help"
|
||||
|
||||
# Try sourcing nvm if npm not found yet
|
||||
if [[ -z "$NPM_CMD" ]] && [[ -s "$NVM_DIR/nvm.sh" ]]; then
|
||||
# Use POSIX-compatible sourcing
|
||||
. "$NVM_DIR/nvm.sh"
|
||||
NPM_CMD=$(command -v npm 2>/dev/null || true)
|
||||
fi
|
||||
|
||||
# Final check for npm in PATH
|
||||
if [[ -z "$NPM_CMD" ]]; then
|
||||
NPM_CMD=$(command -v npm 2>/dev/null || true)
|
||||
fi
|
||||
|
||||
if [[ -z "$NPM_CMD" ]] || [[ ! -x "$NPM_CMD" ]]; then
|
||||
echo "❌ npm not found - Node.js installation required"
|
||||
echo "Searched in: ${NPM_PATHS[*]}"
|
||||
echo "PATH: $PATH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Found npm at: $NPM_CMD"
|
||||
|
||||
echo "📥 Installing Claude Code CLI..."
|
||||
$NPM_CMD install -g @anthropic-ai/claude-code
|
||||
|
||||
# Verify installation
|
||||
if command -v claude >/dev/null 2>&1; then
|
||||
echo "✅ Claude Code installed successfully!"
|
||||
echo "🔧 Run 'claude auth login' to authenticate"
|
||||
echo "💡 Use 'claude chat' for interactive assistance"
|
||||
echo "💡 Use 'claude edit <file>' to edit files with AI"
|
||||
|
||||
# Create helper script with proper line endings
|
||||
mkdir -p "$BIN_DIR"
|
||||
|
||||
# Use printf instead of cat with heredoc to ensure consistent line endings
|
||||
{
|
||||
printf '#!/bin/bash\n'
|
||||
printf 'echo "🤖 Claude Code AI Assistant"\n'
|
||||
printf 'echo "=========================="\n'
|
||||
printf 'echo ""\n'
|
||||
printf 'echo "Authentication:"\n'
|
||||
printf 'echo " claude auth login # Authenticate with Anthropic"\n'
|
||||
printf 'echo " claude auth logout # Sign out"\n'
|
||||
printf 'echo " claude auth whoami # Check current user"\n'
|
||||
printf 'echo ""\n'
|
||||
printf 'echo "Interactive Chat:"\n'
|
||||
printf 'echo " claude chat # Start interactive session"\n'
|
||||
printf 'echo " claude chat '\''question'\'' # Single question"\n'
|
||||
printf 'echo ""\n'
|
||||
printf 'echo "File Editing:"\n'
|
||||
printf 'echo " claude edit file.py # AI-powered file editing"\n'
|
||||
printf 'echo " claude edit --help # Edit command options"\n'
|
||||
printf 'echo ""\n'
|
||||
printf 'echo "Code Analysis:"\n'
|
||||
printf 'echo " claude analyze . # Analyze current directory"\n'
|
||||
printf 'echo " claude review file.py # Code review"\n'
|
||||
printf 'echo ""\n'
|
||||
printf 'echo "Project Operations:"\n'
|
||||
printf 'echo " claude init # Initialize Claude in project"\n'
|
||||
printf 'echo " claude status # Show project status"\n'
|
||||
printf 'echo ""\n'
|
||||
printf 'echo "💡 For full documentation: https://docs.anthropic.com/claude/docs"\n'
|
||||
} > "$BIN_DIR/claude-help"
|
||||
|
||||
chmod +x "$BIN_DIR/claude-help"
|
||||
|
||||
echo "💡 Run 'claude-help' for quick reference"
|
||||
else
|
||||
echo "❌ Claude Code installation failed"
|
||||
exit 1
|
||||
fi
|
||||
echo "Claude CLI installed. Run 'claude-help' for usage tips."
|
||||
|
||||
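If the freshly installed claude binary is not found afterwards, the usual cause is npm's global bin directory missing from PATH. A troubleshooting sketch, not part of the script above:

npm prefix -g                                # where global npm packages are installed
export PATH="$(npm prefix -g)/bin:$PATH"     # add its bin directory for the current shell
command -v claude && claude --version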
tf/scripts/codex-setup.sh (Executable file, new)
@@ -0,0 +1,15 @@
#!/usr/bin/env bash
set -euo pipefail

if ! command -v npm >/dev/null 2>&1; then
  echo 'npm not found; skipping Codex CLI install.' >&2
  exit 0
fi

if command -v sudo >/dev/null 2>&1; then
  sudo npm install -g @openai/codex >/dev/null 2>&1 || sudo npm install -g @openai/codex
else
  npm install -g @openai/codex >/dev/null 2>&1 || npm install -g @openai/codex
fi

echo 'OpenAI Codex CLI installed or already present.'
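Where sudo is unavailable and the default global prefix is not writable, a per-user npm prefix is one alternative to the sudo path taken above (illustrative only; the directory name is arbitrary):

npm config set prefix "$HOME/.npm-global"
export PATH="$HOME/.npm-global/bin:$PATH"
npm install -g @openai/codex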
tf/scripts/cursor-setup.sh (Normal file → Executable file)
@@ -1,203 +1,27 @@
#!/bin/bash
|
||||
# Convert CRLF to LF if present (handles Windows line endings)
|
||||
if command -v dos2unix >/dev/null 2>&1; then
|
||||
dos2unix "$0" 2>/dev/null || true
|
||||
fi
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
set -e
|
||||
echo "🎯 Setting up Cursor IDE support..."
|
||||
CURSOR_DIR="$HOME/.cursor-server"
|
||||
mkdir -p "$CURSOR_DIR/data/User" "$CURSOR_DIR/extensions"
|
||||
|
||||
# Cross-platform user and directory detection
|
||||
if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then
|
||||
HOME_DIR="${USERPROFILE:-$HOME}"
|
||||
USER_NAME="${USERNAME:-${USER:-coder}}"
|
||||
CURSOR_DIR="$HOME_DIR/.cursor-server"
|
||||
else
|
||||
HOME_DIR="${HOME:-/home/coder}"
|
||||
USER_NAME="${USER:-coder}"
|
||||
CURSOR_DIR="$HOME_DIR/.cursor-server"
|
||||
fi
|
||||
|
||||
# Create Cursor configuration directories
|
||||
mkdir -p "$CURSOR_DIR/data/User"
|
||||
mkdir -p "$CURSOR_DIR/extensions"
|
||||
|
||||
# Create optimized Cursor settings using printf to ensure LF line endings
|
||||
cat <<'JSON' > "$CURSOR_DIR/data/User/settings.json"
|
||||
{
|
||||
printf '{\n'
|
||||
printf ' "workbench.colorTheme": "Dark+ (default dark)",\n'
|
||||
printf ' "editor.fontSize": 14,\n'
|
||||
printf ' "editor.tabSize": 2,\n'
|
||||
printf ' "editor.insertSpaces": true,\n'
|
||||
printf ' "editor.formatOnSave": true,\n'
|
||||
printf ' "editor.codeActionsOnSave": {\n'
|
||||
printf ' "source.fixAll": true,\n'
|
||||
printf ' "source.organizeImports": true\n'
|
||||
printf ' },\n'
|
||||
printf ' "files.autoSave": "afterDelay",\n'
|
||||
printf ' "files.autoSaveDelay": 1000,\n'
|
||||
printf ' "terminal.integrated.fontSize": 13,\n'
|
||||
printf ' "git.enableSmartCommit": true,\n'
|
||||
printf ' "git.confirmSync": false,\n'
|
||||
printf ' "python.defaultInterpreterPath": "%s/.venv/bin/python",\n' "$HOME_DIR"
|
||||
printf ' "python.linting.enabled": true,\n'
|
||||
printf ' "python.linting.pylintEnabled": false,\n'
|
||||
printf ' "python.linting.flake8Enabled": true,\n'
|
||||
printf ' "typescript.preferences.includePackageJsonAutoImports": "auto",\n'
|
||||
printf ' "javascript.preferences.includePackageJsonAutoImports": "auto",\n'
|
||||
printf ' "cursor.chat.showInEditorContextMenu": true,\n'
|
||||
printf ' "cursor.chat.alwaysShowInEditorContextMenu": true,\n'
|
||||
printf ' "cursor.general.enableWindowAIFeatures": true\n'
|
||||
printf '}\n'
|
||||
} > "$CURSOR_DIR/data/User/settings.json"
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2,
|
||||
"files.autoSave": "afterDelay",
|
||||
"files.autoSaveDelay": 1000,
|
||||
"git.enableSmartCommit": true,
|
||||
"terminal.integrated.defaultProfile.linux": "bash"
|
||||
}
|
||||
JSON
|
||||
|
||||
# Create development tasks configuration
|
||||
{
|
||||
printf '{\n'
|
||||
printf ' "version": "2.0.0",\n'
|
||||
printf ' "tasks": [\n'
|
||||
printf ' {\n'
|
||||
printf ' "label": "Dev Server",\n'
|
||||
printf ' "type": "shell",\n'
|
||||
printf ' "command": "npm run dev",\n'
|
||||
printf ' "group": "build",\n'
|
||||
printf ' "presentation": {\n'
|
||||
printf ' "echo": true,\n'
|
||||
printf ' "reveal": "always",\n'
|
||||
printf ' "focus": false,\n'
|
||||
printf ' "panel": "new"\n'
|
||||
printf ' },\n'
|
||||
printf ' "problemMatcher": []\n'
|
||||
printf ' },\n'
|
||||
printf ' {\n'
|
||||
printf ' "label": "Python Dev Server",\n'
|
||||
printf ' "type": "shell",\n'
|
||||
printf ' "command": "uvicorn main:app --reload --host 0.0.0.0 --port 8000",\n'
|
||||
printf ' "group": "build",\n'
|
||||
printf ' "presentation": {\n'
|
||||
printf ' "echo": true,\n'
|
||||
printf ' "reveal": "always",\n'
|
||||
printf ' "focus": false,\n'
|
||||
printf ' "panel": "new"\n'
|
||||
printf ' },\n'
|
||||
printf ' "problemMatcher": []\n'
|
||||
printf ' },\n'
|
||||
printf ' {\n'
|
||||
printf ' "label": "Install Dependencies",\n'
|
||||
printf ' "type": "shell",\n'
|
||||
printf ' "command": "npm install",\n'
|
||||
printf ' "group": "build",\n'
|
||||
printf ' "presentation": {\n'
|
||||
printf ' "echo": true,\n'
|
||||
printf ' "reveal": "always",\n'
|
||||
printf ' "focus": false,\n'
|
||||
printf ' "panel": "new"\n'
|
||||
printf ' }\n'
|
||||
printf ' },\n'
|
||||
printf ' {\n'
|
||||
printf ' "label": "Python Install",\n'
|
||||
printf ' "type": "shell",\n'
|
||||
printf ' "command": "uv pip install -r requirements.txt",\n'
|
||||
printf ' "group": "build",\n'
|
||||
printf ' "presentation": {\n'
|
||||
printf ' "echo": true,\n'
|
||||
printf ' "reveal": "always",\n'
|
||||
printf ' "focus": false,\n'
|
||||
printf ' "panel": "new"\n'
|
||||
printf ' }\n'
|
||||
printf ' }\n'
|
||||
printf ' ]\n'
|
||||
printf '}\n'
|
||||
} > "$CURSOR_DIR/data/User/tasks.json"
|
||||
cat <<'JSON' > "$CURSOR_DIR/data/User/keybindings.json"
|
||||
[
|
||||
{
|
||||
"key": "ctrl+shift+;",
|
||||
"command": "workbench.action.terminal.toggleTerminal"
|
||||
}
|
||||
]
|
||||
JSON
|
||||
|
||||
# Create useful code snippets
|
||||
mkdir -p "$CURSOR_DIR/data/User/snippets"
|
||||
{
|
||||
printf '{\n'
|
||||
printf ' "FastAPI Basic App": {\n'
|
||||
printf ' "prefix": "fastapi-app",\n'
|
||||
printf ' "body": [\n'
|
||||
printf ' "from fastapi import FastAPI",\n'
|
||||
printf ' "from fastapi.middleware.cors import CORSMiddleware",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "app = FastAPI(title=\\"${1:My API}\\", version=\\"0.1.0\\")",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "app.add_middleware(",\n'
|
||||
printf ' " CORSMiddleware,",\n'
|
||||
printf ' " allow_origins=[\\"*\\"],",\n'
|
||||
printf ' " allow_credentials=True,",\n'
|
||||
printf ' " allow_methods=[\\"*\\"],",\n'
|
||||
printf ' " allow_headers=[\\"*\\"],",\n'
|
||||
printf ' ")",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "@app.get(\\"\\")\\",\n'
|
||||
printf ' "async def root():",\n'
|
||||
printf ' " return {\\"message\\": \\"${2:Hello World}\\"}",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "@app.get(\\"\/health\\")",\n'
|
||||
printf ' "async def health():",\n'
|
||||
printf ' " return {\\"status\\": \\"healthy\\"}",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "if __name__ == \\"__main__\\":",\n'
|
||||
printf ' " import uvicorn",\n'
|
||||
printf ' " uvicorn.run(app, host=\\"0.0.0.0\\", port=8000)"\n'
|
||||
printf ' ],\n'
|
||||
printf ' "description": "FastAPI basic application template"\n'
|
||||
printf ' },\n'
|
||||
printf ' "Next.js API Route": {\n'
|
||||
printf ' "prefix": "nextapi",\n'
|
||||
printf ' "body": [\n'
|
||||
printf ' "import { NextRequest, NextResponse } from '\''next\/server'\'';",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "export async function ${1:GET}(request: NextRequest) {",\n'
|
||||
printf ' " try {",\n'
|
||||
printf ' " \/\/ Your API logic here",\n'
|
||||
printf ' " return NextResponse.json({ message: '\''${2:Success}'\'' });",\n'
|
||||
printf ' " } catch (error) {",\n'
|
||||
printf ' " return NextResponse.json(",\n'
|
||||
printf ' " { error: '\''Internal Server Error'\'' },",\n'
|
||||
printf ' " { status: 500 }",\n'
|
||||
printf ' " );",\n'
|
||||
printf ' " }",\n'
|
||||
printf ' "}"\n'
|
||||
printf ' ],\n'
|
||||
printf ' "description": "Next.js API route template"\n'
|
||||
printf ' },\n'
|
||||
printf ' "Database Connection": {\n'
|
||||
printf ' "prefix": "db-connect",\n'
|
||||
printf ' "body": [\n'
|
||||
printf ' "import os",\n'
|
||||
printf ' "from sqlalchemy import create_engine",\n'
|
||||
printf ' "from sqlalchemy.orm import sessionmaker",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "DATABASE_URL = os.getenv(",\n'
|
||||
printf ' " \\"POSTGRES_URL\\",",\n'
|
||||
printf ' " \\"postgresql:\/\/postgres:password@localhost:5432\/postgres\\"",\n'
|
||||
printf ' ")",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "engine = create_engine(DATABASE_URL)",\n'
|
||||
printf ' "SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "def get_db():",\n'
|
||||
printf ' " db = SessionLocal()",\n'
|
||||
printf ' " try:",\n'
|
||||
printf ' " yield db",\n'
|
||||
printf ' " finally:",\n'
|
||||
printf ' " db.close()"\n'
|
||||
printf ' ],\n'
|
||||
printf ' "description": "Database connection setup"\n'
|
||||
printf ' }\n'
|
||||
printf '}\n'
|
||||
} > "$CURSOR_DIR/data/User/snippets/global.code-snippets"
|
||||
|
||||
# Set proper ownership (Unix-like systems only)
|
||||
if [[ "$OSTYPE" != "msys" && "$OSTYPE" != "cygwin" && "$OSTYPE" != "win32" ]]; then
|
||||
if command -v chown >/dev/null 2>&1; then
|
||||
chown -R "$USER_NAME:$USER_NAME" "$CURSOR_DIR" 2>/dev/null || {
|
||||
echo "⚠️ Could not set ownership - you may need to run with appropriate permissions"
|
||||
}
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "✅ Cursor IDE support configured"
|
||||
echo "🎯 Cursor will use optimized settings for this development environment"
|
||||
echo "Cursor configuration refreshed in $CURSOR_DIR"
|
||||
|
||||
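Because the Cursor settings, tasks, and keybindings above are emitted line by line, a quick JSON validation catches any quoting slips. A sketch assuming jq is installed:

for f in settings.json tasks.json keybindings.json; do
  jq empty "$HOME/.cursor-server/data/User/$f" && echo "valid: $f"
done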
tf/scripts/dev-tools.sh (Normal file → Executable file)
@@ -1,443 +1,19 @@
#!/bin/bash
|
||||
# Convert CRLF to LF if present (handles Windows line endings)
|
||||
if command -v dos2unix >/dev/null 2>&1; then
|
||||
dos2unix "$0" 2>/dev/null || true
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
if command -v apt-get >/dev/null 2>&1; then
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -y \
|
||||
make tree jq curl wget unzip git ripgrep fd-find htop >/dev/null
|
||||
# fd binary name differs on Debian
|
||||
if [[ ! -e /usr/local/bin/fd && -e /usr/bin/fdfind ]]; then
|
||||
sudo ln -sf /usr/bin/fdfind /usr/local/bin/fd
|
||||
fi
|
||||
fi
|
||||
|
||||
set -e
|
||||
echo "🔧 Installing development extensions and tools..."
|
||||
|
||||
# Cross-platform system detection
|
||||
detect_system() {
|
||||
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
|
||||
if command -v apt-get >/dev/null 2>&1; then
|
||||
SYSTEM="debian"
|
||||
elif command -v yum >/dev/null 2>&1; then
|
||||
SYSTEM="rhel"
|
||||
elif command -v pacman >/dev/null 2>&1; then
|
||||
SYSTEM="arch"
|
||||
else
|
||||
SYSTEM="linux"
|
||||
fi
|
||||
elif [[ "$OSTYPE" == "darwin"* ]]; then
|
||||
SYSTEM="macos"
|
||||
elif [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then
|
||||
SYSTEM="windows"
|
||||
else
|
||||
SYSTEM="unknown"
|
||||
fi
|
||||
}
|
||||
|
||||
# Cross-platform package installation
|
||||
install_package() {
|
||||
local package="$1"
|
||||
case "$SYSTEM" in
|
||||
"debian")
|
||||
# Check if APT is locked before attempting
|
||||
if fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1; then
|
||||
echo "⚠️ APT is locked by another process, skipping $package installation"
|
||||
else
|
||||
apt-get update >/dev/null 2>&1 || true
|
||||
apt-get install -y "$package" 2>&1 || echo "⚠️ Failed to install $package"
|
||||
fi
|
||||
;;
|
||||
"rhel")
|
||||
yum install -y "$package" || dnf install -y "$package"
|
||||
;;
|
||||
"arch")
|
||||
pacman -S --noconfirm "$package"
|
||||
;;
|
||||
"macos")
|
||||
if command -v brew >/dev/null 2>&1; then
|
||||
brew install "$package"
|
||||
else
|
||||
echo "⚠️ Homebrew not found. Please install $package manually."
|
||||
fi
|
||||
;;
|
||||
"windows")
|
||||
if command -v choco >/dev/null 2>&1; then
|
||||
choco install -y "$package"
|
||||
elif command -v winget >/dev/null 2>&1; then
|
||||
winget install "$package"
|
||||
else
|
||||
echo "⚠️ Package manager not found. Please install $package manually."
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
echo "⚠️ Unknown system. Please install $package manually."
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Detect system and user info
|
||||
detect_system
|
||||
HOME_DIR="${HOME:-/home/coder}"
|
||||
USER_NAME="${USER:-$(whoami)}"
|
||||
|
||||
# Architecture detection for downloads
|
||||
ARCH=$(uname -m)
|
||||
case "$ARCH" in
|
||||
"x86_64") ARCH="x86_64" ;;
|
||||
"aarch64"|"arm64") ARCH="arm64" ;;
|
||||
*) ARCH="x86_64" ;; # Default fallback
|
||||
esac
|
||||
|
||||
# OS-specific binary suffix
|
||||
case "$SYSTEM" in
|
||||
"windows") BIN_SUFFIX=".exe" ;;
|
||||
*) BIN_SUFFIX="" ;;
|
||||
esac
|
||||
|
||||
# Check if we need elevated privileges (skip on Windows/macOS package managers)
|
||||
check_privileges() {
|
||||
if [[ "$SYSTEM" == "debian" || "$SYSTEM" == "rhel" || "$SYSTEM" == "arch" ]]; then
|
||||
if [ "$EUID" -ne 0 ] && ! command -v sudo >/dev/null 2>&1; then
|
||||
echo "This script needs root privileges or sudo for system package installation"
|
||||
exit 1
|
||||
fi
|
||||
SUDO_CMD="sudo"
|
||||
else
|
||||
SUDO_CMD=""
|
||||
fi
|
||||
}
|
||||
|
||||
check_privileges
|
||||
|
||||
echo "📦 Installing additional CLI tools..."
|
||||
|
||||
# Ensure curl is available first
|
||||
if ! command -v curl >/dev/null 2>&1; then
|
||||
echo "📥 Installing curl..."
|
||||
case "$SYSTEM" in
|
||||
"debian") $SUDO_CMD apt-get update && $SUDO_CMD apt-get install -y curl ;;
|
||||
"rhel") $SUDO_CMD yum install -y curl || $SUDO_CMD dnf install -y curl ;;
|
||||
"arch") $SUDO_CMD pacman -S --noconfirm curl ;;
|
||||
"macos") install_package curl ;;
|
||||
"windows") install_package curl ;;
|
||||
esac
|
||||
if command -v npm >/dev/null 2>&1; then
|
||||
npm install -g tldr fkill-cli >/dev/null 2>&1 || true
|
||||
fi
|
||||
|
||||
# Function to install various development tools
|
||||
install_development_tools() {
|
||||
echo "🛠️ Installing development utilities..."
|
||||
|
||||
# GitHub CLI
|
||||
if ! command -v gh >/dev/null 2>&1; then
|
||||
echo "📥 Installing GitHub CLI..."
|
||||
case "$SYSTEM" in
|
||||
"debian")
|
||||
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | $SUDO_CMD dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
|
||||
$SUDO_CMD chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
|
||||
printf 'deb [arch=%s signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main\n' "$(dpkg --print-architecture)" | $SUDO_CMD tee /etc/apt/sources.list.d/github-cli.list > /dev/null
|
||||
$SUDO_CMD apt-get update
|
||||
$SUDO_CMD apt-get install -y gh
|
||||
;;
|
||||
"macos")
|
||||
install_package gh
|
||||
;;
|
||||
"windows")
|
||||
install_package gh
|
||||
;;
|
||||
*)
|
||||
echo "⚠️ Please install GitHub CLI manually for your system"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Docker Compose (if not already installed)
|
||||
if ! command -v docker-compose >/dev/null 2>&1; then
|
||||
echo "🐳 Installing Docker Compose..."
|
||||
case "$SYSTEM" in
|
||||
"windows")
|
||||
echo "⚠️ Please install Docker Desktop for Windows which includes Docker Compose"
|
||||
;;
|
||||
"macos")
|
||||
echo "⚠️ Please install Docker Desktop for macOS which includes Docker Compose"
|
||||
;;
|
||||
*)
|
||||
COMPOSE_URL="https://github.com/docker/compose/releases/latest/download/docker-compose-$(uname -s)-$(uname -m)"
|
||||
curl -L "$COMPOSE_URL" -o docker-compose
|
||||
$SUDO_CMD mv docker-compose /usr/local/bin/docker-compose
|
||||
$SUDO_CMD chmod +x /usr/local/bin/docker-compose
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Lazygit for better git UI
|
||||
if ! command -v lazygit >/dev/null 2>&1; then
|
||||
echo "🌿 Installing lazygit..."
|
||||
case "$SYSTEM" in
|
||||
"macos")
|
||||
install_package lazygit
|
||||
;;
|
||||
"windows")
|
||||
install_package lazygit
|
||||
;;
|
||||
*)
|
||||
LAZYGIT_VERSION=$(curl -s "https://api.github.com/repos/jesseduffield/lazygit/releases/latest" | grep '"tag_name":' | sed -E 's/.*"v*([^"]+)".*/\1/')
|
||||
case "$SYSTEM" in
|
||||
"linux")
|
||||
curl -Lo lazygit.tar.gz "https://github.com/jesseduffield/lazygit/releases/latest/download/lazygit_${LAZYGIT_VERSION}_Linux_${ARCH}.tar.gz"
|
||||
;;
|
||||
*)
|
||||
curl -Lo lazygit.tar.gz "https://github.com/jesseduffield/lazygit/releases/latest/download/lazygit_${LAZYGIT_VERSION}_Linux_x86_64.tar.gz"
|
||||
;;
|
||||
esac
|
||||
tar xf lazygit.tar.gz lazygit
|
||||
$SUDO_CMD install lazygit /usr/local/bin
|
||||
rm -f lazygit.tar.gz lazygit
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# btop for system monitoring (not in Ubuntu 20.04 repos)
|
||||
if ! command -v btop >/dev/null 2>&1; then
|
||||
echo "📊 Installing btop..."
|
||||
case "$SYSTEM" in
|
||||
"debian")
|
||||
# Check if APT is locked
|
||||
if fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1; then
|
||||
echo "⚠️ APT is locked, skipping btop/htop installation"
|
||||
else
|
||||
# Try to install btop, fallback to htop if not available
|
||||
if ! $SUDO_CMD apt-get install -y btop 2>/dev/null; then
|
||||
echo "ℹ️ btop not available in repos, installing htop as alternative"
|
||||
$SUDO_CMD apt-get install -y htop 2>/dev/null || true
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
"macos") install_package btop ;;
|
||||
"windows") install_package btop ;;
|
||||
*) echo "⚠️ Please install btop manually for your system" ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# fd-find for better file searching
|
||||
if ! command -v fd >/dev/null 2>&1; then
|
||||
echo "🔍 Installing fd-find..."
|
||||
case "$SYSTEM" in
|
||||
"debian")
|
||||
# Try fd-find first, then try downloading from GitHub
|
||||
if ! $SUDO_CMD apt-get install -y fd-find 2>/dev/null; then
|
||||
echo "ℹ️ fd-find not available in repos, downloading from GitHub..."
|
||||
FD_VERSION=$(curl -s "https://api.github.com/repos/sharkdp/fd/releases/latest" | grep '"tag_name":' | sed -E 's/.*"v*([^"]+)".*/\1/')
|
||||
case "$ARCH" in
|
||||
"arm64"|"aarch64")
|
||||
FD_ARCH="aarch64"
|
||||
;;
|
||||
*)
|
||||
FD_ARCH="x86_64"
|
||||
;;
|
||||
esac
|
||||
curl -Lo fd.deb "https://github.com/sharkdp/fd/releases/download/v${FD_VERSION}/fd_${FD_VERSION}_amd64.deb" 2>/dev/null || \
|
||||
curl -Lo fd.deb "https://github.com/sharkdp/fd/releases/download/v${FD_VERSION}/fd-musl_${FD_VERSION}_amd64.deb" 2>/dev/null
|
||||
|
||||
if [[ -f fd.deb ]]; then
|
||||
$SUDO_CMD dpkg -i fd.deb 2>/dev/null || true
|
||||
$SUDO_CMD apt-get install -f -y 2>/dev/null || true
|
||||
rm -f fd.deb
|
||||
else
|
||||
echo "⚠️ Could not install fd automatically"
|
||||
fi
|
||||
else
|
||||
# Create symlink for easier usage if installed as fdfind
|
||||
$SUDO_CMD ln -sf /usr/bin/fdfind /usr/local/bin/fd 2>/dev/null || true
|
||||
fi
|
||||
;;
|
||||
"macos")
|
||||
install_package fd
|
||||
;;
|
||||
"windows")
|
||||
install_package fd
|
||||
;;
|
||||
*)
|
||||
echo "⚠️ Please install fd manually for your system"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# ripgrep for better text searching
|
||||
if ! command -v rg >/dev/null 2>&1; then
|
||||
echo "🔎 Installing ripgrep..."
|
||||
case "$SYSTEM" in
|
||||
"debian")
|
||||
# Try installing from repository first
|
||||
if ! $SUDO_CMD apt-get install -y ripgrep 2>/dev/null; then
|
||||
echo "ℹ️ ripgrep not available in repos, downloading from GitHub..."
|
||||
RG_VERSION=$(curl -s "https://api.github.com/repos/BurntSushi/ripgrep/releases/latest" | grep '"tag_name":' | sed -E 's/.*"v*([^"]+)".*/\1/')
|
||||
case "$ARCH" in
|
||||
"arm64"|"aarch64")
|
||||
RG_ARCH="aarch64"
|
||||
;;
|
||||
*)
|
||||
RG_ARCH="x86_64"
|
||||
;;
|
||||
esac
|
||||
curl -Lo ripgrep.deb "https://github.com/BurntSushi/ripgrep/releases/download/${RG_VERSION}/ripgrep_${RG_VERSION}_amd64.deb" 2>/dev/null || \
|
||||
curl -Lo ripgrep.deb "https://github.com/BurntSushi/ripgrep/releases/download/${RG_VERSION}/ripgrep_${RG_VERSION}-1_amd64.deb" 2>/dev/null
|
||||
|
||||
if [[ -f ripgrep.deb ]]; then
|
||||
$SUDO_CMD dpkg -i ripgrep.deb 2>/dev/null || true
|
||||
$SUDO_CMD apt-get install -f -y 2>/dev/null || true
|
||||
rm -f ripgrep.deb
|
||||
else
|
||||
echo "⚠️ Could not install ripgrep automatically"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
"macos") install_package ripgrep ;;
|
||||
"windows") install_package ripgrep ;;
|
||||
*) echo "⚠️ Please install ripgrep manually for your system" ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# bat for better cat with syntax highlighting
|
||||
if ! command -v bat >/dev/null 2>&1; then
|
||||
echo "🦇 Installing bat..."
|
||||
case "$SYSTEM" in
|
||||
"debian")
|
||||
# Try installing from repository first
|
||||
if ! $SUDO_CMD apt-get install -y bat 2>/dev/null; then
|
||||
echo "ℹ️ bat not available in repos, downloading from GitHub..."
|
||||
BAT_VERSION=$(curl -s "https://api.github.com/repos/sharkdp/bat/releases/latest" | grep '"tag_name":' | sed -E 's/.*"v*([^"]+)".*/\1/')
|
||||
curl -Lo bat.deb "https://github.com/sharkdp/bat/releases/download/v${BAT_VERSION}/bat_${BAT_VERSION}_amd64.deb" 2>/dev/null || \
|
||||
curl -Lo bat.deb "https://github.com/sharkdp/bat/releases/download/v${BAT_VERSION}/bat-musl_${BAT_VERSION}_amd64.deb" 2>/dev/null
|
||||
|
||||
if [[ -f bat.deb ]]; then
|
||||
$SUDO_CMD dpkg -i bat.deb 2>/dev/null || true
|
||||
$SUDO_CMD apt-get install -f -y 2>/dev/null || true
|
||||
rm -f bat.deb
|
||||
else
|
||||
echo "⚠️ Could not install bat automatically"
|
||||
fi
|
||||
else
|
||||
# Create symlink for easier usage if installed as batcat
|
||||
$SUDO_CMD ln -sf /usr/bin/batcat /usr/local/bin/bat 2>/dev/null || true
|
||||
fi
|
||||
;;
|
||||
"macos")
|
||||
install_package bat
|
||||
;;
|
||||
"windows")
|
||||
install_package bat
|
||||
;;
|
||||
*)
|
||||
echo "⚠️ Please install bat manually for your system"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# eza for better ls (modern replacement for exa)
|
||||
if ! command -v eza >/dev/null 2>&1; then
|
||||
echo "📁 Installing eza..."
|
||||
case "$SYSTEM" in
|
||||
"macos")
|
||||
install_package eza
|
||||
;;
|
||||
"windows")
|
||||
install_package eza
|
||||
;;
|
||||
*)
|
||||
case "$ARCH" in
|
||||
"arm64"|"aarch64")
|
||||
EZA_ARCH="aarch64"
|
||||
;;
|
||||
*)
|
||||
EZA_ARCH="x86_64"
|
||||
;;
|
||||
esac
|
||||
curl -L "https://github.com/eza-community/eza/releases/latest/download/eza_${EZA_ARCH}-unknown-linux-gnu.tar.gz" | $SUDO_CMD tar xz -C /usr/local/bin 2>/dev/null || {
|
||||
echo "⚠️ Could not install eza automatically"
|
||||
}
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
}
|
||||
|
||||
# Install all development tools
|
||||
install_development_tools
|
||||
|
||||
# Set up user-specific configurations
|
||||
echo "👤 Setting up user-specific tools..."
|
||||
|
||||
# Create user setup script to handle cross-platform differences
|
||||
{
|
||||
printf '#!/bin/bash\n'
|
||||
printf '# User-specific tool setup\n'
|
||||
printf '\n'
|
||||
printf '# Detect shell configuration file\n'
|
||||
printf 'if [[ "$SHELL" == *"zsh"* && -f "$HOME/.zshrc" ]]; then\n'
|
||||
printf ' SHELL_RC="$HOME/.zshrc"\n'
|
||||
printf 'elif [[ -f "$HOME/.bashrc" ]]; then\n'
|
||||
printf ' SHELL_RC="$HOME/.bashrc"\n'
|
||||
printf 'else\n'
|
||||
printf ' SHELL_RC="$HOME/.profile"\n'
|
||||
printf 'fi\n'
|
||||
printf '\n'
|
||||
printf '# Add useful aliases if not already present\n'
|
||||
printf 'if ! grep -q "# Development tools aliases" "$SHELL_RC" 2>/dev/null; then\n'
|
||||
printf ' printf "\\n# Development tools aliases\\n" >> "$SHELL_RC"\n'
|
||||
printf ' if command -v bat >/dev/null 2>&1; then\n'
|
||||
printf ' printf "alias cat='\''bat'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' fi\n'
|
||||
printf ' if command -v eza >/dev/null 2>&1; then\n'
|
||||
printf ' printf "alias ls='\''eza'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias ll='\''eza -la'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias la='\''eza -la'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' fi\n'
|
||||
printf ' if command -v fd >/dev/null 2>&1; then\n'
|
||||
printf ' printf "alias find='\''fd'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' fi\n'
|
||||
printf ' if command -v rg >/dev/null 2>&1; then\n'
|
||||
printf ' printf "alias grep='\''rg'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' fi\n'
|
||||
printf ' if command -v lazygit >/dev/null 2>&1; then\n'
|
||||
printf ' printf "alias git-ui='\''lazygit'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' fi\n'
|
||||
printf ' if command -v btop >/dev/null 2>&1; then\n'
|
||||
printf ' printf "alias top='\''btop'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' elif command -v htop >/dev/null 2>&1; then\n'
|
||||
printf ' printf "alias top='\''htop'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' fi\n'
|
||||
printf ' printf "\\n" >> "$SHELL_RC"\n'
|
||||
printf 'fi\n'
|
||||
printf '\n'
|
||||
printf '# Install Node.js tools if npm is available\n'
|
||||
printf 'if command -v npm >/dev/null 2>&1; then\n'
|
||||
printf ' # Install tldr for better man pages\n'
|
||||
printf ' if ! command -v tldr >/dev/null 2>&1; then\n'
|
||||
printf ' npm install -g tldr 2>/dev/null || echo "⚠️ Could not install tldr"\n'
|
||||
printf ' fi\n'
|
||||
printf ' \n'
|
||||
printf ' # Install fkill for better process management\n'
|
||||
printf ' if ! command -v fkill >/dev/null 2>&1; then\n'
|
||||
printf ' npm install -g fkill-cli 2>/dev/null || echo "⚠️ Could not install fkill-cli"\n'
|
||||
printf ' fi\n'
|
||||
printf 'fi\n'
|
||||
printf '\n'
|
||||
printf 'echo "✅ Development tools installed and configured!"\n'
|
||||
} > /tmp/user_setup.sh
|
||||
|
||||
# Run user setup based on system type
|
||||
if [[ "$SYSTEM" == "windows" ]]; then
|
||||
# On Windows, run directly
|
||||
bash /tmp/user_setup.sh
|
||||
else
|
||||
# On Unix-like systems, try to switch to target user if different
|
||||
if [[ "$USER_NAME" != "$(whoami)" ]] && command -v su >/dev/null 2>&1; then
|
||||
su - "$USER_NAME" -c "bash /tmp/user_setup.sh" 2>/dev/null || {
|
||||
echo "⚠️ Could not switch to user $USER_NAME, running as current user"
|
||||
bash /tmp/user_setup.sh
|
||||
}
|
||||
else
|
||||
bash /tmp/user_setup.sh
|
||||
fi
|
||||
fi
|
||||
|
||||
# Clean up
|
||||
rm -f /tmp/user_setup.sh
|
||||
|
||||
echo "🎉 All development tools installed successfully!"
|
||||
echo "💡 Available tools: gh, docker-compose, lazygit, htop/btop, fd, rg, bat, eza, tldr, fkill"
|
||||
echo "💡 Aliases configured: cat→bat, ls→eza, find→fd, grep→rg, git-ui→lazygit, top→htop/btop"
|
||||
echo "💡 Restart your shell or run 'source ~/.bashrc' (or ~/.zshrc) to use the new aliases"
|
||||
echo "Development tooling refreshed."
|
||||
|
||||
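A quick way to confirm which of the tools the script above actually landed in the image (sketch; the list mirrors what the script attempts to install):

for tool in gh docker-compose lazygit btop htop fd rg bat eza tldr fkill; do
  command -v "$tool" >/dev/null 2>&1 && echo "ok:      $tool" || echo "missing: $tool"
done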
tf/scripts/git-hooks.sh (Normal file → Executable file)
@@ -1,169 +1,27 @@
#!/bin/bash
|
||||
# Convert CRLF to LF if present (handles Windows line endings)
|
||||
if command -v dos2unix >/dev/null 2>&1; then
|
||||
dos2unix "$0" 2>/dev/null || true
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
REPO_DIR="/workspaces"
|
||||
HOOK_DIR="$REPO_DIR/.git/hooks"
|
||||
META_DIR="/tmp/git-metadata"
|
||||
|
||||
if [[ ! -d "$REPO_DIR/.git" ]]; then
|
||||
echo "No Git repository found in $REPO_DIR; skipping hook install"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
mkdir -p "$HOOK_DIR" "$META_DIR"
|
||||
|
||||
cat <<'HOOK' > "$HOOK_DIR/post-commit"
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
echo "📝 Setting up Git hooks and metadata capture..."
|
||||
META_DIR=/tmp/git-metadata
|
||||
mkdir -p "$META_DIR"
|
||||
|
||||
# Cross-platform directory and user detection
|
||||
if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then
|
||||
HOME_DIR="${USERPROFILE:-$HOME}"
|
||||
USER_NAME="${USERNAME:-${USER:-coder}}"
|
||||
WORKSPACES_DIR="${HOME_DIR}/workspaces"
|
||||
TEMP_DIR="${TEMP:-/tmp}"
|
||||
else
|
||||
HOME_DIR="${HOME:-/home/coder}"
|
||||
USER_NAME="${USER:-coder}"
|
||||
WORKSPACES_DIR="/workspaces"
|
||||
TEMP_DIR="/tmp"
|
||||
fi
|
||||
git branch --show-current > "$META_DIR/current-branch" 2>/dev/null || echo "main" > "$META_DIR/current-branch"
|
||||
git rev-parse HEAD > "$META_DIR/commit-hash" 2>/dev/null || echo "unknown" > "$META_DIR/commit-hash"
|
||||
git remote get-url origin > "$META_DIR/remote-url" 2>/dev/null || echo "no-remote" > "$META_DIR/remote-url"
|
||||
HOOK
|
||||
chmod +x "$HOOK_DIR/post-commit"
|
||||
|
||||
# Ensure workspaces directory exists and navigate to it
|
||||
mkdir -p "$WORKSPACES_DIR"
|
||||
cd "$WORKSPACES_DIR"
|
||||
|
||||
# Initialize git repository if it doesn't exist
|
||||
if [ ! -d ".git" ]; then
|
||||
echo "🔧 Initializing git repository..."
|
||||
git init
|
||||
fi
|
||||
|
||||
# Create .git/hooks directory if it doesn't exist
|
||||
mkdir -p .git/hooks
|
||||
|
||||
# Create post-commit hook for metadata capture using printf
|
||||
{
|
||||
printf '#!/bin/bash\n'
|
||||
printf '# Post-commit hook to capture git metadata\n'
|
||||
printf 'echo "📝 Capturing git metadata after commit..."\n'
|
||||
printf '\n'
|
||||
printf '# Cross-platform temp directory detection\n'
|
||||
printf 'if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then\n'
|
||||
printf ' TEMP_DIR="${TEMP:-/tmp}"\n'
|
||||
printf 'else\n'
|
||||
printf ' TEMP_DIR="/tmp"\n'
|
||||
printf 'fi\n'
|
||||
printf '\n'
|
||||
printf '# Ensure metadata directory exists\n'
|
||||
printf 'mkdir -p "$TEMP_DIR/git-metadata"\n'
|
||||
printf '\n'
|
||||
printf '# Capture current git state\n'
|
||||
printf 'git branch --show-current > "$TEMP_DIR/git-metadata/current-branch" 2>/dev/null || printf "main" > "$TEMP_DIR/git-metadata/current-branch"\n'
|
||||
printf 'git rev-parse HEAD > "$TEMP_DIR/git-metadata/commit-hash" 2>/dev/null || printf "no-commits" > "$TEMP_DIR/git-metadata/commit-hash"\n'
|
||||
printf 'git remote get-url origin > "$TEMP_DIR/git-metadata/remote-url" 2>/dev/null || printf "no-remote" > "$TEMP_DIR/git-metadata/remote-url"\n'
|
||||
printf '\n'
|
||||
printf '# Log the commit for development tracking\n'
|
||||
printf 'printf "%%s: Commit %%s on branch %%s\\n" "$(date)" "$(git rev-parse --short HEAD 2>/dev/null || echo "unknown")" "$(git branch --show-current 2>/dev/null || echo "unknown")" >> "$TEMP_DIR/git-metadata/commit-log"\n'
|
||||
printf '\n'
|
||||
printf 'echo "✅ Git metadata updated"\n'
|
||||
} > .git/hooks/post-commit
|
||||
|
||||
# Create pre-push hook for quality checks using printf
|
||||
{
|
||||
printf '#!/bin/bash\n'
|
||||
printf '# Pre-push hook for basic quality checks\n'
|
||||
printf 'echo "🔍 Running pre-push quality checks..."\n'
|
||||
printf '\n'
|
||||
printf '# Check if package.json exists and run tests\n'
|
||||
printf 'if [ -f "package.json" ]; then\n'
|
||||
printf ' echo "📦 Found Node.js project, checking scripts..."\n'
|
||||
printf ' if npm run --silent test --if-present 2>/dev/null; then\n'
|
||||
printf ' echo "✅ Tests passed"\n'
|
||||
printf ' else\n'
|
||||
printf ' echo "⚠️ Tests not found or failed - pushing anyway"\n'
|
||||
printf ' fi\n'
|
||||
printf 'fi\n'
|
||||
printf '\n'
|
||||
printf '# Check if requirements.txt or pyproject.toml exists\n'
|
||||
printf 'if [ -f "requirements.txt" ] || [ -f "pyproject.toml" ]; then\n'
|
||||
printf ' echo "🐍 Found Python project..."\n'
|
||||
printf ' # Could add Python linting here\n'
|
||||
printf ' echo "✅ Python project checks passed"\n'
|
||||
printf 'fi\n'
|
||||
printf '\n'
|
||||
printf '# Check for large files (cross-platform compatible)\n'
|
||||
printf 'echo "📁 Checking for large files..."\n'
|
||||
printf 'if command -v find >/dev/null 2>&1; then\n'
|
||||
printf ' large_files=$(find . -type f -size +100M 2>/dev/null | head -5)\n'
|
||||
printf ' if [ ! -z "$large_files" ]; then\n'
|
||||
printf ' echo "⚠️ Large files detected:"\n'
|
||||
printf ' printf "%%s\\n" "$large_files"\n'
|
||||
printf ' echo "Consider using Git LFS for large files"\n'
|
||||
printf ' fi\n'
|
||||
printf 'else\n'
|
||||
printf ' echo "⚠️ find command not available, skipping large file check"\n'
|
||||
printf 'fi\n'
|
||||
printf '\n'
|
||||
printf 'echo "✅ Pre-push checks completed"\n'
|
||||
} > .git/hooks/pre-push
|
||||
|
||||
# Make hooks executable (cross-platform compatible)
|
||||
if [[ "$OSTYPE" != "msys" && "$OSTYPE" != "cygwin" && "$OSTYPE" != "win32" ]]; then
|
||||
chmod +x .git/hooks/post-commit
|
||||
chmod +x .git/hooks/pre-push
|
||||
else
|
||||
# On Windows, Git Bash should handle executable permissions automatically
|
||||
echo "🔧 Git hooks created (Windows will handle executable permissions)"
|
||||
fi
|
||||
|
||||
# Set proper ownership (Unix-like systems only)
|
||||
if [[ "$OSTYPE" != "msys" && "$OSTYPE" != "cygwin" && "$OSTYPE" != "win32" ]]; then
|
||||
if command -v chown >/dev/null 2>&1 && [ "$USER_NAME" != "$(whoami)" ]; then
|
||||
chown -R "$USER_NAME:$USER_NAME" .git/hooks 2>/dev/null || {
|
||||
echo "⚠️ Could not set ownership - you may need to run with appropriate permissions"
|
||||
}
|
||||
fi
|
||||
fi
|
||||
|
||||
# Create a helper script for viewing git metadata
|
||||
{
|
||||
printf '#!/bin/bash\n'
|
||||
printf '# Helper script to view captured git metadata\n'
|
||||
printf '\n'
|
||||
printf '# Cross-platform temp directory detection\n'
|
||||
printf 'if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then\n'
|
||||
printf ' TEMP_DIR="${TEMP:-/tmp}"\n'
|
||||
printf 'else\n'
|
||||
printf ' TEMP_DIR="/tmp"\n'
|
||||
printf 'fi\n'
|
||||
printf '\n'
|
||||
printf 'METADATA_DIR="$TEMP_DIR/git-metadata"\n'
|
||||
printf '\n'
|
||||
printf 'echo "📊 Git Metadata Summary"\n'
|
||||
printf 'echo "====================="\n'
|
||||
printf '\n'
|
||||
printf 'if [ -d "$METADATA_DIR" ]; then\n'
|
||||
printf ' if [ -f "$METADATA_DIR/current-branch" ]; then\n'
|
||||
printf ' printf "Current Branch: %%s\\n" "$(cat "$METADATA_DIR/current-branch")"\n'
|
||||
printf ' fi\n'
|
||||
printf ' \n'
|
||||
printf ' if [ -f "$METADATA_DIR/commit-hash" ]; then\n'
|
||||
printf ' printf "Latest Commit: %%s\\n" "$(cat "$METADATA_DIR/commit-hash")"\n'
|
||||
printf ' fi\n'
|
||||
printf ' \n'
|
||||
printf ' if [ -f "$METADATA_DIR/remote-url" ]; then\n'
|
||||
printf ' printf "Remote URL: %%s\\n" "$(cat "$METADATA_DIR/remote-url")"\n'
|
||||
printf ' fi\n'
|
||||
printf ' \n'
|
||||
printf ' if [ -f "$METADATA_DIR/commit-log" ]; then\n'
|
||||
printf ' echo ""\n'
|
||||
printf ' echo "Recent Commits:"\n'
|
||||
printf ' tail -5 "$METADATA_DIR/commit-log" 2>/dev/null || echo "No commit log available"\n'
|
||||
printf ' fi\n'
|
||||
printf 'else\n'
|
||||
printf ' echo "No git metadata found. Make a commit to generate metadata."\n'
|
||||
printf 'fi\n'
|
||||
} > .git/hooks/show-metadata
|
||||
|
||||
# Make metadata viewer executable
|
||||
if [[ "$OSTYPE" != "msys" && "$OSTYPE" != "cygwin" && "$OSTYPE" != "win32" ]]; then
|
||||
chmod +x .git/hooks/show-metadata
|
||||
fi
|
||||
|
||||
echo "✅ Git hooks and metadata capture configured"
|
||||
echo "📝 Git metadata will be automatically captured on commits"
|
||||
echo "🔍 Pre-push quality checks will run before each push"
|
||||
echo "💡 Run '.git/hooks/show-metadata' to view captured git metadata"
|
||||
echo "💡 Metadata is stored in: $TEMP_DIR/git-metadata/"
|
||||
echo "Git post-commit hook installed for metadata capture."
|
||||
|
||||
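The post-commit hook installed above only writes metadata once a commit happens. A minimal way to exercise it from the workspace repository (illustrative):

cd /workspaces
git commit --allow-empty -m "exercise post-commit hook"
cat /tmp/git-metadata/current-branch /tmp/git-metadata/commit-hash /tmp/git-metadata/remote-url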
tf/scripts/port-forward.sh (Normal file → Executable file)
@@ -1,68 +1,57 @@
#!/bin/bash
# Manual port forwarding script for Coder services
#!/usr/bin/env bash
set -euo pipefail

echo "🔌 Setting up port forwarding for services..."
WORKSPACE_ID="${CODER_WORKSPACE_ID:-}"
if [[ -z "${WORKSPACE_ID}" && -f /tmp/git-metadata/workspace-id ]]; then
  WORKSPACE_ID="$(cat /tmp/git-metadata/workspace-id)"
fi

if [[ -z "${WORKSPACE_ID}" ]]; then
  echo "Unable to determine CODER_WORKSPACE_ID; skipping port forwarding" >&2
  exit 0
fi

SERVICES_ENABLED="${ENABLE_SERVICES:-false}"
PGADMIN_ENABLED="${ENABLE_PGADMIN:-false}"
JUPYTER_ENABLED="${ENABLE_JUPYTER:-false}"

# Install socat if not available
if ! command -v socat >/dev/null 2>&1; then
  echo "📦 Installing socat..."
  if command -v apt-get >/dev/null 2>&1; then
    sudo apt-get update -qq && sudo apt-get install -y socat
  elif command -v apk >/dev/null 2>&1; then
    sudo apk add --no-cache socat
  else
    echo "❌ Cannot install socat automatically. Please install it manually."
    exit 1
  fi
  if command -v apt-get >/dev/null 2>&1; then
    sudo apt-get update -qq
    sudo apt-get install -y socat >/dev/null
  elif command -v apk >/dev/null 2>&1; then
    sudo apk add --no-cache socat >/dev/null
  else
    echo "socat is required for port forwarding but could not be installed automatically" >&2
    exit 0
  fi
fi

# Kill any existing socat processes
echo "🔄 Stopping existing port forwards..."
pkill -f "socat.*5050" 2>/dev/null || true
pkill -f "socat.*6333" 2>/dev/null || true
# stop previous forwards if they exist
pkill -f "socat.*pgadmin" >/dev/null 2>&1 || true
pkill -f "socat.*qdrant" >/dev/null 2>&1 || true
pkill -f "socat.*jupyter" >/dev/null 2>&1 || true

# Get workspace ID from Coder metadata or environment
if [ -n "$CODER_WORKSPACE_ID" ]; then
  WORKSPACE_ID="$CODER_WORKSPACE_ID"
elif [ -f /tmp/git-metadata/workspace-id ]; then
  WORKSPACE_ID=$(cat /tmp/git-metadata/workspace-id)
if [[ "${SERVICES_ENABLED}" == "true" ]]; then
  if [[ "${PGADMIN_ENABLED}" == "true" ]]; then
    echo "Forwarding pgAdmin to localhost:5050"
    nohup socat TCP-LISTEN:5050,reuseaddr,fork TCP:pgadmin-${WORKSPACE_ID}:80 >/tmp/socat-pgadmin.log 2>&1 &
  else
    echo "pgAdmin disabled; skipping port forward"
  fi

  echo "Forwarding Qdrant to localhost:6333"
  nohup socat TCP-LISTEN:6333,reuseaddr,fork TCP:qdrant-${WORKSPACE_ID}:6333 >/tmp/socat-qdrant.log 2>&1 &
else
  # Try to extract from container names
  WORKSPACE_ID=$(docker ps --format '{{.Names}}' 2>/dev/null | grep -E 'postgres-|redis-|qdrant-' | head -1 | sed 's/.*-//')
  if [ -z "$WORKSPACE_ID" ]; then
    echo "❌ Cannot determine workspace ID. Please set CODER_WORKSPACE_ID environment variable."
    exit 1
  fi
fi
echo "📍 Workspace ID: $WORKSPACE_ID"

# Start port forwarding
echo "🚀 Starting port forwarding..."

# Forward pgAdmin (port 5050 -> pgadmin container port 80)
if [ "${ENABLE_PGADMIN:-true}" = "true" ]; then
  echo " - pgAdmin: localhost:5050 -> pgadmin-$WORKSPACE_ID:80"
  nohup socat TCP-LISTEN:5050,reuseaddr,fork TCP:pgadmin-$WORKSPACE_ID:80 > /tmp/socat-pgadmin.log 2>&1 &
  echo "Database services disabled; skipping pgAdmin/Qdrant forwards"
fi

# Forward Qdrant (port 6333 -> qdrant container port 6333)
echo " - Qdrant: localhost:6333 -> qdrant-$WORKSPACE_ID:6333"
nohup socat TCP-LISTEN:6333,reuseaddr,fork TCP:qdrant-$WORKSPACE_ID:6333 > /tmp/socat-qdrant.log 2>&1 &
if [[ "${JUPYTER_ENABLED}" == "true" ]]; then
  echo "Forwarding JupyterLab to localhost:8888"
  nohup socat TCP-LISTEN:8888,reuseaddr,fork TCP:jupyter-${WORKSPACE_ID}:8888 >/tmp/socat-jupyter.log 2>&1 &
else
  echo "JupyterLab disabled; skipping port forward"
fi

# Give processes time to start
sleep 2

# Check status
echo ""
echo "✅ Port forwarding status:"
ps aux | grep -E "socat.*(5050|6333)" | grep -v grep || echo "❌ No port forwarding processes found"

echo ""
echo "📝 Logs available at:"
echo " - /tmp/socat-pgadmin.log"
echo " - /tmp/socat-qdrant.log"

echo ""
echo "🌐 Access services at:"
echo " - pgAdmin: http://localhost:5050"
echo " - Qdrant: http://localhost:6333/dashboard"
ps -o pid,cmd -C socat || true
287
tf/scripts/windsurf-setup.sh
Normal file → Executable file
@@ -1,280 +1,15 @@
|
||||
#!/bin/bash
|
||||
# Convert CRLF to LF if present (handles Windows line endings)
|
||||
if command -v dos2unix >/dev/null 2>&1; then
|
||||
dos2unix "$0" 2>/dev/null || true
|
||||
fi
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
set -e
|
||||
echo "🌊 Setting up Windsurf IDE support..."
|
||||
WIND_DIR="$HOME/.windsurf"
|
||||
mkdir -p "$WIND_DIR/User"
|
||||
|
||||
# Cross-platform user and directory detection
|
||||
if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then
|
||||
HOME_DIR="${USERPROFILE:-$HOME}"
|
||||
USER_NAME="${USERNAME:-${USER:-coder}}"
|
||||
WINDSURF_DIR="$HOME_DIR/.windsurf"
|
||||
else
|
||||
HOME_DIR="${HOME:-/home/coder}"
|
||||
USER_NAME="${USER:-coder}"
|
||||
WINDSURF_DIR="$HOME_DIR/.windsurf"
|
||||
fi
|
||||
|
||||
# Create Windsurf configuration directories
|
||||
mkdir -p "$WINDSURF_DIR/data/User"
|
||||
mkdir -p "$WINDSURF_DIR/extensions"
|
||||
|
||||
# Create optimized Windsurf settings using printf to ensure LF line endings
|
||||
cat <<'JSON' > "$WIND_DIR/User/settings.json"
|
||||
{
|
||||
printf '{\n'
|
||||
printf ' "workbench.colorTheme": "Windsurf Dark",\n'
|
||||
printf ' "editor.fontSize": 14,\n'
|
||||
printf ' "editor.tabSize": 2,\n'
|
||||
printf ' "editor.insertSpaces": true,\n'
|
||||
printf ' "editor.formatOnSave": true,\n'
|
||||
printf ' "editor.codeActionsOnSave": {\n'
|
||||
printf ' "source.fixAll": true,\n'
|
||||
printf ' "source.organizeImports": true\n'
|
||||
printf ' },\n'
|
||||
printf ' "files.autoSave": "afterDelay",\n'
|
||||
printf ' "files.autoSaveDelay": 1000,\n'
|
||||
printf ' "terminal.integrated.fontSize": 13,\n'
|
||||
printf ' "git.enableSmartCommit": true,\n'
|
||||
printf ' "git.confirmSync": false,\n'
|
||||
printf ' "python.defaultInterpreterPath": "%s/.venv/bin/python",\n' "$HOME_DIR"
|
||||
printf ' "python.linting.enabled": true,\n'
|
||||
printf ' "python.linting.pylintEnabled": false,\n'
|
||||
printf ' "python.linting.flake8Enabled": true,\n'
|
||||
printf ' "typescript.preferences.includePackageJsonAutoImports": "auto",\n'
|
||||
printf ' "javascript.preferences.includePackageJsonAutoImports": "auto",\n'
|
||||
printf ' "windsurf.ai.enabled": true,\n'
|
||||
printf ' "windsurf.ai.showInEditorContextMenu": true,\n'
|
||||
printf ' "windsurf.chat.enabled": true,\n'
|
||||
printf ' "windsurf.codeCompletion.enabled": true\n'
|
||||
printf '}\n'
|
||||
} > "$WINDSURF_DIR/data/User/settings.json"
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2,
|
||||
"files.autoSave": "onFocusChange"
|
||||
}
|
||||
JSON
|
||||
|
||||
# Create development keybindings using printf
|
||||
{
|
||||
printf '[\n'
|
||||
printf ' {\n'
|
||||
printf ' "key": "ctrl+shift+a",\n'
|
||||
printf ' "command": "windsurf.chat.open"\n'
|
||||
printf ' },\n'
|
||||
printf ' {\n'
|
||||
printf ' "key": "ctrl+shift+c",\n'
|
||||
printf ' "command": "windsurf.ai.generateCode"\n'
|
||||
printf ' },\n'
|
||||
printf ' {\n'
|
||||
printf ' "key": "ctrl+shift+r",\n'
|
||||
printf ' "command": "windsurf.ai.refactorSelection"\n'
|
||||
printf ' },\n'
|
||||
printf ' {\n'
|
||||
printf ' "key": "ctrl+shift+e",\n'
|
||||
printf ' "command": "windsurf.ai.explainCode"\n'
|
||||
printf ' }\n'
|
||||
printf ']\n'
|
||||
} > "$WINDSURF_DIR/data/User/keybindings.json"
|
||||
|
||||
# Create development tasks configuration for Windsurf
|
||||
{
|
||||
printf '{\n'
|
||||
printf ' "version": "2.0.0",\n'
|
||||
printf ' "tasks": [\n'
|
||||
printf ' {\n'
|
||||
printf ' "label": "Dev Server",\n'
|
||||
printf ' "type": "shell",\n'
|
||||
printf ' "command": "npm run dev",\n'
|
||||
printf ' "group": "build",\n'
|
||||
printf ' "presentation": {\n'
|
||||
printf ' "echo": true,\n'
|
||||
printf ' "reveal": "always",\n'
|
||||
printf ' "focus": false,\n'
|
||||
printf ' "panel": "new"\n'
|
||||
printf ' },\n'
|
||||
printf ' "problemMatcher": []\n'
|
||||
printf ' },\n'
|
||||
printf ' {\n'
|
||||
printf ' "label": "Python Dev Server",\n'
|
||||
printf ' "type": "shell",\n'
|
||||
printf ' "command": "uvicorn main:app --reload --host 0.0.0.0 --port 8000",\n'
|
||||
printf ' "group": "build",\n'
|
||||
printf ' "presentation": {\n'
|
||||
printf ' "echo": true,\n'
|
||||
printf ' "reveal": "always",\n'
|
||||
printf ' "focus": false,\n'
|
||||
printf ' "panel": "new"\n'
|
||||
printf ' },\n'
|
||||
printf ' "problemMatcher": []\n'
|
||||
printf ' },\n'
|
||||
printf ' {\n'
|
||||
printf ' "label": "AI Code Review",\n'
|
||||
printf ' "type": "shell",\n'
|
||||
printf ' "command": "echo",\n'
|
||||
printf ' "args": ["Use Ctrl+Shift+R to refactor selection with Windsurf AI"],\n'
|
||||
printf ' "group": "build",\n'
|
||||
printf ' "presentation": {\n'
|
||||
printf ' "echo": true,\n'
|
||||
printf ' "reveal": "always",\n'
|
||||
printf ' "focus": false,\n'
|
||||
printf ' "panel": "new"\n'
|
||||
printf ' }\n'
|
||||
printf ' },\n'
|
||||
printf ' {\n'
|
||||
printf ' "label": "Install Dependencies",\n'
|
||||
printf ' "type": "shell",\n'
|
||||
printf ' "command": "npm install",\n'
|
||||
printf ' "group": "build",\n'
|
||||
printf ' "presentation": {\n'
|
||||
printf ' "echo": true,\n'
|
||||
printf ' "reveal": "always",\n'
|
||||
printf ' "focus": false,\n'
|
||||
printf ' "panel": "new"\n'
|
||||
printf ' }\n'
|
||||
printf ' }\n'
|
||||
printf ' ]\n'
|
||||
printf '}\n'
|
||||
} > "$WINDSURF_DIR/data/User/tasks.json"
|
||||
|
||||
# Create useful code snippets for Windsurf
|
||||
mkdir -p "$WINDSURF_DIR/data/User/snippets"
|
||||
{
|
||||
printf '{\n'
|
||||
printf ' "Windsurf AI Comment": {\n'
|
||||
printf ' "prefix": "ai-comment",\n'
|
||||
printf ' "body": [\n'
|
||||
printf ' "// AI-assisted code: ${1:description}",\n'
|
||||
printf ' "// Generated with Windsurf AI on $(date)"\n'
|
||||
printf ' ],\n'
|
||||
printf ' "description": "Add AI assistance comment"\n'
|
||||
printf ' },\n'
|
||||
printf ' "FastAPI with AI Comments": {\n'
|
||||
printf ' "prefix": "fastapi-ai",\n'
|
||||
printf ' "body": [\n'
|
||||
printf ' "# AI-enhanced FastAPI application",\n'
|
||||
printf ' "from fastapi import FastAPI",\n'
|
||||
printf ' "from fastapi.middleware.cors import CORSMiddleware",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "app = FastAPI(",\n'
|
||||
printf ' " title=\\"${1:AI-Enhanced API}\\",",\n'
|
||||
printf ' " description=\\"API built with Windsurf AI assistance\\",",\n'
|
||||
printf ' " version=\\"0.1.0\\"",\n'
|
||||
printf ' ")",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "# AI-suggested CORS configuration",\n'
|
||||
printf ' "app.add_middleware(",\n'
|
||||
printf ' " CORSMiddleware,",\n'
|
||||
printf ' " allow_origins=[\\"*\\"],",\n'
|
||||
printf ' " allow_credentials=True,",\n'
|
||||
printf ' " allow_methods=[\\"*\\"],",\n'
|
||||
printf ' " allow_headers=[\\"*\\"],",\n'
|
||||
printf ' ")",\n'
|
||||
printf ' "",\n'
|
||||
printf '    "@app.get(\\"/\\")",\n'
|
||||
printf ' "async def root():",\n'
|
||||
printf ' " return {\\"message\\": \\"${2:Hello from AI-enhanced API}\\"}"\n'
|
||||
printf ' ],\n'
|
||||
printf ' "description": "AI-enhanced FastAPI template"\n'
|
||||
printf ' },\n'
|
||||
printf ' "React Component with AI": {\n'
|
||||
printf ' "prefix": "react-ai",\n'
|
||||
printf ' "body": [\n'
|
||||
printf ' "// AI-enhanced React component",\n'
|
||||
printf ' "import React, { useState, useEffect } from '\''react'\'';",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "interface ${1:Component}Props {",\n'
|
||||
printf ' " // AI-suggested props",\n'
|
||||
printf ' " title?: string;",\n'
|
||||
printf ' "}",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "const ${1:Component}: React.FC<${1:Component}Props> = ({ title = '\''${2:Default Title}'\'' }) => {",\n'
|
||||
printf ' " const [state, setState] = useState<string>('\'\'');",\n'
|
||||
printf ' "",\n'
|
||||
printf ' " // AI-suggested useEffect",\n'
|
||||
printf ' " useEffect(() => {",\n'
|
||||
printf ' " // Component initialization",\n'
|
||||
printf ' " }, []);",\n'
|
||||
printf ' "",\n'
|
||||
printf ' " return (",\n'
|
||||
printf ' " <div>",\n'
|
||||
printf ' " <h1>{title}</h1>",\n'
|
||||
printf ' " {/* AI-enhanced component content */}",\n'
|
||||
printf ' " </div>",\n'
|
||||
printf ' " );",\n'
|
||||
printf ' "};",\n'
|
||||
printf ' "",\n'
|
||||
printf ' "export default ${1:Component};"\n'
|
||||
printf ' ],\n'
|
||||
printf ' "description": "AI-enhanced React component template"\n'
|
||||
printf ' }\n'
|
||||
printf '}\n'
|
||||
} > "$WINDSURF_DIR/data/User/snippets/windsurf-ai.code-snippets"
|
||||
|
||||
# Create Windsurf-specific launch configuration
|
||||
{
|
||||
printf '{\n'
|
||||
printf ' "version": "0.2.0",\n'
|
||||
printf ' "configurations": [\n'
|
||||
printf ' {\n'
|
||||
printf ' "name": "Debug Node.js with AI",\n'
|
||||
printf ' "type": "node",\n'
|
||||
printf ' "request": "launch",\n'
|
||||
printf ' "program": "${workspaceFolder}/index.js",\n'
|
||||
printf ' "console": "integratedTerminal",\n'
|
||||
printf ' "internalConsoleOptions": "neverOpen"\n'
|
||||
printf ' },\n'
|
||||
printf ' {\n'
|
||||
printf ' "name": "Debug Python with AI",\n'
|
||||
printf ' "type": "python",\n'
|
||||
printf ' "request": "launch",\n'
|
||||
printf ' "program": "${workspaceFolder}/main.py",\n'
|
||||
printf ' "console": "integratedTerminal",\n'
|
||||
printf ' "python": "%s/.venv/bin/python"\n' "$HOME_DIR"
|
||||
printf ' }\n'
|
||||
printf ' ]\n'
|
||||
printf '}\n'
|
||||
} > "$WINDSURF_DIR/data/User/launch.json"
|
||||
|
||||
# Set proper ownership (Unix-like systems only)
|
||||
if [[ "$OSTYPE" != "msys" && "$OSTYPE" != "cygwin" && "$OSTYPE" != "win32" ]]; then
|
||||
if command -v chown >/dev/null 2>&1; then
|
||||
chown -R "$USER_NAME:$USER_NAME" "$WINDSURF_DIR" 2>/dev/null || {
|
||||
echo "⚠️ Could not set ownership - you may need to run with appropriate permissions"
|
||||
}
|
||||
fi
|
||||
fi
|
||||
|
||||
# Create Windsurf AI helper script
|
||||
{
|
||||
printf '#!/bin/bash\n'
|
||||
printf '# Windsurf AI Helper Commands\n'
|
||||
printf 'echo "🌊 Windsurf AI Assistant Helper"\n'
|
||||
printf 'echo "============================"\n'
|
||||
printf 'echo ""\n'
|
||||
printf 'echo "AI Features:"\n'
|
||||
printf 'echo " Ctrl+Shift+A # Open AI Chat"\n'
|
||||
printf 'echo " Ctrl+Shift+C # Generate Code with AI"\n'
|
||||
printf 'echo " Ctrl+Shift+R # Refactor Selection with AI"\n'
|
||||
printf 'echo " Ctrl+Shift+E # Explain Code with AI"\n'
|
||||
printf 'echo ""\n'
|
||||
printf 'echo "AI-Enhanced Snippets:"\n'
|
||||
printf 'echo " ai-comment # Add AI assistance comment"\n'
|
||||
printf 'echo " fastapi-ai # FastAPI with AI comments"\n'
|
||||
printf 'echo " react-ai # React component with AI"\n'
|
||||
printf 'echo ""\n'
|
||||
printf 'echo "Configuration Location:"\n'
|
||||
printf 'echo " Settings: %s/data/User/settings.json"\n' "$WINDSURF_DIR"
|
||||
printf 'echo " Keybindings: %s/data/User/keybindings.json"\n' "$WINDSURF_DIR"
|
||||
printf 'echo " Snippets: %s/data/User/snippets/"\n' "$WINDSURF_DIR"
|
||||
printf 'echo ""\n'
|
||||
printf 'echo "💡 Windsurf AI is enabled with optimized settings for development"\n'
|
||||
} > "$WINDSURF_DIR/windsurf-help"
|
||||
|
||||
# Make helper script executable (Unix-like systems only)
|
||||
if [[ "$OSTYPE" != "msys" && "$OSTYPE" != "cygwin" && "$OSTYPE" != "win32" ]]; then
|
||||
chmod +x "$WINDSURF_DIR/windsurf-help"
|
||||
fi
|
||||
|
||||
echo "✅ Windsurf IDE support configured"
|
||||
echo "🌊 Windsurf AI features enabled with optimized settings"
|
||||
echo "⌨️ Keyboard shortcuts: Ctrl+Shift+A (chat), Ctrl+Shift+C (generate), Ctrl+Shift+R (refactor)"
|
||||
echo "📁 Configuration stored in: $WINDSURF_DIR/"
|
||||
echo "💡 Run '$WINDSURF_DIR/windsurf-help' for quick reference"
|
||||
echo "Windsurf settings written to $WIND_DIR"
|
||||
|
||||
539
tf/scripts/workspace-setup.sh
Normal file → Executable file
@@ -1,497 +1,70 @@
|
||||
#!/bin/bash
|
||||
# Convert CRLF to LF if present (handles Windows line endings)
|
||||
if command -v dos2unix >/dev/null 2>&1; then
|
||||
dos2unix "$0" 2>/dev/null || true
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
USER_NAME="${USER:-coder}"
|
||||
HOME_DIR="${HOME:-/home/${USER_NAME}}"
|
||||
WORKSPACES_DIR="/workspaces"
|
||||
BIN_DIR="${HOME_DIR}/bin"
|
||||
META_DIR="/tmp/git-metadata"
|
||||
|
||||
mkdir -p "${BIN_DIR}" "${HOME_DIR}/.config" "${META_DIR}" "${WORKSPACES_DIR}"
|
||||
|
||||
if [[ -n "${CODER_WORKSPACE_ID:-}" ]]; then
|
||||
echo "${CODER_WORKSPACE_ID}" > "${META_DIR}/workspace-id"
|
||||
fi
|
||||
|
||||
set -e
|
||||
echo "🚀 Initializing development environment as user: $(whoami)"
|
||||
|
||||
# =============================================================================
|
||||
# Cross-platform system detection and configuration
|
||||
# =============================================================================
|
||||
detect_system() {
|
||||
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
|
||||
if command -v apt-get >/dev/null 2>&1; then
|
||||
SYSTEM="debian"
|
||||
PKG_MANAGER="apt-get"
|
||||
elif command -v yum >/dev/null 2>&1; then
|
||||
SYSTEM="rhel"
|
||||
PKG_MANAGER="yum"
|
||||
elif command -v pacman >/dev/null 2>&1; then
|
||||
SYSTEM="arch"
|
||||
PKG_MANAGER="pacman"
|
||||
else
|
||||
SYSTEM="linux"
|
||||
PKG_MANAGER="unknown"
|
||||
fi
|
||||
elif [[ "$OSTYPE" == "darwin"* ]]; then
|
||||
SYSTEM="macos"
|
||||
PKG_MANAGER="brew"
|
||||
elif [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then
|
||||
SYSTEM="windows"
|
||||
PKG_MANAGER="choco"
|
||||
else
|
||||
SYSTEM="unknown"
|
||||
PKG_MANAGER="unknown"
|
||||
fi
|
||||
}
|
||||
|
||||
detect_system
|
||||
|
||||
# Cross-platform directory and user detection
|
||||
if [[ "$SYSTEM" == "windows" ]]; then
|
||||
HOME_DIR="${USERPROFILE:-$HOME}"
|
||||
USER_NAME="${USERNAME:-${USER:-coder}}"
|
||||
WORKSPACES_DIR="$HOME_DIR/workspaces"
|
||||
TEMP_DIR="${TEMP:-/tmp}"
|
||||
BIN_DIR="$HOME_DIR/bin"
|
||||
CONFIG_DIR="$HOME_DIR/.config"
|
||||
else
|
||||
CURRENT_USER="$(whoami)"
|
||||
HOME_DIR="${HOME:-/home/$CURRENT_USER}"
|
||||
USER_NAME="$CURRENT_USER" # Always use actual current user, ignore $USER env var
|
||||
WORKSPACES_DIR="/workspaces"
|
||||
TEMP_DIR="/tmp"
|
||||
BIN_DIR="$HOME_DIR/bin"
|
||||
CONFIG_DIR="$HOME_DIR/.config"
|
||||
# ensure the dev workspace has sensible permissions
|
||||
if command -v chown >/dev/null 2>&1 && [[ "${EUID}" -eq 0 ]]; then
|
||||
chown -R "${USER_NAME}:${USER_NAME}" "${HOME_DIR}" "${WORKSPACES_DIR}" || true
|
||||
fi
|
||||
|
||||
echo "🔍 Running as user: $USER_NAME (actual: $(whoami), \$USER env: ${USER:-not set})"
|
||||
|
||||
# Set default versions if not provided
|
||||
NODE_VERSION="${NODE_VERSION:-20}"
|
||||
PYTHON_VERSION="${PYTHON_VERSION:-3.11}"
|
||||
GIT_AUTHOR_NAME="${GIT_AUTHOR_NAME:-Developer}"
|
||||
GIT_AUTHOR_EMAIL="${GIT_AUTHOR_EMAIL:-dev@example.com}"
|
||||
ENABLE_SERVICES="${ENABLE_SERVICES:-false}"
|
||||
|
||||
# =============================================================================
|
||||
# Create user and directories (Unix-like systems only)
|
||||
# =============================================================================
|
||||
if [[ "$SYSTEM" != "windows" && "$SYSTEM" != "macos" ]]; then
|
||||
if ! id -u "$USER_NAME" >/dev/null 2>&1; then
|
||||
echo "👤 Creating $USER_NAME user..."
|
||||
if [[ "$EUID" -eq 0 ]]; then
|
||||
useradd -m -s /bin/bash -u 1000 "$USER_NAME" 2>/dev/null || {
|
||||
echo "⚠️ Could not create user $USER_NAME"
|
||||
}
|
||||
if command -v usermod >/dev/null 2>&1; then
|
||||
usermod -aG sudo "$USER_NAME" 2>/dev/null || true
|
||||
fi
|
||||
if [[ -f "/etc/sudoers" ]]; then
|
||||
echo "$USER_NAME ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers 2>/dev/null || true
|
||||
fi
|
||||
else
|
||||
echo "⚠️ Not running as root, skipping user creation"
|
||||
fi
|
||||
fi
|
||||
# Configure git user (startup script already sets git config, but we guard here too)
|
||||
if command -v git >/dev/null 2>&1; then
|
||||
if [[ -n "${GIT_AUTHOR_NAME:-}" ]]; then
|
||||
git config --global user.name "${GIT_AUTHOR_NAME}"
|
||||
fi
|
||||
if [[ -n "${GIT_AUTHOR_EMAIL:-}" ]]; then
|
||||
git config --global user.email "${GIT_AUTHOR_EMAIL}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# =============================================================================
|
||||
# Create necessary directories
|
||||
# =============================================================================
|
||||
echo "📁 Creating user directories..."
|
||||
mkdir -p "$BIN_DIR"
|
||||
mkdir -p "$HOME_DIR/.local/bin"
|
||||
mkdir -p "$CONFIG_DIR"
|
||||
mkdir -p "$TEMP_DIR/git-metadata"
|
||||
mkdir -p "$WORKSPACES_DIR"
|
||||
# Build a lightweight devinfo helper for quick diagnostics.
|
||||
cat <<'SCRIPT' > "${BIN_DIR}/devinfo"
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# Ensure proper ownership (Unix-like systems only)
|
||||
if [[ "$SYSTEM" != "windows" ]] && command -v chown >/dev/null 2>&1; then
|
||||
if [[ "$USER_NAME" != "$(whoami)" ]] && [[ "$EUID" -eq 0 ]]; then
|
||||
chown -R "$USER_NAME:$USER_NAME" "$HOME_DIR" "$WORKSPACES_DIR" 2>/dev/null || true
|
||||
fi
|
||||
echo "Workspace diagnostics"
|
||||
echo "----------------------"
|
||||
echo "User: $(whoami)"
|
||||
echo "Home: ${HOME}"
|
||||
echo "Workspace: /workspaces"
|
||||
|
||||
if command -v node >/dev/null 2>&1; then
|
||||
echo "Node: $(node --version)"
|
||||
fi
|
||||
|
||||
# Save workspace ID if available
|
||||
if [[ -n "$CODER_WORKSPACE_ID" ]]; then
|
||||
echo "$CODER_WORKSPACE_ID" > "$TEMP_DIR/git-metadata/workspace-id"
|
||||
fi
|
||||
|
||||
# =============================================================================
|
||||
# Environment setup
|
||||
# =============================================================================
|
||||
echo "🔄 Setting up environment context..."
|
||||
export HOME="$HOME_DIR"
|
||||
export USER="$USER_NAME"
|
||||
|
||||
# =============================================================================
|
||||
# Git Configuration
|
||||
# =============================================================================
|
||||
echo "⚙️ Configuring Git..."
|
||||
git config --global user.name "$GIT_AUTHOR_NAME"
|
||||
git config --global user.email "$GIT_AUTHOR_EMAIL"
|
||||
git config --global commit.gpgsign false
|
||||
git config --global tag.gpgsign false
|
||||
git config --global init.defaultBranch main
|
||||
git config --global pull.rebase false
|
||||
|
||||
# Capture and log git information
|
||||
echo "📝 Capturing Git metadata..."
|
||||
cd "$WORKSPACES_DIR"
|
||||
if [ -d ".git" ]; then
|
||||
git branch --show-current > "$TEMP_DIR/git-metadata/current-branch" 2>/dev/null || printf "main" > "$TEMP_DIR/git-metadata/current-branch"
|
||||
git rev-parse HEAD > "$TEMP_DIR/git-metadata/commit-hash" 2>/dev/null || printf "no-commits" > "$TEMP_DIR/git-metadata/commit-hash"
|
||||
git remote get-url origin > "$TEMP_DIR/git-metadata/remote-url" 2>/dev/null || printf "no-remote" > "$TEMP_DIR/git-metadata/remote-url"
|
||||
else
|
||||
printf "no-repo" > "$TEMP_DIR/git-metadata/current-branch"
|
||||
printf "no-repo" > "$TEMP_DIR/git-metadata/commit-hash"
|
||||
printf "no-repo" > "$TEMP_DIR/git-metadata/remote-url"
|
||||
fi
|
||||
|
||||
# =============================================================================
|
||||
# System Package Updates and Installation
|
||||
# =============================================================================
|
||||
install_system_packages() {
|
||||
echo "📦 Installing system packages..."
|
||||
case "$SYSTEM" in
|
||||
"debian")
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
apt-get update -qq 2>/dev/null || true
|
||||
apt-get install -y make tree jq curl wget unzip build-essential postgresql-client redis-tools 2>/dev/null || {
|
||||
echo "⚠️ Some packages failed to install"
|
||||
}
|
||||
# Install ranger separately as it often fails with other packages
|
||||
apt-get install -y ranger 2>/dev/null || echo "⚠️ ranger installation failed"
|
||||
;;
|
||||
"rhel")
|
||||
yum update -y 2>/dev/null || dnf update -y 2>/dev/null || true
|
||||
yum groupinstall -y "Development Tools" 2>/dev/null || dnf groupinstall -y "Development Tools" 2>/dev/null || true
|
||||
yum install -y make tree jq curl wget unzip 2>/dev/null || dnf install -y make tree jq curl wget unzip 2>/dev/null || true
|
||||
;;
|
||||
"macos")
|
||||
if command -v brew >/dev/null 2>&1; then
|
||||
brew install make tree jq curl wget unzip 2>/dev/null || true
|
||||
else
|
||||
echo "⚠️ Homebrew not found. Please install build tools manually."
|
||||
fi
|
||||
;;
|
||||
"windows")
|
||||
if command -v choco >/dev/null 2>&1; then
|
||||
choco install -y make tree jq curl wget unzip 2>/dev/null || true
|
||||
elif command -v winget >/dev/null 2>&1; then
|
||||
winget install make tree jq curl wget unzip 2>/dev/null || true
|
||||
else
|
||||
echo "⚠️ Package manager not found. Please install build tools manually."
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
echo "⚠️ Unknown system. Please install build tools manually."
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Install packages based on privileges
|
||||
echo "📦 Installing system packages..."
|
||||
|
||||
# Add initial delay to let port forwarding script run first
|
||||
if [[ "$SYSTEM" == "debian" ]]; then
|
||||
echo "📦 Waiting for initial system setup to complete..."
|
||||
sleep 30
|
||||
fi
|
||||
|
||||
if [[ "$EUID" -eq 0 ]]; then
|
||||
# Running as root, no sudo needed
|
||||
case "$SYSTEM" in
|
||||
"debian")
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Simple wait for APT to be available
|
||||
echo "📦 Waiting for other package managers to finish..."
|
||||
retries=0
|
||||
max_retries=30
|
||||
|
||||
while [ $retries -lt $max_retries ]; do
|
||||
if ! (fuser /var/lib/apt/lists/lock >/dev/null 2>&1 || fuser /var/lib/dpkg/lock >/dev/null 2>&1 || fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1); then
|
||||
break
|
||||
fi
|
||||
echo "APT is locked, waiting... ($retries/$max_retries)"
|
||||
sleep 2
|
||||
retries=$((retries + 1))
|
||||
done
|
||||
|
||||
echo "📦 Updating package lists..."
|
||||
apt-get update 2>&1 | grep -v "^Get:" | grep -v "^Hit:" || true
|
||||
echo "📦 Installing core packages..."
|
||||
apt-get install -y make tree jq curl wget unzip build-essential postgresql-client redis-tools || {
|
||||
echo "⚠️ Some packages failed to install"
|
||||
}
|
||||
# Install ranger separately as it often fails with other packages
|
||||
echo "📦 Installing ranger file manager..."
|
||||
apt-get install -y ranger || echo "⚠️ ranger not available in this repository"
|
||||
;;
|
||||
"rhel")
|
||||
yum update -y 2>/dev/null || dnf update -y 2>/dev/null || true
|
||||
yum groupinstall -y "Development Tools" 2>/dev/null || dnf groupinstall -y "Development Tools" 2>/dev/null || true
|
||||
yum install -y make tree jq curl wget unzip postgresql redis ranger 2>/dev/null || dnf install -y make tree jq curl wget unzip postgresql redis ranger 2>/dev/null || true
|
||||
;;
|
||||
esac
|
||||
elif command -v sudo >/dev/null 2>&1; then
|
||||
# Not root but sudo is available
|
||||
echo "📦 Installing system packages with sudo..."
|
||||
case "$SYSTEM" in
|
||||
"debian")
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
sudo apt-get update -qq 2>/dev/null || true
|
||||
sudo apt-get install -y make tree jq curl wget unzip build-essential postgresql-client redis-tools ranger 2>/dev/null || {
|
||||
echo "⚠️ Some packages failed to install"
|
||||
}
|
||||
;;
|
||||
"rhel")
|
||||
sudo yum update -y 2>/dev/null || sudo dnf update -y 2>/dev/null || true
|
||||
sudo yum groupinstall -y "Development Tools" 2>/dev/null || sudo dnf groupinstall -y "Development Tools" 2>/dev/null || true
|
||||
sudo yum install -y make tree jq curl wget unzip postgresql redis ranger 2>/dev/null || sudo dnf install -y make tree jq curl wget unzip postgresql redis ranger 2>/dev/null || true
|
||||
;;
|
||||
esac
|
||||
elif [[ "$SYSTEM" == "macos" ]] || [[ "$SYSTEM" == "windows" ]]; then
|
||||
install_system_packages
|
||||
else
|
||||
echo "⚠️ Not running with appropriate privileges, skipping system package installation"
|
||||
fi
|
||||
|
||||
# =============================================================================
|
||||
# Node.js and npm Setup
|
||||
# =============================================================================
|
||||
echo "🟢 Setting up Node.js and npm..."
|
||||
|
||||
{
|
||||
printf '#!/bin/bash\n'
|
||||
printf '# Node.js setup script\n'
|
||||
printf 'if ! command -v nvm >/dev/null 2>&1; then\n'
|
||||
printf ' curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash 2>/dev/null || {\n'
|
||||
printf ' echo "⚠️ Failed to install nvm"\n'
|
||||
printf ' exit 1\n'
|
||||
printf ' }\n'
|
||||
printf ' export NVM_DIR="%s/.nvm"\n' "$HOME_DIR"
|
||||
printf ' [ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"\n'
|
||||
printf 'fi\n'
|
||||
printf '\n'
|
||||
printf 'export NVM_DIR="%s/.nvm"\n' "$HOME_DIR"
|
||||
printf '[ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh"\n'
|
||||
printf 'nvm install %s 2>/dev/null || echo "⚠️ Failed to install Node.js %s"\n' "$NODE_VERSION" "$NODE_VERSION"
|
||||
printf 'nvm use %s 2>/dev/null || true\n' "$NODE_VERSION"
|
||||
printf 'nvm alias default %s 2>/dev/null || true\n' "$NODE_VERSION"
|
||||
printf '\n'
|
||||
printf 'echo "📦 Installing npm packages..."\n'
|
||||
printf 'for package in repomix create-next-app nodemon concurrently @types/node typescript eslint prettier; do\n'
|
||||
printf ' npm install -g "$package" 2>/dev/null || echo "⚠️ Failed to install $package"\n'
|
||||
printf 'done\n'
|
||||
} > "$TEMP_DIR/node_setup.sh"
|
||||
|
||||
chmod +x "$TEMP_DIR/node_setup.sh"
|
||||
if [[ "$USER_NAME" != "$(whoami)" ]] && command -v su >/dev/null 2>&1; then
|
||||
su - "$USER_NAME" -c "$TEMP_DIR/node_setup.sh" 2>/dev/null || bash "$TEMP_DIR/node_setup.sh"
|
||||
else
|
||||
bash "$TEMP_DIR/node_setup.sh"
|
||||
fi
|
||||
rm -f "$TEMP_DIR/node_setup.sh"
|
||||
|
||||
# =============================================================================
|
||||
# Python Setup with uv
|
||||
# =============================================================================
|
||||
echo "🐍 Setting up Python and uv..."
|
||||
|
||||
# Install Python version (Linux only)
|
||||
if [[ "$SYSTEM" == "debian" ]] && [[ "$EUID" -eq 0 ]]; then
|
||||
apt-get install -y "python$PYTHON_VERSION" "python$PYTHON_VERSION-dev" "python$PYTHON_VERSION-venv" 2>/dev/null || {
|
||||
echo "⚠️ Failed to install Python $PYTHON_VERSION"
|
||||
}
|
||||
fi
|
||||
|
||||
{
|
||||
printf '#!/bin/bash\n'
|
||||
printf '# Python setup script\n'
|
||||
printf 'curl -LsSf https://astral.sh/uv/install.sh | sh 2>/dev/null || {\n'
|
||||
printf ' echo "⚠️ Failed to install uv"\n'
|
||||
printf ' exit 1\n'
|
||||
printf '}\n'
|
||||
printf 'export PATH="%s/.cargo/bin:$PATH"\n' "$HOME_DIR"
|
||||
printf '\n'
|
||||
printf 'echo "📦 Installing Python developer tools..."\n'
|
||||
printf '# Install tools that provide command-line executables\n'
|
||||
printf 'uv tool install ruff 2>/dev/null || echo "⚠️ Failed to install ruff"\n'
printf 'uv tool install mypy 2>/dev/null || echo "⚠️ Failed to install mypy"\n'
printf 'uv tool install black 2>/dev/null || echo "⚠️ Failed to install black"\n'
printf 'uv tool install pytest 2>/dev/null || echo "⚠️ Failed to install pytest"\n'
printf 'uv tool install poetry 2>/dev/null || echo "⚠️ Failed to install poetry"\n'
printf 'uv tool install ipython 2>/dev/null || echo "⚠️ Failed to install ipython"\n'
|
||||
printf '# Note: Libraries like pandas, requests, etc. should be installed in project virtual environments\n'
|
||||
printf '\n'
|
||||
printf 'uv venv "%s/.venv" --python=%s 2>/dev/null || echo "⚠️ Failed to create venv"\n' "$HOME_DIR" "$PYTHON_VERSION"
|
||||
printf '\n'
|
||||
printf '# Add venv activation to shell config\n'
|
||||
printf 'SHELL_RC="%s/.bashrc"\n' "$HOME_DIR"
|
||||
printf 'if [[ "$SHELL" == *"zsh"* && -f "%s/.zshrc" ]]; then\n' "$HOME_DIR"
|
||||
printf ' SHELL_RC="%s/.zshrc"\n' "$HOME_DIR"
|
||||
printf 'fi\n'
|
||||
printf 'if ! grep -q "source %s/.venv/bin/activate" "$SHELL_RC" 2>/dev/null; then\n' "$HOME_DIR"
|
||||
printf ' printf "source %s/.venv/bin/activate\\n" >> "$SHELL_RC"\n' "$HOME_DIR"
|
||||
printf 'fi\n'
|
||||
} > "$TEMP_DIR/python_setup.sh"
|
||||
|
||||
chmod +x "$TEMP_DIR/python_setup.sh"
|
||||
if [[ "$USER_NAME" != "$(whoami)" ]] && command -v su >/dev/null 2>&1; then
|
||||
su - "$USER_NAME" -c "$TEMP_DIR/python_setup.sh" 2>/dev/null || bash "$TEMP_DIR/python_setup.sh"
|
||||
else
|
||||
bash "$TEMP_DIR/python_setup.sh"
|
||||
fi
|
||||
rm -f "$TEMP_DIR/python_setup.sh"
|
||||
|
||||
# =============================================================================
|
||||
# Rust and Cargo Setup
|
||||
# =============================================================================
|
||||
echo "🦀 Installing Rust and Cargo..."
|
||||
|
||||
{
|
||||
printf '#!/bin/bash\n'
|
||||
printf '# Rust setup script\n'
|
||||
printf 'curl --proto "=https" --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain stable 2>/dev/null || {\n'
|
||||
printf ' echo "⚠️ Failed to install Rust"\n'
|
||||
printf ' exit 1\n'
|
||||
printf '}\n'
|
||||
printf 'source "%s/.cargo/env"\n' "$HOME_DIR"
|
||||
printf '\n'
|
||||
printf '# Add cargo to shell config\n'
|
||||
printf 'SHELL_RC="%s/.bashrc"\n' "$HOME_DIR"
|
||||
printf 'if [[ "$SHELL" == *"zsh"* && -f "%s/.zshrc" ]]; then\n' "$HOME_DIR"
|
||||
printf ' SHELL_RC="%s/.zshrc"\n' "$HOME_DIR"
|
||||
printf 'fi\n'
|
||||
printf 'if ! grep -q "export PATH=.*cargo.*bin" "$SHELL_RC" 2>/dev/null; then\n'
|
||||
printf ' printf "export PATH=\\"%s/.cargo/bin:$PATH\\"\\n" >> "$SHELL_RC"\n' "$HOME_DIR"
|
||||
printf 'fi\n'
|
||||
printf '\n'
|
||||
printf 'cargo install cargo-watch cargo-edit cargo-audit 2>/dev/null || echo "⚠️ Failed to install some cargo tools"\n'
|
||||
} > "$TEMP_DIR/rust_setup.sh"
|
||||
|
||||
chmod +x "$TEMP_DIR/rust_setup.sh"
|
||||
if [[ "$USER_NAME" != "$(whoami)" ]] && command -v su >/dev/null 2>&1; then
|
||||
su - "$USER_NAME" -c "$TEMP_DIR/rust_setup.sh" 2>/dev/null || bash "$TEMP_DIR/rust_setup.sh"
|
||||
else
|
||||
bash "$TEMP_DIR/rust_setup.sh"
|
||||
fi
|
||||
rm -f "$TEMP_DIR/rust_setup.sh"
|
||||
|
||||
# =============================================================================
|
||||
# repomix Installation
|
||||
# =============================================================================
|
||||
echo "📁 Installing repomix..."
|
||||
if command -v npm >/dev/null 2>&1; then
|
||||
if [[ "$USER_NAME" != "$(whoami)" ]] && command -v su >/dev/null 2>&1 && id "$USER_NAME" >/dev/null 2>&1; then
|
||||
su - "$USER_NAME" -c "npm install -g repomix 2>/dev/null || echo '⚠️ Failed to install repomix'"
|
||||
else
|
||||
npm install -g repomix 2>/dev/null || echo "⚠️ Failed to install repomix"
|
||||
fi
|
||||
else
|
||||
echo "⚠️ npm not available, skipping repomix installation"
|
||||
echo "npm: $(npm --version)"
|
||||
fi
|
||||
if command -v python3 >/dev/null 2>&1; then
|
||||
echo "Python: $(python3 --version | awk '{print $2}')"
|
||||
fi
|
||||
if command -v rustc >/dev/null 2>&1; then
|
||||
echo "Rust: $(rustc --version | awk '{print $2}')"
|
||||
fi
|
||||
if command -v cargo >/dev/null 2>&1; then
|
||||
echo "Cargo: $(cargo --version | awk '{print $2}')"
|
||||
fi
|
||||
|
||||
# =============================================================================
|
||||
# Shell Configuration
|
||||
# =============================================================================
|
||||
echo "🐚 Setting up shell environment..."
|
||||
|
||||
# Create devinfo script
|
||||
{
|
||||
printf '#!/bin/bash\n'
|
||||
printf '# Development environment info script\n'
|
||||
printf 'mkdir -p "%s"\n' "$BIN_DIR"
|
||||
printf '{\n'
|
||||
printf ' printf "#!/bin/bash\\n"\n'
|
||||
printf ' printf "echo '\''🚀 Development Environment Info'\''\\n"\n'
|
||||
printf ' printf "echo '\''==============================='\''\\n"\n'
|
||||
printf ' printf "echo \\'\\'\\n"\n'
|
||||
printf ' printf "echo '\''🔧 Installed Tools:'\''\\n"\n'
|
||||
printf ' printf "echo '\'' Node.js: '\''\\$(node --version 2>/dev/null || echo '\''Not found'\'')\\n"\n'
|
||||
printf ' printf "echo '\'' npm: '\''\\$(npm --version 2>/dev/null || echo '\''Not found'\'')\\n"\n'
|
||||
printf ' printf "echo '\'' Python: '\''\\$(python%s --version 2>/dev/null || echo '\''Not found'\'')\\n"\n' "$PYTHON_VERSION"
|
||||
printf ' printf "echo '\'' uv: '\''\\$(uv --version 2>/dev/null || echo '\''Not found'\'')\\n"\n'
|
||||
printf ' printf "echo '\'' Rust: '\''\\$(rustc --version 2>/dev/null || echo '\''Not found'\'')\\n"\n'
|
||||
printf ' printf "echo '\'' Cargo: '\''\\$(cargo --version 2>/dev/null || echo '\''Not found'\'')\\n"\n'
|
||||
printf ' printf "echo '\'' repomix: '\''\\$(repomix --version 2>/dev/null || echo '\''Not found'\'')\\n"\n'
|
||||
printf ' printf "echo \\'\\'\\n"\n'
|
||||
printf ' printf "echo '\''🗄️ Database Services:'\''\\n"\n'
|
||||
printf ' printf "if [ \\"%s\\" = \\"true\\" ]; then\\n" "%s"\n' "$ENABLE_SERVICES" "$ENABLE_SERVICES"
|
||||
printf ' printf " echo '\'' PostgreSQL: '\''\\${POSTGRES_URL:-Not configured}\\n"\n'
|
||||
printf ' printf " echo '\'' Redis: '\''\\${REDIS_URL:-Not configured}\\n"\n'
|
||||
printf ' printf " echo '\'' Qdrant: '\''\\${QDRANT_URL:-Not configured}\\n"\n'
|
||||
printf ' printf "else\\n"\n'
|
||||
printf ' printf " echo '\'' Services disabled'\''\\n"\n'
|
||||
printf ' printf "fi\\n"\n'
|
||||
printf ' printf "echo \\'\\'\\n"\n'
|
||||
printf ' printf "echo '\''📝 Git Metadata:'\''\\n"\n'
|
||||
printf ' printf "if [ -f %s/git-metadata/current-branch ]; then\\n" "%s"\n' "$TEMP_DIR" "$TEMP_DIR"
|
||||
printf ' printf " echo '\'' Branch: '\''\\$(cat %s/git-metadata/current-branch)\\n" "%s"\n' "$TEMP_DIR" "$TEMP_DIR"
|
||||
printf ' printf " echo '\'' Commit: '\''\\$(cat %s/git-metadata/commit-hash)\\n" "%s"\n' "$TEMP_DIR" "$TEMP_DIR"
|
||||
printf ' printf " echo '\'' Remote: '\''\\$(cat %s/git-metadata/remote-url)\\n" "%s"\n' "$TEMP_DIR" "$TEMP_DIR"
|
||||
printf ' printf "fi\\n"\n'
|
||||
printf '} > "%s/devinfo"\n' "$BIN_DIR"
|
||||
printf 'chmod +x "%s/devinfo"\n' "$BIN_DIR"
|
||||
} > "$TEMP_DIR/devinfo_script.sh"
|
||||
|
||||
chmod +x "$TEMP_DIR/devinfo_script.sh"
|
||||
if [[ "$USER_NAME" != "$(whoami)" ]] && command -v su >/dev/null 2>&1; then
|
||||
su - "$USER_NAME" -c "$TEMP_DIR/devinfo_script.sh" 2>/dev/null || bash "$TEMP_DIR/devinfo_script.sh"
|
||||
else
|
||||
bash "$TEMP_DIR/devinfo_script.sh"
|
||||
if [[ -n "${POSTGRES_URL:-}" ]]; then
|
||||
echo "PostgreSQL: ${POSTGRES_URL}"
|
||||
fi
|
||||
rm -f "$TEMP_DIR/devinfo_script.sh"
|
||||
|
||||
# Create shell aliases
|
||||
{
|
||||
printf '#!/bin/bash\n'
|
||||
printf '# Shell configuration script\n'
|
||||
printf 'SHELL_RC="%s/.bashrc"\n' "$HOME_DIR"
|
||||
printf 'if [[ "$SHELL" == *"zsh"* && -f "%s/.zshrc" ]]; then\n' "$HOME_DIR"
|
||||
printf ' SHELL_RC="%s/.zshrc"\n' "$HOME_DIR"
|
||||
printf 'fi\n'
|
||||
printf '\n'
|
||||
printf 'if ! grep -q "# Development Environment Aliases" "$SHELL_RC" 2>/dev/null; then\n'
|
||||
printf ' printf "\\n# Development Environment Aliases\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias ll='\''ls -alF'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias la='\''ls -A'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias l='\''ls -CF'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias gs='\''git status'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias gp='\''git push'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias gc='\''git commit'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias gco='\''git checkout'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias gb='\''git branch'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias devinfo='\''%s/devinfo'\''\\n" >> "$SHELL_RC"\n' "$BIN_DIR"
|
||||
printf ' printf "alias pip='\''uv pip'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias python='\''python%s'\''\\n" >> "$SHELL_RC"\n' "$PYTHON_VERSION"
|
||||
printf ' if command -v docker >/dev/null 2>&1; then\n'
|
||||
printf ' printf "alias dps='\''docker ps'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias dimg='\''docker images'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' printf "alias dlog='\''docker logs'\''\\n" >> "$SHELL_RC"\n'
|
||||
printf ' fi\n'
|
||||
printf ' printf "\\n" >> "$SHELL_RC"\n'
|
||||
printf 'fi\n'
|
||||
} > "$TEMP_DIR/bashrc_setup.sh"
|
||||
|
||||
chmod +x "$TEMP_DIR/bashrc_setup.sh"
|
||||
if [[ "$USER_NAME" != "$(whoami)" ]] && command -v su >/dev/null 2>&1; then
|
||||
su - "$USER_NAME" -c "$TEMP_DIR/bashrc_setup.sh" 2>/dev/null || bash "$TEMP_DIR/bashrc_setup.sh"
|
||||
else
|
||||
bash "$TEMP_DIR/bashrc_setup.sh"
|
||||
if [[ -n "${REDIS_URL:-}" ]]; then
|
||||
echo "Redis: ${REDIS_URL}"
|
||||
fi
|
||||
rm -f "$TEMP_DIR/bashrc_setup.sh"
|
||||
|
||||
# =============================================================================
|
||||
# Final Environment Setup
|
||||
# =============================================================================
|
||||
echo "✅ Development environment initialization complete!"
|
||||
echo ""
|
||||
echo "🎉 Available tools:"
|
||||
printf " - Node.js %s with npm packages\n" "$NODE_VERSION"
|
||||
printf " - Python %s with uv package manager\n" "$PYTHON_VERSION"
|
||||
echo " - Rust with Cargo"
|
||||
echo " - repomix for repository packaging"
|
||||
echo " - make, tree, and other build tools"
|
||||
if [ "$ENABLE_SERVICES" = "true" ]; then
|
||||
echo " - PostgreSQL, Redis, Qdrant databases"
|
||||
if [[ -n "${QDRANT_URL:-}" ]]; then
|
||||
echo "Qdrant: ${QDRANT_URL}"
|
||||
fi
|
||||
echo ""
|
||||
echo "🔧 Run 'devinfo' for detailed environment information"
|
||||
echo "🚀 Ready for development!"
|
||||
echo "💡 Restart your shell or source your shell config to use new aliases"
|
||||
SCRIPT
|
||||
chmod +x "${BIN_DIR}/devinfo"
|
||||
|
||||
echo "Workspace initialisation complete. Run 'devinfo' for a quick status summary."
|
||||
|
||||
109
tf/services.tf
@@ -1,38 +1,26 @@
|
||||
# =============================================================================
|
||||
# Service Containers - Database and Development Services
|
||||
# PostgreSQL, Redis, Qdrant, Docker Registry
|
||||
# =============================================================================
|
||||
# Data services run inside the per-workspace Docker network. They stay optional
|
||||
# so light-weight workspaces can skip all of them.
|
||||
|
||||
# =============================================================================
|
||||
# PostgreSQL Database Service
|
||||
# =============================================================================
|
||||
|
||||
# PostgreSQL data volume for persistence
|
||||
resource "docker_volume" "postgres_data" {
|
||||
count = data.coder_parameter.enable_services.value ? 1 : 0
|
||||
count = local.services_enabled ? 1 : 0
|
||||
name = "postgres-data-${local.workspace_id}"
|
||||
|
||||
labels {
|
||||
label = "coder.service"
|
||||
value = "postgres"
|
||||
}
|
||||
|
||||
labels {
|
||||
label = "coder.workspace_id"
|
||||
value = local.workspace_id
|
||||
}
|
||||
}
|
||||
|
||||
# PostgreSQL container
|
||||
resource "docker_container" "postgres" {
|
||||
count = data.coder_parameter.enable_services.value ? 1 : 0
|
||||
count = local.services_enabled ? 1 : 0
|
||||
image = "postgres:${var.postgres_version}-alpine"
|
||||
name = "postgres-${local.workspace_id}"
|
||||
|
||||
# Resource limits for better isolation
|
||||
memory = 2048 * 1024 * 1024 # 2GB
|
||||
cpu_shares = 512 # Medium priority
|
||||
|
||||
# PostgreSQL configuration
|
||||
env = [
|
||||
"POSTGRES_DB=postgres",
|
||||
"POSTGRES_USER=postgres",
|
||||
@@ -42,24 +30,21 @@ resource "docker_container" "postgres" {
|
||||
"POSTGRES_MAX_CONNECTIONS=${var.postgres_max_connections}"
|
||||
]
|
||||
|
||||
# Network configuration - internal only, accessible via Coder port forwarding
|
||||
networks_advanced {
|
||||
name = docker_network.workspace.name
|
||||
}
|
||||
|
||||
# Data persistence
|
||||
volumes {
|
||||
volume_name = docker_volume.postgres_data[0].name
|
||||
container_path = "/var/lib/postgresql/data"
|
||||
}
|
||||
|
||||
# Health check with proper timing for database startup
|
||||
healthcheck {
|
||||
test = ["CMD-SHELL", "pg_isready -U postgres"]
|
||||
interval = "15s"
|
||||
timeout = "5s"
|
||||
retries = 5
|
||||
start_period = "30s" # Give PostgreSQL time to initialize
|
||||
start_period = "30s"
|
||||
}
|
||||
|
||||
restart = "unless-stopped"
|
||||
@@ -68,42 +53,33 @@ resource "docker_container" "postgres" {
|
||||
label = "coder.service"
|
||||
value = "postgres"
|
||||
}
|
||||
|
||||
labels {
|
||||
label = "coder.workspace_id"
|
||||
value = local.workspace_id
|
||||
}
|
||||
}
|
||||
|
||||
# =============================================================================
|
||||
# Redis Cache Service
|
||||
# =============================================================================
|
||||
|
||||
# Redis data volume for persistence
|
||||
resource "docker_volume" "redis_data" {
|
||||
count = data.coder_parameter.enable_services.value ? 1 : 0
|
||||
count = local.services_enabled ? 1 : 0
|
||||
name = "redis-data-${local.workspace_id}"
|
||||
|
||||
labels {
|
||||
label = "coder.service"
|
||||
value = "redis"
|
||||
}
|
||||
|
||||
labels {
|
||||
label = "coder.workspace_id"
|
||||
value = local.workspace_id
|
||||
}
|
||||
}
|
||||
|
||||
# Redis container
|
||||
resource "docker_container" "redis" {
|
||||
count = data.coder_parameter.enable_services.value ? 1 : 0
|
||||
count = local.services_enabled ? 1 : 0
|
||||
image = "redis:${var.redis_version}-alpine"
|
||||
name = "redis-${local.workspace_id}"
|
||||
|
||||
# Resource limits for better isolation
|
||||
memory = 1024 * 1024 * 1024 # 1GB
|
||||
cpu_shares = 256 # Lower priority
|
||||
|
||||
# Redis configuration with authentication
|
||||
command = [
|
||||
"redis-server",
|
||||
"--requirepass", var.redis_password,
|
||||
@@ -117,19 +93,17 @@ resource "docker_container" "redis" {
|
||||
name = docker_network.workspace.name
|
||||
}
|
||||
|
||||
# Data persistence
|
||||
volumes {
|
||||
volume_name = docker_volume.redis_data[0].name
|
||||
container_path = "/data"
|
||||
}
|
||||
|
||||
# Health check with authentication and proper timing
|
||||
healthcheck {
|
||||
test = ["CMD", "redis-cli", "-a", var.redis_password, "ping"]
|
||||
interval = "15s"
|
||||
timeout = "3s"
|
||||
retries = 5
|
||||
start_period = "10s" # Redis starts quickly
|
||||
start_period = "10s"
|
||||
}
|
||||
|
||||
restart = "unless-stopped"
|
||||
@@ -138,42 +112,33 @@ resource "docker_container" "redis" {
|
||||
label = "coder.service"
|
||||
value = "redis"
|
||||
}
|
||||
|
||||
labels {
|
||||
label = "coder.workspace_id"
|
||||
value = local.workspace_id
|
||||
}
|
||||
}
|
||||
|
||||
# =============================================================================
|
||||
# Qdrant Vector Database Service
|
||||
# =============================================================================
|
||||
|
||||
# Qdrant data volume for persistence
|
||||
resource "docker_volume" "qdrant_data" {
|
||||
count = data.coder_parameter.enable_services.value ? 1 : 0
|
||||
count = local.services_enabled ? 1 : 0
|
||||
name = "qdrant-data-${local.workspace_id}"
|
||||
|
||||
labels {
|
||||
label = "coder.service"
|
||||
value = "qdrant"
|
||||
}
|
||||
|
||||
labels {
|
||||
label = "coder.workspace_id"
|
||||
value = local.workspace_id
|
||||
}
|
||||
}
|
||||
|
||||
# Qdrant container
|
||||
resource "docker_container" "qdrant" {
|
||||
count = data.coder_parameter.enable_services.value ? 1 : 0
|
||||
count = local.services_enabled ? 1 : 0
|
||||
image = "qdrant/qdrant:${var.qdrant_version}"
|
||||
name = "qdrant-${local.workspace_id}"
|
||||
|
||||
# Resource limits for better isolation
|
||||
memory = 2048 * 1024 * 1024 # 2GB for vector operations
|
||||
cpu_shares = 512 # Medium priority
|
||||
|
||||
# Qdrant configuration
|
||||
env = [
|
||||
"QDRANT__SERVICE__HTTP_PORT=6333",
|
||||
"QDRANT__SERVICE__GRPC_PORT=6334",
|
||||
@@ -185,19 +150,17 @@ resource "docker_container" "qdrant" {
|
||||
name = docker_network.workspace.name
|
||||
}
|
||||
|
||||
# Data persistence
|
||||
volumes {
|
||||
volume_name = docker_volume.qdrant_data[0].name
|
||||
container_path = "/qdrant/storage"
|
||||
}
|
||||
|
||||
# Health check using Qdrant's HTTP endpoint with proper timing
|
||||
healthcheck {
|
||||
test = ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:6333/health || exit 1"]
|
||||
interval = "20s"
|
||||
timeout = "5s"
|
||||
retries = 5
|
||||
start_period = "40s" # Qdrant needs time to initialize storage
|
||||
start_period = "40s"
|
||||
}
|
||||
|
||||
restart = "unless-stopped"
|
||||
@@ -206,39 +169,33 @@ resource "docker_container" "qdrant" {
|
||||
label = "coder.service"
|
||||
value = "qdrant"
|
||||
}
|
||||
|
||||
labels {
|
||||
label = "coder.workspace_id"
|
||||
value = local.workspace_id
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# pgAdmin for PostgreSQL Management (Optional)
|
||||
# =============================================================================
|
||||
|
||||
# pgAdmin data volume
|
||||
resource "docker_volume" "pgadmin_data" {
|
||||
count = data.coder_parameter.enable_services.value && data.coder_parameter.enable_pgadmin.value ? 1 : 0
|
||||
count = local.services_enabled && data.coder_parameter.enable_pgadmin.value ? 1 : 0
|
||||
name = "pgadmin-data-${local.workspace_id}"
|
||||
|
||||
labels {
|
||||
label = "coder.service"
|
||||
value = "pgadmin"
|
||||
}
|
||||
|
||||
labels {
|
||||
label = "coder.workspace_id"
|
||||
value = local.workspace_id
|
||||
}
|
||||
}
|
||||
|
||||
# pgAdmin container
|
||||
resource "docker_container" "pgadmin" {
|
||||
count = data.coder_parameter.enable_services.value && data.coder_parameter.enable_pgadmin.value ? 1 : 0
|
||||
count = local.services_enabled && data.coder_parameter.enable_pgadmin.value ? 1 : 0
|
||||
image = "dpage/pgadmin4:latest"
|
||||
name = "pgadmin-${local.workspace_id}"
|
||||
|
||||
# pgAdmin configuration
|
||||
env = [
|
||||
"PGADMIN_DEFAULT_EMAIL=${var.pgadmin_email}",
|
||||
"PGADMIN_DEFAULT_PASSWORD=${var.pgadmin_password}",
|
||||
@@ -251,13 +208,11 @@ resource "docker_container" "pgadmin" {
|
||||
name = docker_network.workspace.name
|
||||
}
|
||||
|
||||
# Data persistence
|
||||
volumes {
|
||||
volume_name = docker_volume.pgadmin_data[0].name
|
||||
container_path = "/var/lib/pgadmin"
|
||||
}
|
||||
|
||||
# Health check for pgAdmin web interface
|
||||
healthcheck {
|
||||
test = ["CMD-SHELL", "nc -z localhost 80 || exit 1"]
|
||||
interval = "30s"
|
||||
@@ -272,17 +227,13 @@ resource "docker_container" "pgadmin" {
|
||||
label = "coder.service"
|
||||
value = "pgadmin"
|
||||
}
|
||||
|
||||
labels {
|
||||
label = "coder.workspace_id"
|
||||
value = local.workspace_id
|
||||
}
|
||||
}
|
||||
|
||||
# =============================================================================
|
||||
# Jupyter Lab Service (Optional for Data Science)
|
||||
# =============================================================================
|
||||
|
||||
# Jupyter data volume
|
||||
resource "docker_volume" "jupyter_data" {
|
||||
count = data.coder_parameter.enable_jupyter.value ? 1 : 0
|
||||
name = "jupyter-data-${local.workspace_id}"
|
||||
@@ -291,19 +242,18 @@ resource "docker_volume" "jupyter_data" {
|
||||
label = "coder.service"
|
||||
value = "jupyter"
|
||||
}
|
||||
|
||||
labels {
|
||||
label = "coder.workspace_id"
|
||||
value = local.workspace_id
|
||||
}
|
||||
}
|
||||
|
||||
# Jupyter Lab container
|
||||
resource "docker_container" "jupyter" {
|
||||
count = data.coder_parameter.enable_jupyter.value ? 1 : 0
|
||||
image = "jupyter/scipy-notebook:latest"
|
||||
name = "jupyter-${local.workspace_id}"
|
||||
|
||||
# Jupyter configuration
|
||||
env = [
|
||||
"JUPYTER_ENABLE_LAB=yes",
|
||||
"JUPYTER_TOKEN=",
|
||||
@@ -315,19 +265,21 @@ resource "docker_container" "jupyter" {
|
||||
name = docker_network.workspace.name
|
||||
}
|
||||
|
||||
# Port accessible within workspace network - no host exposure needed
|
||||
|
||||
# Data persistence
|
||||
volumes {
|
||||
volume_name = docker_volume.jupyter_data[0].name
|
||||
container_path = "/home/jovyan/work"
|
||||
}
|
||||
|
||||
# Share workspace volume
|
||||
volumes {
|
||||
volume_name = docker_volume.workspaces.name
|
||||
container_path = "/home/jovyan/workspaces"
|
||||
read_only = false
|
||||
}
|
||||
|
||||
healthcheck {
|
||||
test = ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8888"]
|
||||
interval = "30s"
|
||||
timeout = "10s"
|
||||
retries = 5
|
||||
}
|
||||
|
||||
restart = "unless-stopped"
|
||||
@@ -336,8 +288,9 @@ resource "docker_container" "jupyter" {
|
||||
label = "coder.service"
|
||||
value = "jupyter"
|
||||
}
|
||||
|
||||
labels {
|
||||
label = "coder.workspace_id"
|
||||
value = local.workspace_id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,107 +1,8 @@
|
||||
# =============================================================================
|
||||
# Terraform Variables Configuration
|
||||
# Variable assignments for the development environment
|
||||
# =============================================================================
|
||||
|
||||
# =============================================================================
|
||||
# Project Configuration
|
||||
# =============================================================================
project_name = "dev-environment"
environment = "dev"

# =============================================================================
# Docker Configuration
# =============================================================================
docker_socket = ""
# devcontainer_image is deprecated, using devcontainer_repo_url instead
devcontainer_repo_url = "https://github.com/vasceannie/code-tools.git"
envbuilder_cache_repo = "local" # Set to your registry URL for faster builds, e.g. "ghcr.io/username/cache"

# =============================================================================
# Development Tool Versions
# =============================================================================
node_version = "20"
python_version = "3.12"
postgres_version = "17"
redis_version = "7"
qdrant_version = "latest"

# =============================================================================
# Service Configuration
# =============================================================================
devcontainer_image = "mcr.microsoft.com/devcontainers/universal:2-linux"
workspace_memory_limit = 8192
postgres_password = "devpassword"
redis_password = "devpassword"
postgres_max_connections = 100
redis_max_memory = "512mb"

# =============================================================================
# Network Configuration
# =============================================================================
pgadmin_port = 5050
pgadmin_email = "admin@dev.local"
pgadmin_password = "adminpassword"

# =============================================================================
# Development Packages
# =============================================================================
npm_packages = [
"repomix",
"create-next-app",
"nodemon",
"concurrently",
"@types/node",
"typescript",
"eslint",
"prettier"
]

python_packages = [
"fastapi",
"uvicorn",
"requests",
"pandas",
"numpy",
"psycopg2-binary",
"redis",
"qdrant-client",
"python-dotenv"
]

system_packages = [
"make",
"tree",
"jq",
"curl",
"wget",
"unzip",
"build-essential"
]

# =============================================================================
# AI Development Tools
# =============================================================================
install_claude_code = true
install_cursor_support = true
install_windsurf_support = true

# =============================================================================
# Performance Configuration
# =============================================================================
workspace_memory_limit = 16384
workspace_cpu_limit = 4

# =============================================================================
# Feature Toggles
# =============================================================================
enable_pgadmin = true
enable_monitoring = true
enable_jupyter = false

# =============================================================================
# Common Tags
# =============================================================================
common_tags = {
Environment = "development"
ManagedBy = "terraform"
Purpose = "remote-development"
}
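The three package lists above are consumed when the workspace boots. As a rough sketch only (none of this is in the commit), a startup script could expand them with join(); the agent name, the apt/npm/uv commands, and the assumption of a Debian-based image are all illustrative:

# Hypothetical sketch: installing the tfvars package lists from a startup script.
resource "coder_agent" "pkg_example" {
  arch = "amd64"
  os   = "linux"

  startup_script = <<-EOT
    # System packages (assumes a Debian/Ubuntu base image)
    sudo apt-get update && sudo apt-get install -y ${join(" ", var.system_packages)}
    # Global npm tooling
    npm install -g ${join(" ", var.npm_packages)}
    # Python packages via uv
    uv pip install --system ${join(" ", var.python_packages)}
  EOT
}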
tf/variables.tf
@@ -1,352 +1,116 @@
# =============================================================================
# Variable Definitions
# Modular Development Environment Configuration
# =============================================================================

# =============================================================================
# Project Configuration
# =============================================================================

variable "project_name" {
description = "Name of the project for resource tagging and identification"
variable "host_home_path" {
description = "Absolute path to the host home directory for bind mounts."
type = string
default = "dev-environment"

validation {
condition = can(regex("^[a-z0-9-]+$", var.project_name))
error_message = "Project name must contain only lowercase letters, numbers, and hyphens."
}
default = "/home/trav"
}

variable "environment" {
description = "Environment designation (dev, staging, prod)"
type = string
default = "dev"

validation {
condition = contains(["dev", "staging", "prod"], var.environment)
error_message = "Environment must be one of: dev, staging, prod."
}
}

# =============================================================================
# Docker Configuration
# =============================================================================

variable "docker_socket" {
description = "Docker daemon socket URI (empty for default)"
description = "Docker daemon socket URI (leave blank for default)."
type = string
default = ""
}

variable "devcontainer_image" {
description = "Development container image with all required tools pre-installed (deprecated - use devcontainer_repo_url)"
description = "Container image used for the main workspace."
type = string
default = "mcr.microsoft.com/devcontainers/universal:2-linux"
}

variable "devcontainer_repo_url" {
description = "Git repository URL containing the devcontainer configuration"
type = string
default = "https://github.com/vasceannie/code-tools.git"
variable "workspace_memory_limit" {
description = "Workspace memory limit in MB. Use 0 to inherit the image defaults."
type = number
default = 8192
}

variable "envbuilder_cache_repo" {
description = "Docker registry to use for caching envbuilder layers (e.g., 'ghcr.io/username/cache')"
type = string
default = "local"
variable "enable_docker_in_docker" {
description = "Mount /var/run/docker.sock into the workspace container."
type = bool
default = true
}

# =============================================================================
# Development Tool Versions
# =============================================================================

variable "node_version" {
description = "Node.js version to install"
type = string
default = "20"

validation {
condition = contains(["18", "20", "21"], var.node_version)
error_message = "Node.js version must be one of: 18, 20, 21."
}
}

variable "python_version" {
description = "Python version to install"
type = string
default = "3.12"

validation {
condition = contains(["3.10", "3.11", "3.12"], var.python_version)
error_message = "Python version must be 3.10, 3.11, or 3.12."
}
variable "block_file_transfer" {
description = "Set CODER_AGENT_BLOCK_FILE_TRANSFER=1 to disable file transfer tooling."
type = bool
default = false
}

variable "postgres_version" {
description = "PostgreSQL version"
description = "PostgreSQL image tag."
type = string
default = "17"
}

validation {
condition = contains(["13", "14", "15", "16", "17"], var.postgres_version)
error_message = "PostgreSQL version must be one of: 13, 14, 15, 16, 17."
}
variable "postgres_password" {
description = "PostgreSQL password for the postgres user."
type = string
default = "devpassword"
sensitive = true
}

variable "postgres_max_connections" {
description = "Maximum PostgreSQL connections."
type = number
default = 100
}

variable "redis_version" {
description = "Redis version"
description = "Redis image tag."
type = string
default = "7"
}

validation {
condition = contains(["6", "7"], var.redis_version)
error_message = "Redis version must be 6 or 7."
}
variable "redis_password" {
description = "Redis AUTH password."
type = string
default = "devpassword"
sensitive = true
}

variable "redis_max_memory" {
description = "Redis maxmemory value (e.g. 256mb)."
type = string
default = "512mb"
}

variable "qdrant_version" {
description = "Qdrant vector database version"
description = "Qdrant image tag."
type = string
default = "latest"
}

# =============================================================================
# Service Configuration
# =============================================================================

variable "postgres_password" {
description = "PostgreSQL postgres user password"
type = string
default = "devpassword"
sensitive = true

validation {
condition = length(var.postgres_password) >= 8
error_message = "PostgreSQL password must be at least 8 characters long."
}
}

variable "redis_password" {
description = "Redis authentication password"
type = string
default = "devpassword"
sensitive = true

validation {
condition = length(var.redis_password) >= 8
error_message = "Redis password must be at least 8 characters long."
}
}

variable "postgres_max_connections" {
description = "Maximum PostgreSQL connections"
type = number
default = 100

validation {
condition = var.postgres_max_connections >= 20 && var.postgres_max_connections <= 1000
error_message = "PostgreSQL max connections must be between 20 and 1000."
}
}

variable "redis_max_memory" {
description = "Redis maximum memory (e.g., '256mb', '1gb')"
type = string
default = "512mb"

validation {
condition = can(regex("^[0-9]+[kmg]b$", var.redis_max_memory))
error_message = "Redis max memory must be in format like '256mb' or '1gb'."
}
}

# =============================================================================
# Network Configuration
# =============================================================================

variable "pgadmin_port" {
description = "pgAdmin web interface port"
type = number
default = 5050

validation {
condition = var.pgadmin_port >= 1024 && var.pgadmin_port <= 65535
error_message = "pgAdmin port must be between 1024 and 65535."
}
}

variable "pgadmin_email" {
description = "pgAdmin login email"
description = "pgAdmin login email."
type = string
default = "admin@example.com"

validation {
condition = can(regex("^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$", var.pgadmin_email))
error_message = "pgAdmin email must be a valid email address."
}
default = "admin@dev.local"
}

variable "pgadmin_password" {
description = "pgAdmin login password"
description = "pgAdmin login password."
type = string
default = "adminpassword"
sensitive = true

validation {
condition = length(var.pgadmin_password) >= 8
error_message = "pgAdmin password must be at least 8 characters long."
}
}

# =============================================================================
# Development Packages
# =============================================================================

variable "npm_packages" {
description = "Global npm packages to install"
type = list(string)
default = [
"repomix", # Repository packaging tool
"create-next-app", # Next.js app generator
"nodemon", # Development server auto-reload
"concurrently", # Run multiple commands
"@types/node", # Node.js TypeScript types
"typescript", # TypeScript compiler
"eslint", # JavaScript linter
"prettier" # Code formatter
]
}

variable "python_packages" {
description = "Python packages to install via uv"
type = list(string)
default = [
"fastapi", # Modern web framework
"uvicorn", # ASGI server
"requests", # HTTP library
"pandas", # Data manipulation
"numpy", # Numerical computing
"psycopg2-binary", # PostgreSQL adapter
"redis", # Redis client
"qdrant-client", # Qdrant vector database client
"python-dotenv" # Environment variable loading
]
}

variable "system_packages" {
description = "Additional system packages to install"
type = list(string)
default = [
"make", # Build tool
"tree", # Directory tree viewer
"jq", # JSON processor
"curl", # HTTP client
"wget", # File downloader
"unzip", # Archive extractor
"build-essential" # Compilation tools
]
}

# =============================================================================
# AI Development Tools
# =============================================================================

variable "install_claude_code" {
description = "Install Claude Code CLI for AI assistance"
description = "Install the Claude CLI helper when AI tooling is enabled."
type = bool
default = true
}

variable "install_cursor_support" {
description = "Install Cursor IDE support and extensions"
description = "Install Cursor configuration when AI tooling is enabled."
type = bool
default = true
}

variable "install_windsurf_support" {
description = "Install Windsurf IDE support"
type = bool
default = false
}

# =============================================================================
# Performance Configuration
# =============================================================================

variable "workspace_memory_limit" {
description = "Memory limit for workspace container (MB)"
type = number
default = 8192

validation {
condition = var.workspace_memory_limit >= 2048 && var.workspace_memory_limit <= 32768
error_message = "Workspace memory limit must be between 2048MB (2GB) and 32768MB (32GB)."
}
}

variable "workspace_cpu_limit" {
description = "CPU limit for workspace container (cores)"
type = number
default = 4

validation {
condition = var.workspace_cpu_limit >= 1 && var.workspace_cpu_limit <= 16
error_message = "Workspace CPU limit must be between 1 and 16 cores."
}
}

# =============================================================================
# Feature Toggles
# =============================================================================

variable "enable_pgadmin" {
description = "Enable pgAdmin web interface (resource intensive)"
description = "Install Windsurf configuration when AI tooling is enabled."
type = bool
default = true
}

variable "enable_monitoring" {
description = "Enable container monitoring and metrics collection"
type = bool
default = false
}

variable "enable_jupyter" {
description = "Enable Jupyter Lab for data science workflows"
type = bool
default = false
}

variable "enable_docker_in_docker" {
description = "Enable Docker-in-Docker by mounting the Docker socket (Consider using Sysbox for better security)"
variable "install_codex_support" {
description = "Install OpenAI Codex CLI when AI tooling is enabled."
type = bool
default = true
}

variable "use_sysbox_runtime" {
description = "Use Sysbox runtime for secure container isolation (requires Sysbox installed on host)"
type = bool
default = false
}

variable "block_file_transfer" {
description = "Block file transfer commands (scp, rsync, ftp, nc) to prevent data exfiltration"
type = bool
default = false
}

# =============================================================================
# Common Tags
# =============================================================================

variable "common_tags" {
description = "Common tags to apply to all resources"
type = map(string)
default = {
Environment = "development"
ManagedBy = "terraform"
Purpose = "remote-development"
}
}
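The tf/workspace.tf diff below shows enable_docker_in_docker feeding a dynamic volumes block, but it never shows how use_sysbox_runtime is applied. Here is a minimal sketch of one way both toggles could be wired into the container resource, assuming they are treated as mutually exclusive; only the "sysbox-runc" runtime name comes from Sysbox itself, the rest is illustrative:

# Hypothetical sketch: consuming the container-isolation toggles defined above.
resource "docker_container" "runtime_example" {
  name  = "workspace-runtime-example"
  image = var.devcontainer_image

  # Sysbox registers itself on the host as the "sysbox-runc" runtime
  runtime = var.use_sysbox_runtime ? "sysbox-runc" : null

  # Mount the host Docker socket only when Sysbox is not providing isolation
  dynamic "volumes" {
    for_each = var.enable_docker_in_docker && !var.use_sysbox_runtime ? [1] : []
    content {
      host_path      = "/var/run/docker.sock"
      container_path = "/var/run/docker.sock"
    }
  }
}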
tf/workspace.tf
@@ -1,157 +1,121 @@
# =============================================================================
# Development Workspace Container
# Main development environment with all required tools
# =============================================================================

# =============================================================================
# Coder Agent - Workspace Management
# =============================================================================

resource "coder_agent" "main" {
arch = data.coder_provisioner.me.arch
os = "linux"
dir = "/workspaces"

# Environment variables for development
env = {
"GIT_AUTHOR_NAME" = local.git_author_name
"GIT_AUTHOR_EMAIL" = local.git_author_email
"GIT_COMMITTER_NAME" = local.git_author_name
"GIT_COMMITTER_EMAIL" = local.git_author_email
"NODE_VERSION" = var.node_version
"PYTHON_VERSION" = var.python_version
"PATH" = "$PATH:/home/coder/bin:/home/coder/.cargo/bin:/home/coder/.local/bin:/usr/local/bin"
"HOME" = "/home/coder"
"USER" = "coder"
# Suppress NVM symlink warnings
"NVM_SYMLINK_CURRENT" = "false"
# Workspace ID for scripts
"CODER_WORKSPACE_ID" = local.workspace_id
# Service URLs for development
"POSTGRES_URL" = data.coder_parameter.enable_services.value ? "postgresql://postgres:${var.postgres_password}@postgres-${local.workspace_id}:5432/postgres" : ""
"REDIS_URL" = data.coder_parameter.enable_services.value ? "redis://:${var.redis_password}@redis-${local.workspace_id}:6379" : ""
"QDRANT_URL" = data.coder_parameter.enable_services.value ? "http://qdrant-${local.workspace_id}:6333" : ""
# Additional environment variables for scripts
"ENABLE_SERVICES" = tostring(data.coder_parameter.enable_services.value)
# Security: Block file transfer commands to prevent data exfiltration
"GIT_AUTHOR_NAME" = local.git_author_name
"GIT_AUTHOR_EMAIL" = local.git_author_email
"CODER_WORKSPACE_ID" = local.workspace_id
"CODER_WORKSPACE_REPO" = local.project_repo_url
"POSTGRES_URL" = local.services_enabled ? local.postgres_url : ""
"REDIS_URL" = local.services_enabled ? local.redis_url : ""
"QDRANT_URL" = local.services_enabled ? local.qdrant_url : ""
"ENABLE_PGADMIN" = tostring(local.pgadmin_enabled)
"ENABLE_JUPYTER" = tostring(local.jupyter_enabled)
"ENABLE_SERVICES" = tostring(local.services_enabled)
"CODER_AGENT_BLOCK_FILE_TRANSFER" = var.block_file_transfer ? "1" : ""
# Repository to clone on startup
"CODER_WORKSPACE_REPO" = local.repo_url != "custom" ? local.repo_url : ""
}

# Reference bind-mounted startup script plus service port forwarding
startup_script = data.coder_parameter.enable_services.value ? "echo '${base64encode(local.port_forward_script)}' | base64 -d | tr -d '\\r' | bash 2>&1 | tee /tmp/startup-script.log" : "echo 'Starting workspace...'"
startup_script = local.agent_startup

# Performance and resource monitoring
metadata {
display_name = "CPU Usage"
key = "cpu_usage"
script = "{ export NVM_SYMLINK_CURRENT=false; top -bn1 2>/dev/null | grep 'Cpu(s)' | awk '{print $2 \"%\"}' || echo 'N/A'; } 2>/dev/null"
interval = 60
timeout = 10
key = "0_cpu_usage"
script = "coder stat cpu 2>/dev/null || echo 'n/a'"
interval = 30
timeout = 5
}

metadata {
display_name = "RAM Usage"
key = "ram_usage"
script = "{ export NVM_SYMLINK_CURRENT=false; free 2>/dev/null | grep Mem | awk '{printf \"%d%%\", int($3/$2 * 100)}' || echo 'N/A'; } 2>/dev/null"
interval = 60
timeout = 10
display_name = "Memory Usage"
key = "1_memory_usage"
script = "coder stat mem 2>/dev/null || echo 'n/a'"
interval = 30
timeout = 5
}

metadata {
display_name = "Disk Usage"
key = "disk_usage"
script = "{ export NVM_SYMLINK_CURRENT=false; df -h /workspaces 2>/dev/null | tail -1 | awk '{print $5}' || echo 'N/A'; } 2>&1 | head -1"
key = "2_disk_usage"
script = "df -h /workspaces 2>/dev/null | awk 'NR==2 {print $5}' || echo 'n/a'"
interval = 300
timeout = 10
}

metadata {
display_name = "Git Branch"
key = "git_branch"
script = "{ export NVM_SYMLINK_CURRENT=false; cd /workspaces && git branch --show-current 2>/dev/null || echo 'no-repo'; } 2>&1 | head -1"
key = "3_git_branch"
script = "cd /workspaces && git branch --show-current 2>/dev/null || echo 'no-repo'"
interval = 300
timeout = 5
}
}

# =============================================================================
# Persistent Home Volume for Development Container
# =============================================================================

resource "docker_volume" "coder_home" {
name = "coder-home-${local.workspace_id}"

labels {
label = "coder.workspace_id"
value = local.workspace_id
metadata {
display_name = "PostgreSQL"
key = "4_postgres"
script = local.services_enabled ? format("pg_isready -h postgres-%s -p 5432 -U postgres >/dev/null && echo healthy || echo down", local.workspace_id) : "echo 'disabled'"
interval = 60
timeout = 5
}
labels {
label = "coder.owner"
value = data.coder_workspace_owner.me.name

metadata {
display_name = "Redis"
key = "5_redis"
script = local.services_enabled ? format("redis-cli -h redis-%s -a %s ping 2>/dev/null | grep -qi pong && echo healthy || echo down", local.workspace_id, var.redis_password) : "echo 'disabled'"
interval = 60
timeout = 5
}
labels {
label = "coder.type"
value = "home-directory"

metadata {
display_name = "Qdrant"
key = "6_qdrant"
script = local.services_enabled ? format("wget --no-verbose --tries=1 --spider http://qdrant-%s:6333/health 2>/dev/null && echo healthy || echo down", local.workspace_id) : "echo 'disabled'"
interval = 60
timeout = 5
}
}

# =============================================================================
# Main Development Container
# =============================================================================

resource "docker_container" "workspace" {
count = data.coder_workspace.me.start_count
image = docker_image.devcontainer.image_id
name = local.container_name
hostname = data.coder_workspace.me.name

# Container resource limits
memory = var.workspace_memory_limit * 1024 * 1024 # Convert MB to bytes
memory = var.workspace_memory_limit > 0 ? var.workspace_memory_limit * 1024 * 1024 : null

# Environment variables
env = [
env = compact([
"GIT_AUTHOR_NAME=${local.git_author_name}",
"GIT_AUTHOR_EMAIL=${local.git_author_email}",
"GIT_COMMITTER_NAME=${local.git_author_name}",
"GIT_COMMITTER_EMAIL=${local.git_author_email}",
"NODE_VERSION=${var.node_version}",
"PYTHON_VERSION=${var.python_version}",
"CODER_AGENT_TOKEN=${coder_agent.main.token}"
]
"CODER_AGENT_TOKEN=${coder_agent.main.token}",
"CODER_AGENT_DEVCONTAINERS_ENABLE=true",
local.project_repo_url != "" ? "CODER_WORKSPACE_REPO=${local.project_repo_url}" : "",
])

# Network configuration
networks_advanced {
name = docker_network.workspace.name
}

# Host networking for Docker-in-Docker and reverse proxy support
host {
host = "host.docker.internal"
ip = "host-gateway"
}

# No port mappings needed - reverse proxy will handle routing
# All services run within the isolated workspace network
# Coder's port forwarding and apps will provide access via reverse proxy

# Volume mounts
volumes {
container_path = "/workspaces"
volume_name = docker_volume.workspaces.name
read_only = false
}

# Mount a dynamically created home volume for user data persistence
volumes {
container_path = "/home/coder"
volume_name = docker_volume.coder_home.name
read_only = false

dynamic "volumes" {
for_each = local.bind_mounts
content {
host_path = volumes.value.host
container_path = volumes.value.container
read_only = try(volumes.value.read_only, false)
}
}

# Docker socket for Docker-in-Docker (optional)
dynamic "volumes" {
for_each = var.enable_docker_in_docker ? [1] : []
content {
@@ -160,84 +124,62 @@ resource "docker_container" "workspace" {
}
}

# Working directory
working_dir = "/workspaces"
command = ["/bin/bash", "-c", "${coder_agent.main.init_script} && sleep infinity"]

# Keep container running
command = ["/bin/bash", "-c", "${coder_agent.main.init_script} && sleep infinity"]

# Container labels for management
labels {
label = "coder.owner"
value = data.coder_workspace_owner.me.name
}

labels {
label = "coder.workspace_id"
value = local.workspace_id
}
labels {
label = "coder.project"
value = var.project_name
}

# Dependencies
depends_on = [
docker_network.workspace,
docker_volume.workspaces,
docker_volume.coder_home,
docker_image.devcontainer
]
}

# =============================================================================
# JetBrains Gateway Integration
# =============================================================================
module "cursor_desktop" {
count = data.coder_workspace.me.start_count > 0 && data.coder_parameter.enable_ai_tools.value ? 1 : 0
source = "registry.coder.com/coder/cursor/coder"
agent_id = coder_agent.main.id
folder = "/workspaces"
group = "Desktop IDEs"
order = 40
}

module "jetbrains_gateway" {
count = data.coder_parameter.enable_jetbrains.value && data.coder_workspace.me.start_count > 0 ? 1 : 0
source = "registry.coder.com/modules/jetbrains-gateway/coder"
version = "1.0.29"
agent_id = coder_agent.main.id
folder = "/workspaces"
jetbrains_ides = ["IU", "WS", "PY", "GO"]
default = "IU"
latest = false
jetbrains_ide_versions = {
"IU" = {
build_number = "251.25410.129"
version = "2025.1"
}
"WS" = {
build_number = "251.25410.129"
version = "2025.1"
}
"PY" = {
build_number = "251.25410.129"
version = "2025.1"
}
"GO" = {
build_number = "251.25410.129"
version = "2025.1"
}
"CL" = {
build_number = "251.25410.129"
version = "2025.1"
}
"PS" = {
build_number = "251.25410.129"
version = "2025.1"
}
"RR" = {
build_number = "251.25410.129"
version = "2025.1"
}
"RM" = {
build_number = "251.25410.129"
version = "2025.1"
}
"RD" = {
build_number = "251.25410.129"
version = "2025.1"
}
}
}
module "windsurf_desktop" {
count = data.coder_workspace.me.start_count > 0 && data.coder_parameter.enable_ai_tools.value ? 1 : 0
source = "registry.coder.com/coder/windsurf/coder"
agent_id = coder_agent.main.id
folder = "/workspaces"
group = "Desktop IDEs"
order = 50
}

module "pycharm_desktop" {
count = data.coder_workspace.me.start_count > 0 && data.coder_parameter.enable_jetbrains.value ? 1 : 0
source = "registry.coder.com/coder/jetbrains-gateway/coder"
agent_id = coder_agent.main.id
folder = "/workspaces"
jetbrains_ides = ["PY"]
default = "PY"
group = "Desktop IDEs"
order = 60
coder_parameter_order = 6
slug = "pycharm-gateway"
}

module "claude_code" {
count = data.coder_workspace.me.start_count > 0 && data.coder_parameter.enable_ai_tools.value ? 1 : 0
source = "registry.coder.com/coder/claude-code/coder"
agent_id = coder_agent.main.id
workdir = "/workspaces"
group = "AI Tools"
order = 30
}
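The workspace.tf diff above references several locals (services_enabled, postgres_url, redis_url, qdrant_url, agent_startup, bind_mounts, project_repo_url) that are defined elsewhere in the template and not shown in this commit. Below is a minimal sketch of a locals block consistent with how they are used; every value here is an assumption, not the repo's actual definitions:

# Hypothetical sketch of the locals this template appears to rely on.
locals {
  workspace_id     = data.coder_workspace.me.id
  services_enabled = tobool(data.coder_parameter.enable_services.value)
  pgadmin_enabled  = var.enable_pgadmin
  jupyter_enabled  = var.enable_jupyter
  project_repo_url = var.devcontainer_repo_url

  postgres_url = "postgresql://postgres:${var.postgres_password}@postgres-${local.workspace_id}:5432/postgres"
  redis_url    = "redis://:${var.redis_password}@redis-${local.workspace_id}:6379"
  qdrant_url   = "http://qdrant-${local.workspace_id}:6333"

  # Assumed startup entry point; the actual bind-mounted script is not shown in this diff
  agent_startup = "bash /home/coder/code-tools/scripts/startup.sh 2>&1 | tee /tmp/startup-script.log"

  # Host paths mounted into the workspace; shape matches the dynamic volumes block above
  bind_mounts = [
    { host = "${var.host_home_path}/code-tools", container = "/home/coder/code-tools", read_only = false },
    { host = "${var.host_home_path}/.gitconfig", container = "/home/coder/.gitconfig", read_only = true },
  ]
}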