Compare commits

..

1 Commits

Author SHA1 Message Date
David Barroso
efb58f6565 sad 2025-10-06 11:57:42 +02:00
3341 changed files with 73861 additions and 306288 deletions

View File

@@ -7,8 +7,6 @@ assignees: ''
---
> **Note:** Bug reports that are clearly AI-generated will not be accepted and will be closed immediately. Please write your bug report in your own words.
**Describe the bug**
A clear and concise description of what the bug is.

View File

@@ -7,8 +7,6 @@ assignees: ''
---
> **Note:** Feature requests that are clearly AI-generated will not be accepted and will be closed immediately. Please write your feature request in your own words.
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

View File

@@ -8,8 +8,6 @@
--- Delete everything below this line before submitting your PR ---
> **Note on AI-assisted contributions:** Contributions with the help of AI are permitted, but you are ultimately responsible for the quality of your submission and for ensuring it follows our contributing guidelines. **The PR description must be written in your own words and be clear and concise**. Please ensure you remove any superfluous code comments introduced by AI tools before submitting. PRs that clearly violate this rule will be closed without further review.
### PR title format
The PR title must follow the following pattern:
@@ -24,7 +22,6 @@ Where `TYPE` is:
Where `PKG` is:
- `auth`: For changes to the Nhost Auth service
- `ci`: For general changes to the build and/or CI/CD pipeline
- `cli`: For changes to the Nhost CLI
- `codegen`: For changes to the code generator
@@ -32,11 +29,10 @@ Where `PKG` is:
- `deps`: For changes to dependencies
- `docs`: For changes to the documentation
- `examples`: For changes to the examples
- `internal/lib`: For changes to Nhost's common libraries (internal)
- `mintlify-openapi`: For changes to the Mintlify OpenAPI tool
- `nhost-js`: For changes to the Nhost JavaScript SDK
- `nixops`: For changes to the NixOps
- `storage`: For changes to the Nhost Storage service
- `storage`: For changes to the Nhost Storage
Where `SUMMARY` is a short description of what the PR does.

View File

@@ -17,7 +17,7 @@ runs:
# Define valid types and packages
VALID_TYPES="feat|fix|chore"
VALID_PKGS="auth|ci|cli|codegen|dashboard|deps|docs|examples|internal\/lib|mintlify-openapi|nhost-js|nixops|storage"
VALID_PKGS="ci|cli|codegen|dashboard|deps|docs|examples|mintlify-openapi|nhost-js|nixops|storage"
# Check if title matches the pattern TYPE(PKG): SUMMARY
if [[ ! "$PR_TITLE" =~ ^(${VALID_TYPES})\((${VALID_PKGS})\):\ .+ ]]; then
@@ -31,11 +31,11 @@ runs:
echo " - chore: mark this pull request as a maintenance item"
echo ""
echo "Valid PKGs:"
echo " - auth, ci, cli, codegen, dashboard, deps, docs, examples,"
echo " - ci, cli, codegen, dashboard, deps, docs, examples,"
echo " - mintlify-openapi, nhost-js, nixops, storage"
echo ""
echo "Example: feat(cli): add new command for database migrations"
exit 1
fi
echo "✅ PR title is valid!"
echo "✅ PR title is valid!"

View File

@@ -1,84 +0,0 @@
---
name: "auth: check and build"
on:
pull_request_target:
paths:
- '.github/workflows/auth_checks.yaml'
- '.github/workflows/wf_check.yaml'
- '.github/workflows/wf_build_artifacts.yaml'
# common build
- 'flake.nix'
- 'flake.lock'
- 'nixops/**'
- 'build/**'
# common go
- '.golangci.yaml'
- 'go.mod'
- 'go.sum'
- 'internal/lib/**'
- 'vendor/**'
# auth
- 'services/auth/**'
push:
branches:
- main
concurrency:
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.pull_request.number) || format('push-{0}', github.sha) }}
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
jobs:
check-permissions:
runs-on: ubuntu-latest
steps:
- run: |
echo "github.event_name: ${{ github.event_name }}"
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
run: |
exit 1
tests:
uses: ./.github/workflows/wf_check.yaml
needs:
- check-permissions
with:
NAME: auth
PATH: services/auth
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
NHOST_PAT: ${{ secrets.NHOST_PAT }}
build_artifacts:
uses: ./.github/workflows/wf_build_artifacts.yaml
needs:
- check-permissions
with:
NAME: auth
PATH: services/auth
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
DOCKER: true
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
remove_label:
runs-on: ubuntu-latest
needs:
- check-permissions
steps:
- uses: actions-ecosystem/action-remove-labels@v1
with:
labels: |
safe_to_test
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')

View File

@@ -1,60 +0,0 @@
---
name: "auth: release"
on:
workflow_call:
inputs:
GIT_REF:
required: true
type: string
VERSION:
required: true
type: string
secrets:
AWS_ACCOUNT_ID:
required: true
NIX_CACHE_PUB_KEY:
required: true
NIX_CACHE_PRIV_KEY:
required: true
DOCKER_USERNAME:
required: true
DOCKER_PASSWORD:
required: true
jobs:
build_artifacts:
uses: ./.github/workflows/wf_build_artifacts.yaml
with:
NAME: auth
PATH: services/auth
GIT_REF: ${{ inputs.GIT_REF }}
VERSION: ${{ inputs.VERSION }}
DOCKER: true
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
push-docker-hub:
uses: ./.github/workflows/wf_docker_push_image.yaml
needs:
- build_artifacts
with:
NAME: auth
PATH: services/auth
VERSION: ${{ inputs.VERSION }}
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
push-docker-ecr:
uses: ./.github/workflows/wf_docker_push_image_ecr.yaml
needs:
- build_artifacts
with:
NAME: auth
PATH: services/auth
VERSION: ${{ inputs.VERSION }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
CONTAINER_REGISTRY: ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.eu-central-1.amazonaws.com

View File

@@ -33,20 +33,6 @@ jobs:
echo "version=$VERSION" >> $GITHUB_OUTPUT
echo "Extracted project: $PROJECT, version: $VERSION"
auth:
needs: extract-project
if: needs.extract-project.outputs.project == 'auth'
uses: ./.github/workflows/auth_wf_release.yaml
with:
GIT_REF: ${{ github.sha }}
VERSION: ${{ needs.extract-project.outputs.version }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
cli:
needs: extract-project
if: needs.extract-project.outputs.project == 'cli'

View File

@@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
project: [cli, dashboard, packages/nhost-js, services/auth, services/storage]
project: [cli, dashboard, packages/nhost-js, services/storage]
permissions:
id-token: write
@@ -40,7 +40,7 @@ jobs:
cd ${{ matrix.project }}
TAG_NAME=$(make release-tag-name)
VERSION=$(nix develop .\#cliff -c make changelog-next-version)
if git tag | grep -qx "$TAG_NAME@$VERSION"; then
if git tag | grep -q "$TAG_NAME@$VERSION"; then
echo "Tag $TAG_NAME@$VERSION already exists, skipping release preparation"
else
echo "Tag $TAG_NAME@$VERSION does not exist, proceeding with release preparation"

View File

@@ -1,7 +1,8 @@
---
name: "cli: check and build"
on:
pull_request_target:
# pull_request_target:
pull_request:
paths:
- '.github/workflows/cli_checks.yaml'
- '.github/workflows/wf_check.yaml'
@@ -49,7 +50,7 @@ jobs:
with:
NAME: cli
PATH: cli
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
@@ -64,7 +65,7 @@ jobs:
with:
NAME: cli
PATH: cli
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
DOCKER: true
secrets:
@@ -80,7 +81,7 @@ jobs:
with:
NAME: cli
PATH: cli
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}

View File

@@ -63,7 +63,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: "Get artifacts"
uses: actions/download-artifact@v6
uses: actions/download-artifact@v5
with:
path: ~/artifacts

View File

@@ -1,7 +1,8 @@
---
name: "codegen: check and build"
on:
pull_request_target:
# pull_request_target:
pull_request:
paths:
- '.github/workflows/wf_check.yaml'
- '.github/workflows/codegen_checks.yaml'
@@ -47,7 +48,7 @@ jobs:
with:
NAME: codegen
PATH: tools/codegen
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
@@ -61,7 +62,7 @@ jobs:
with:
NAME: codegen
PATH: tools/codegen
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
DOCKER: false
secrets:

View File

@@ -1,7 +1,7 @@
---
name: "dashboard: check and build"
on:
pull_request_target:
pull_request:
paths:
- '.github/workflows/wf_build_artifacts.yaml'
- '.github/workflows/wf_check.yaml'
@@ -54,7 +54,7 @@ jobs:
- check-permissions
with:
NAME: dashboard
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
ENVIRONMENT: preview
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
@@ -73,7 +73,7 @@ jobs:
with:
NAME: dashboard
PATH: dashboard
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
DOCKER: true
OS_MATRIX: '["blacksmith-2vcpu-ubuntu-2404"]'
@@ -91,7 +91,7 @@ jobs:
with:
NAME: dashboard
PATH: dashboard
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
@@ -107,7 +107,7 @@ jobs:
with:
NAME: dashboard
PATH: dashboard
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
NHOST_TEST_DASHBOARD_URL: ${{ needs.deploy-vercel.outputs.preview-url }}
NHOST_TEST_PROJECT_NAME: ${{ vars.NHOST_TEST_PROJECT_NAME }}
NHOST_TEST_ORGANIZATION_NAME: ${{ vars.NHOST_TEST_ORGANIZATION_NAME }}
@@ -126,10 +126,8 @@ jobs:
NHOST_TEST_USER_EMAIL: ${{ secrets.NHOST_TEST_USER_EMAIL }}
NHOST_TEST_USER_PASSWORD: ${{ secrets.NHOST_TEST_USER_PASSWORD }}
NHOST_TEST_PROJECT_ADMIN_SECRET: ${{ secrets.NHOST_TEST_PROJECT_ADMIN_SECRET }}
NHOST_TEST_ONBOARDING_USER: ${{ secrets.NHOST_TEST_ONBOARDING_USER }}
NHOST_TEST_FREE_USER_EMAILS: ${{ secrets.NHOST_TEST_FREE_USER_EMAILS }}
PLAYWRIGHT_REPORT_ENCRYPTION_KEY: ${{ secrets.PLAYWRIGHT_REPORT_ENCRYPTION_KEY }}
NHOST_TEST_STAGING_SUBDOMAIN: ${{ secrets.NHOST_TEST_STAGING_SUBDOMAIN }}
NHOST_TEST_STAGING_REGION: ${{ secrets.NHOST_TEST_STAGING_REGION }}
remove_label:
runs-on: ubuntu-latest

View File

@@ -52,16 +52,12 @@ on:
required: true
NHOST_TEST_USER_PASSWORD:
required: true
NHOST_TEST_ONBOARDING_USER:
required: true
NHOST_TEST_PROJECT_ADMIN_SECRET:
required: true
NHOST_TEST_FREE_USER_EMAILS:
required: true
PLAYWRIGHT_REPORT_ENCRYPTION_KEY:
required: true
NHOST_TEST_STAGING_SUBDOMAIN:
required: true
NHOST_TEST_STAGING_REGION:
required: true
concurrency:
group: dashboard-e2e-staging
@@ -81,10 +77,7 @@ env:
NHOST_TEST_USER_EMAIL: ${{ secrets.NHOST_TEST_USER_EMAIL }}
NHOST_TEST_USER_PASSWORD: ${{ secrets.NHOST_TEST_USER_PASSWORD }}
NHOST_TEST_PROJECT_ADMIN_SECRET: ${{ secrets.NHOST_TEST_PROJECT_ADMIN_SECRET }}
NHOST_TEST_ONBOARDING_USER: ${{ secrets.NHOST_TEST_ONBOARDING_USER }}
NHOST_TEST_STAGING_SUBDOMAIN: ${{ secrets.NHOST_TEST_STAGING_SUBDOMAIN }}
NHOST_TEST_STAGING_REGION: ${{ secrets.NHOST_TEST_STAGING_REGION }}
NHOST_TEST_FREE_USER_EMAILS: ${{ secrets.NHOST_TEST_FREE_USER_EMAILS }}
jobs:
tests:
@@ -148,7 +141,7 @@ jobs:
rm playwright-report.tar.gz
- name: Upload encrypted Playwright report
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
if: failure()
with:
name: encrypted-playwright-report-${{ github.run_id }}

View File

@@ -88,7 +88,6 @@ jobs:
- name: Bump version in source code
run: |
find cli -type f -exec sed -i 's/"nhost\/dashboard:[^"]*"/"nhost\/dashboard:${{ inputs.VERSION }}"/g' {} +
sed -i 's/nhost\/dashboard:[^)]*/nhost\/dashboard:${{ inputs.VERSION }}/g' docs/reference/cli/commands.mdx
- name: "Create Pull Request"
uses: peter-evans/create-pull-request@v7

View File

@@ -1,7 +1,7 @@
---
name: "docs: check and build"
on:
pull_request_target:
pull_request:
paths:
- '.github/workflows/wf_check.yaml'
- '.github/workflows/dashboard_checks.yaml'
@@ -28,10 +28,6 @@ on:
# nhost-js
- packages/nhost-js/**
# apis
- 'services/auth/docs/openapi.yaml'
- 'services/storage/controller/openapi.yaml'
# cli
- cli/**
push:
@@ -62,7 +58,7 @@ jobs:
with:
NAME: docs
PATH: docs
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}

View File

@@ -1,7 +1,8 @@
---
name: "examples/demos: check and build"
on:
pull_request_target:
# pull_request_target:
pull_request:
paths:
- '.github/workflows/wf_check.yaml'
- '.github/workflows/examples_demos_checks.yaml'
@@ -63,7 +64,7 @@ jobs:
with:
NAME: demos
PATH: examples/demos
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
@@ -77,7 +78,7 @@ jobs:
with:
NAME: demos
PATH: examples/demos
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
DOCKER: false
OS_MATRIX: '["blacksmith-2vcpu-ubuntu-2404"]'

View File

@@ -1,7 +1,8 @@
---
name: "examples/guides: check and build"
on:
pull_request_target:
# pull_request_target:
pull_request:
paths:
- '.github/workflows/wf_check.yaml'
- '.github/workflows/examples_guides_checks.yaml'
@@ -63,7 +64,7 @@ jobs:
with:
NAME: guides
PATH: examples/guides
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
@@ -77,7 +78,7 @@ jobs:
with:
NAME: guides
PATH: examples/guides
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
DOCKER: false
OS_MATRIX: '["blacksmith-2vcpu-ubuntu-2404"]'

View File

@@ -1,7 +1,8 @@
---
name: "examples/tutorials: check and build"
on:
pull_request_target:
# pull_request_target:
pull_request:
paths:
- '.github/workflows/wf_check.yaml'
- '.github/workflows/examples_tutorials_checks.yaml'
@@ -63,7 +64,7 @@ jobs:
with:
NAME: tutorials
PATH: examples/tutorials
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
@@ -77,7 +78,7 @@ jobs:
with:
NAME: tutorials
PATH: examples/tutorials
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
DOCKER: false
OS_MATRIX: '["blacksmith-2vcpu-ubuntu-2404"]'

View File

@@ -1,7 +1,7 @@
---
name: "gen: AI review"
on:
pull_request_target:
pull_request:
types: [opened, reopened, ready_for_review]
issue_comment:
jobs:
@@ -16,7 +16,7 @@ jobs:
steps:
- name: PR Agent action step
id: pragent
uses: Codium-ai/pr-agent@v0.31
uses: Codium-ai/pr-agent@v0.30
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENAI_KEY: ${{ secrets.OPENAI_API_KEY }}
@@ -24,5 +24,4 @@ jobs:
config.model: ${{ vars.GEN_AI_MODEL }}
config.model_turbo: $${{ vars.GEN_AI_MODEL_TURBO }}
config.max_model_tokens: 200000
config.custom_model_max_tokens: 200000
ignore.glob: "['pnpm-lock.yaml','**/pnpm-lock.yaml', 'vendor/**','**/client_gen.go','**/models_gen.go','**/generated.go','**/*.gen.go']"

View File

@@ -26,7 +26,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v4
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -37,7 +37,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v4
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@@ -51,4 +51,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v4
uses: github/codeql-action/analyze@v3

View File

@@ -1,7 +1,8 @@
---
name: "nhost-js: check and build"
on:
pull_request_target:
# pull_request_target:
pull_request:
paths:
- '.github/workflows/wf_check.yaml'
- '.github/workflows/nhost-js_checks.yaml'
@@ -33,10 +34,6 @@ on:
# nhost-js
- 'packages/nhost-js/**'
# apis
- 'services/auth/docs/openapi.yaml'
- 'services/storage/controller/openapi.yaml'
push:
branches:
- main
@@ -64,7 +61,7 @@ jobs:
with:
NAME: nhost-js
PATH: packages/nhost-js
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
@@ -78,7 +75,7 @@ jobs:
with:
NAME: nhost-js
PATH: packages/nhost-js
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
DOCKER: false
secrets:

View File

@@ -1,7 +1,8 @@
---
name: "nixops: check and build"
on:
pull_request_target:
# pull_request_target:
pull_request:
paths:
- '.github/workflows/wf_check.yaml'
- '.github/workflows/nixops_checks.yaml'
@@ -39,7 +40,7 @@ jobs:
with:
NAME: nixops
PATH: nixops
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
@@ -53,9 +54,9 @@ jobs:
with:
NAME: nixops
PATH: nixops
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
DOCKER: true
DOCKER: false
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}

View File

@@ -1,35 +0,0 @@
---
name: "nixops: release"
on:
push:
branches:
- main
paths:
- 'flake.lock'
- 'nixops/project.nix'
jobs:
build_artifacts:
uses: ./.github/workflows/wf_build_artifacts.yaml
with:
NAME: nixops
PATH: nixops
GIT_REF: ${{ inputs.GIT_REF }}
VERSION: latest
DOCKER: true
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
push-docker:
uses: ./.github/workflows/wf_docker_push_image.yaml
needs:
- build_artifacts
with:
NAME: nixops
PATH: nixops
VERSION: latest
secrets:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}

View File

@@ -1,7 +1,8 @@
---
name: "storage: check and build"
on:
pull_request_target:
# pull_request_target:
pull_request:
paths:
- '.github/workflows/storage_checks.yaml'
- '.github/workflows/wf_check.yaml'
@@ -17,7 +18,6 @@ on:
- '.golangci.yaml'
- 'go.mod'
- 'go.sum'
- 'internal/lib/**'
- 'vendor/**'
# storage
@@ -49,7 +49,7 @@ jobs:
with:
NAME: storage
PATH: services/storage
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
@@ -64,7 +64,7 @@ jobs:
with:
NAME: storage
PATH: services/storage
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
GIT_REF: ${{ github.sha }}
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
DOCKER: true
secrets:

View File

@@ -85,7 +85,7 @@ jobs:
zip -r result.zip result
- name: "Push artifact to artifact repository"
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.NAME }}-artifact-${{ steps.vars.outputs.ARCH }}-${{ steps.vars.outputs.VERSION }}
path: ${{ inputs.PATH }}/result.zip
@@ -100,7 +100,7 @@ jobs:
if: ${{ ( inputs.DOCKER ) }}
- name: "Push docker image to artifact repository"
uses: actions/upload-artifact@v5
uses: actions/upload-artifact@v4
with:
name: ${{ inputs.NAME }}-docker-image-${{ steps.vars.outputs.ARCH }}-${{ steps.vars.outputs.VERSION }}
path: ${{ inputs.PATH }}/result

View File

@@ -44,7 +44,7 @@ jobs:
echo "VERSION=$(make get-version VER=${{ inputs.VERSION }})" >> $GITHUB_OUTPUT
- name: "Get artifacts"
uses: actions/download-artifact@v6
uses: actions/download-artifact@v5
with:
path: ~/artifacts

View File

@@ -55,7 +55,7 @@ jobs:
echo "VERSION=$(make get-version VER=${{ inputs.VERSION }})" >> $GITHUB_OUTPUT
- name: "Get artifacts"
uses: actions/download-artifact@v6
uses: actions/download-artifact@v5
with:
path: ~/artifacts

View File

@@ -32,7 +32,6 @@ linters:
- linters:
- funlen
- ireturn
- goconst
path: _test\.go
- linters:
- lll

View File

@@ -16,15 +16,6 @@ Contributions are made to Nhost repos via Issues and Pull Requests (PRs). A few
- We work hard to make sure issues are handled on time, but it could take a while to investigate the root cause depending on the impact. A friendly ping in the comment thread to the submitter or a contributor can help draw attention if your issue is blocking.
- If you've never contributed before, see [the first-timer's guide](https://github.com/firstcontributions/first-contributions) for resources and tips on getting started.
### AI-Assisted Contributions
We have specific policies regarding AI-assisted contributions:
- **Issues**: Bug reports and feature requests that are clearly AI-generated will not be accepted and will be closed immediately. Please write your issues in your own words to ensure they are clear, specific, and contain the necessary context.
- **Pull Requests**: Contributions with the help of AI are permitted, but you are ultimately responsible for the quality of your submission and for ensuring it follows our contributing guidelines. The PR description must be written in your own words. Additionally, please remove any superfluous code comments introduced by AI tools before submitting. PRs that clearly violate this rule will be closed without further review.
In all cases, contributors must ensure their submissions are thoughtful, well-tested, and meet the project's quality standards.
### Issues
Issues should be used to report problems with Nhost, request a new feature, or discuss potential changes before a PR is created.
@@ -33,20 +24,28 @@ If you find an Issue that addresses the problem you're having, please add your r
### Pull Requests
Please have a look at our [developers guide](https://github.com/nhost/nhost/blob/main/DEVELOPERS.md) to start coding!
PRs to our libraries are always welcome and can be a quick way to get your fix or improvement slated for the next release. In general, PRs should:
## Monorepo Structure
- Only fix/add the functionality in question **OR** address wide-spread whitespace/style issues, not both.
- Add unit or integration tests for fixed or changed functionality (if a test suite exists).
- Address a single concern in the least number of changed lines as possible.
- Include documentation in the repo or on our [docs site](https://docs.nhost.io).
- Be accompanied by a complete Pull Request template (loaded automatically when a PR is created).
This repository is a monorepo that contains multiple packages and applications. The structure is as follows:
For changes that address core functionality or require breaking changes (e.g., a major release), it's best to open an Issue to discuss your proposal first. This is not required but can save time creating and reviewing changes.
- `cli` - The Nhost CLI
- `dashboard` - The Nhost Dashboard
- `docs` - Documentation
- `examples` - Various example projects
- `packages/nhost-js` - The Nhost JavaScript/TypeScript SDK
- `services/auth` - Nhost Authentication service
- `services/storage` - Nhost Storage service
- `tools/codegen` - Internal code generation tool to build the SDK
- `tools/mintlify-openapi` - Internal tool to generate reference documentation for Mintlify from an OpenAPI spec.
In general, we follow the ["fork-and-pull" Git workflow](https://github.com/susam/gitpr)
For details about those projects and how to contribute, please refer to their respective `README.md` and `CONTRIBUTING.md` files.
1. Fork the repository to your own Github account
2. Clone the project to your machine
3. Create a branch locally with a succinct but descriptive name. All changes should be part of a branch and submitted as a pull request - your branches should be prefixed with one of:
- `bug/` for bug fixes
- `feat/` for features
- `chore/` for configuration changes
- `docs/` for documentation changes
4. Commit changes to the branch
5. Following any formatting and testing guidelines specific to this repo
6. Push changes to your fork
7. Open a PR in our repository and follow the PR template to review the changes efficiently.

100
DEVELOPERS.md Normal file
View File

@@ -0,0 +1,100 @@
# Developer Guide
## Requirements
### Node.js v20 or later
### [pnpm](https://pnpm.io/) package manager
The easiest way to install `pnpm` if it's not installed on your machine yet is to use `npm`:
```sh
$ npm install -g pnpm
```
### [Nhost CLI](https://docs.nhost.io/platform/cli/local-development)
- The CLI is primarily used for running the E2E tests
- Please refer to the [installation guide](https://docs.nhost.io/platform/cli/local-development) if you have not installed it yet
## File Structure
The repository is organized as a monorepo, with the following structure (only relevant folders are shown):
```
assets/ # Assets used in the README
config/ # Configuration files for the monorepo
dashboard/ # Dashboard
docs/ # Documentation website
examples/ # Example projects
packages/ # Core packages
integrations/ # These are packages that rely on the core packages
```
## Get started
### Installation
First, clone this repository:
```sh
git clone https://github.com/nhost/nhost
```
Then, install the dependencies with `pnpm`:
```sh
$ cd nhost
$ pnpm install
```
### Development
Although package references are correctly updated on the fly for TypeScript, example projects and the dashboard won't see the changes because they depend on the build output. To fix this, you can run packages in development mode.
Running packages in development mode from the root folder is as simple as:
```sh
$ pnpm dev
```
Our packages are linked together using [PNPM's workspace](https://pnpm.io/workspaces) feature. Next.js and Vite automatically detect changes in the dependencies and rebuild everything, so the changes will be reflected in the examples and the dashboard.
**Note:** It's possible that Next.js or Vite throw an error when you run `pnpm dev`. Restarting the process should fix it.
### Use Examples
Examples are a great way to test your changes in practice. Make sure you have `pnpm dev` running in your terminal and then run an example.
Let's follow the instructions to run [react-apollo example](https://github.com/nhost/nhost/blob/main/examples/react-apollo/README.md).
## Edit Documentation
The easiest way to contribute to our documentation is to go to the `docs` folder and follow the [instructions to start local development](https://github.com/nhost/nhost/blob/main/docs/README.md):
```sh
$ cd docs
# not necessary if you've already done this step somewhere in the repository
$ pnpm install
$ pnpm start
```
## Run Test Suites
### Unit Tests
You can run the unit tests with the following command from the repository root:
```sh
$ pnpm test
```
### E2E Tests
Each package that defines end-to-end tests embeds its own Nhost configuration, which will be applied automatically when running the tests. As a result, you must make sure you are not running the Nhost CLI before running the tests.
You can run the e2e tests with the following command from the repository root:
```sh
$ pnpm e2e
```

View File

@@ -1,16 +0,0 @@
.PHONY: envrc-install
envrc-install: ## Copy envrc.sample to all project folders
@for f in $$(find . -name "project.nix"); do \
echo "Copying envrc.sample to $$(dirname $$f)/.envrc"; \
cp ./envrc.sample $$(dirname $$f)/.envrc; \
done
.PHONY: nixops-container-env
nixops-container-env: ## Enter a NixOS container environment
docker run \
-it \
-v /var/run/docker.sock:/var/run/docker.sock \
-v ./:/build \
-w /build \
nixops:0.0.0-dev \
bash

View File

@@ -12,7 +12,7 @@
<span>&nbsp;&nbsp;•&nbsp;&nbsp;</span>
<a href="https://nhost.io/blog">Blog</a>
<span>&nbsp;&nbsp;•&nbsp;&nbsp;</span>
<a href="https://x.com/nhost">X</a>
<a href="https://twitter.com/nhost">Twitter</a>
<span>&nbsp;&nbsp;•&nbsp;&nbsp;</span>
<a href="https://nhost.io/discord">Discord</a>
<span>&nbsp;&nbsp;•&nbsp;&nbsp;</span>
@@ -33,10 +33,10 @@ Nhost consists of open source software:
- Database: [PostgreSQL](https://www.postgresql.org/)
- Instant GraphQL API: [Hasura](https://hasura.io/)
- Authentication: [Auth](https://github.com/nhost/nhost/tree/main/services/auth)
- Storage: [Storage](https://github.com/nhost/nhost/tree/main/services/storage)
- Authentication: [Hasura Auth](https://github.com/nhost/hasura-auth/)
- Storage: [Hasura Storage](https://github.com/nhost/hasura-storage)
- Serverless Functions: Node.js (JavaScript and TypeScript)
- [Nhost CLI](https://github.com/nhost/nhost/tree/main/cli) for local development
- [Nhost CLI](https://docs.nhost.io/platform/cli/local-development) for local development
## Architecture of Nhost
@@ -138,7 +138,7 @@ Here are some ways of contributing to making Nhost better:
- **[Try out Nhost](https://docs.nhost.io)**, and think of ways to make the service better. Let us know here on GitHub.
- Join our [Discord](https://discord.com/invite/9V7Qb2U) and connect with other members to share and learn from.
- Send a pull request to any of our [open source repositories](https://github.com/nhost) on Github. Check out our [contribution guide](https://github.com/nhost/nhost/blob/main/CONTRIBUTING.md) for more details about how to contribute. We're looking forward to your contribution!
- Send a pull request to any of our [open source repositories](https://github.com/nhost) on Github. Check our [contribution guide](https://github.com/nhost/nhost/blob/main/CONTRIBUTING.md) and our [developers guide](https://github.com/nhost/nhost/blob/main/DEVELOPERS.md) for more details about how to contribute. We're looking forward to your contribution!
### Contributors

View File

@@ -2,7 +2,5 @@
// $schema provides code completion hints to IDEs.
"$schema": "https://github.com/IBM/audit-ci/raw/main/docs/schema.json",
"moderate": true,
"allowlist": [
"GHSA-7mvr-c777-76hp" // https://github.com/advisories/GHSA-7mvr-c777-76hp Update package once Nix side is also updated
]
}
"allowlist": ["vue-template-compiler", { "id": "CVE-2025-48068", "path": "next" }]
}

View File

@@ -54,11 +54,6 @@ get-version: ## Return version
@echo $(VERSION)
.PHONY: develop
develop: ## Start a nix develop shell
nix develop .\#$(NAME)
.PHONY: _check-pre
_check-pre: ## Pre-checks before running nix flake check
@@ -110,11 +105,6 @@ build-docker-image: ## Build docker container for native architecture
skopeo copy --insecure-policy dir:./result docker-daemon:$(NAME):$(VERSION)
.PHONY: build-docker-image-import-bare
build-docker-image-import-bare:
skopeo copy --insecure-policy dir:./result docker-daemon:$(NAME):$(VERSION)
.PHONY: dev-env-up
dev-env-up: _dev-env-build _dev-env-up ## Starts development environment

View File

@@ -1,85 +1,7 @@
## [cli@1.34.7] - 2025-11-13
### ⚙️ Miscellaneous Tasks
- *(cli)* Bump nhost/dashboard to 2.42.0 (#3693)
## [cli@1.34.6] - 2025-11-13
### 🐛 Bug Fixes
- *(cli)* Mcp: specify items type for arrays in tools (#3687)
### ⚙️ Miscellaneous Tasks
- *(cli)* Update bindings (#3689)
## [cli@1.34.5] - 2025-11-06
### ⚙️ Miscellaneous Tasks
- *(nixops)* Bump go to 1.25.3 and nixpkgs due to CVEs (#3652)
- *(cli)* Udpate certs and schema (#3675)
- *(cli)* Bump nhost/dashboard to 2.41.0 (#3669)
# Changelog
All notable changes to this project will be documented in this file.
## [cli@1.34.4] - 2025-10-28
### 🐛 Bug Fixes
- *(cli)* Update NEXT_PUBLIC_NHOST_HASURA_MIGRATIONS_API_URL correctly (#3643)
## [cli@1.34.3] - 2025-10-27
### ⚙️ Miscellaneous Tasks
- *(cli)* Update schema (#3622)
- *(cli)* Bump nhost/dashboard to 2.40.0 (#3629)
## [cli@1.34.2] - 2025-10-20
### ⚙️ Miscellaneous Tasks
- *(cli)* Minor fix to download script when specifying version (#3602)
- *(cli)* Update schema (#3613)
## [cli@1.34.1] - 2025-10-13
### 🐛 Bug Fixes
- *(cli)* Remove references to mcp-nhost (#3575)
- *(cli)* Workaround os.Rename issues when src and dst are on different partitions (#3599)
### ⚙️ Miscellaneous Tasks
- *(auth)* Change some references to deprecated hasura-auth (#3584)
- *(docs)* Udpated README.md and CONTRIBUTING.md (#3587)
## [cli@1.34.0] - 2025-10-09
### 🚀 Features
- *(cli)* Added mcp server functionality from mcp-nhost (#3550)
- *(cli)* Mcp: move configuration to .nhost folder and integrate cloud credentials (#3555)
- *(cli)* Mcp: added support for environment variables in the configuration (#3556)
- *(cli)* MCP refactor and documentation prior to official release (#3571)
### 🐛 Bug Fixes
- *(dashboard)* Remove NODE_ENV from restricted env vars (#3573)
### ⚙️ Miscellaneous Tasks
- *(nixops)* Update nhost-cli (#3554)
- *(cli)* Bump nhost/dashboard to 2.38.4 (#3539)
## [cli@1.33.0] - 2025-10-02
### 🚀 Features

View File

@@ -1,84 +0,0 @@
# Developer Guide
## Requirements
We use nix to manage the development environment, the build process and for running tests.
### With Nix (Recommended)
Run `nix develop \#cli` to get a complete development environment.
### Without Nix
Check `project.nix` (checkDeps, buildInputs, buildNativeInputs) for manual dependency installation. Alternatively, you can run `make nixops-container-env` in the root of the repository to enter a Docker container with nix and all dependencies pre-installed (note it is a large image).
## Development Workflow
### Running Tests
**With Nix:**
```bash
make dev-env-up
make check
```
**Without Nix:**
```bash
# Start development environment
make dev-env-up
# Lint Go code
golangci-lint run ./...
# Run tests
go test -v ./...
```
### Formatting
Format code before committing:
```bash
golines -w --base-formatter=gofumpt .
```
## Building
### Local Build
Build the project (output in `./result`):
```bash
make build
```
### Docker Image
Build and import Docker image with skopeo:
```bash
make build-docker-image
```
If you run the command above inside the dockerized nixops-container-env and you get an error like:
```
FATA[0000] writing blob: io: read/write on closed pipe
```
then you need to run the following command outside of the container (needs skopeo installed on the host):
```bash
cd cli
make build-docker-image-import-bare
```
### Multi-Platform Builds
Build for multiple platforms (Darwin/Linux, ARM64/AMD64):
```bash
make build-multiplatform
```
This produces binaries for:
- darwin/arm64
- darwin/amd64
- linux/arm64
- linux/amd64

202
cli/MCP.md Normal file
View File

@@ -0,0 +1,202 @@
# nhost mcp
A Model Context Protocol (MCP) server implementation for interacting with Nhost Cloud projects and services.
## Overview
MCP-Nhost is designed to provide a unified interface for managing Nhost projects through the Model Context Protocol. It enables seamless interaction with Nhost Cloud services, offering a robust set of tools for project management and configuration.
## Available Tools
The following tools are currently exposed through the MCP interface:
1. **cloud-get-graphql-schema**
- Provides the GraphQL schema for the Nhost Cloud platform
- Gives access to queries and mutations available for cloud management
2. **cloud-graphql-query**
- Executes GraphQL queries and mutations against the Nhost Cloud platform
- Enables project and organization management
- Allows querying and updating project configurations
- Mutations require enabling them when starting the server
3. **local-get-graphql-schema**
- Retrieves the GraphQL schema for local Nhost development projects
- Provides access to project-specific queries and mutations
   - Helps understand available operations for local development, which helps with generating code
- Uses "user" role unless specified otherwise
4. **local-graphql-query**
- Executes GraphQL queries against local Nhost development projects
- Enables testing and development of project-specific operations
- Supports both queries and mutations for local development
- Uses "user" role unless specified otherwise
5. **local-config-server-get-schema**
- Retrieves the GraphQL schema for the local config server
- Helps understand available configuration options for local projects
6. **local-config-server-query**
- Executes GraphQL queries against the local config server
- Enables querying and modifying local project configuration
- Changes require running 'nhost up' to take effect
7. **local-get-management-graphql-schema**
- Retrieves the GraphQL management schema for local projects
- Useful for understanding how to manage Hasura metadata, migrations, and permissions
- Provides insight into available management operations before using the management tool
8. **local-manage-graphql**
- Interacts with GraphQL's management endpoints for local projects
- Manages Hasura metadata, migrations, permissions, and remote schemas
- Creates and applies database migrations
- Handles data and schema changes through proper migration workflows
- Manages roles and permissions
9. **project-get-graphql-schema**
- Retrieves the GraphQL schema for Nhost Cloud projects
- Provides access to project-specific queries and mutations
- Uses "user" role unless specified otherwise
10. **project-graphql-query**
- Executes GraphQL queries against Nhost Cloud projects
- Enables interaction with live project data
- Supports both queries and mutations (need to be allowed)
- Uses "user" role unless specified otherwise
11. **search**
- Searches Nhost's official documentation
- Provides information about Nhost features, APIs, and guides
- Helps find relevant documentation for implementing features or solving issues
- Returns links to detailed documentation pages
## Screenshots and Examples
You can find screenshots and examples of the current features and tools in the [screenshots](docs/mcp/screenshots.md) file.
## Installing
To install mcp-nhost, you can use the following command:
```bash
sudo curl -L https://raw.githubusercontent.com/nhost/mcp-nhost/main/get.sh | bash
```
## Configuring
After installing mcp-nhost, you will need to configure it. You can do this by running the command `mcp-nhost config` in your terminal. See [CONFIG.md](docs/mcp/CONFIG.md) for more details.
## Configuring clients
#### Cursor
1. Go to "Cursor Settings"
2. Click on "MCP"
3. Click on "+ Add new global MCP server"
4. Add the following object inside `"mcpServers"`:
```json
"mcp-nhost": {
"command": "/usr/local/bin/mcp-nhost",
"args": [
"start",
],
}
```
## CLI Usage
For help on how to use the CLI, you can run:
```bash
mcp-nhost --help
```
Or check [USAGE.md](docs/mcp/USAGE.md) for more details.
## Troubleshooting
If you run into issues using the MCP server you can try running the tools yourself. For example:
```
# cloud-get-graphql-schema
echo '{"jsonrpc":"2.0","method":"tools/call","params":{"name":"cloud-get-graphql-schema","arguments":{}},"id":1}' | mcp-nhost start
# cloud-graphql-query
echo '{"jsonrpc":"2.0","method":"tools/call","params":{"name":"cloud-graphql-query","arguments":{"query":"{ apps { id subdomain name } }"}},"id":1}' | mcp-nhost start
# local-get-graphql-schema
echo '{"jsonrpc":"2.0","method":"tools/call","params":{"name":"local-get-graphql-schema","arguments":{"role":"user"}},"id":1}' | mcp-nhost start
# local-graphql-query
echo '{"jsonrpc":"2.0","method":"tools/call","params":{"name":"local-graphql-query","arguments":{"query":"{ users { id } }", "role":"admin"}},"id":1}' | mcp-nhost start
# local-config-server-get-schema
echo '{"jsonrpc":"2.0","method":"tools/call","params":{"name":"local-config-server-get-schema","arguments":{}},"id":1}' | mcp-nhost start
# local-config-server-query
echo '{"jsonrpc":"2.0","method":"tools/call","params":{"name":"local-config-server-query","arguments":{"query":"{ config(appID: \"00000000-0000-0000-0000-000000000000\", resolve: true) { postgres { version } } }"}},"id":1}' | mcp-nhost start
# local-get-management-graphql-schema
echo '{"jsonrpc":"2.0","method":"tools/call","params":{"name":"local-get-management-graphql-schema","arguments":{}},"id":1}' | mcp-nhost start
# local-manage-graphql
echo '{"jsonrpc":"2.0","method":"tools/call","params":{"name":"local-manage-graphql","arguments":{"body":"{\"type\":\"export_metadata\",\"args\":{}}","endpoint":"https://local.hasura.local.nhost.run/v1/metadata"}},"id":1}' | mcp-nhost start
# project-get-graphql-schema - set projectSubdomain to your own project
echo '{"jsonrpc":"2.0","method":"tools/call","params":{"name":"project-get-graphql-schema","arguments":{"projectSubdomain":"replaceMe", "role": "user"}},"id":1}' | mcp-nhost start
# project-graphql-query - set projectSubdomain to your own project
echo '{"jsonrpc":"2.0","method":"tools/call","params":{"name":"project-graphql-query","arguments":{"projectSubdomain":"replaceMe","query":"{ users { id } }", "role":"admin"}},"id":1}' | mcp-nhost start
# search
echo '{"jsonrpc":"2.0","method":"tools/call","params":{"name":"search","arguments":{"query":"how to enable magic links"}},"id":1}' | mcp-nhost start
# resources
echo '{"jsonrpc":"2.0","method":"resources/read","params":{"uri":"schema://asdsadasd"},"id":1}' | go run main.go mcp start
```
## Roadmap
- ✅ Cloud platform: Basic project and organization management
- ✅ Cloud projects: Configuration management
- ✅ Local projects: Configuration management
- ✅ Local projects: Graphql Schema awareness and query execution
- ✅ Cloud projects: Schema awareness and query execution
- ✅ Local projects: Create migrations
- ✅ Local projects: Manage permissions and relationships
- ✅ Documentation: integrate or document use of mintlify's mcp server
- ✅ Local projects: Auth and Storage schema awareness (maybe via mintlify?)
- ✅ Cloud projects: Auth and Storage schema awareness (maybe via mintlify?)
- 🔄 Local projects: Manage more metadata
If you have any suggestions or feature requests, please feel free to open an issue for discussion.
## Security and Privacy
### Enhanced Protection Layer
The MCP server is designed with security at its core, providing an additional protection layer beyond your existing GraphQL permissions. Key security features include:
- **Authentication enforcement** for all requests
- **Permission and role respect** based on your existing authorization system and the credentials provided
- **Query/mutation filtering** to further restrict allowed operations
### Granular Access Control
One of the MCP server's key security advantages is the ability to specify exactly which operations can pass through, even for authenticated users:
```toml
[[projects]]
subdomain = "my-blog"
region = "eu-central-1"
pat = "nhp_project_specific_pat"
allow_queries = ["getBlogs", "getComments"]
allow_mutations = ["insertBlog", "insertComment"]
```
With the configuration above, an LLM will only be able to execute the queries and mutations listed above on behalf of a user, even if the user has broader permissions in the Nhost project.
## Contributing
We welcome contributions to mcp-nhost! If you have suggestions, bug reports, or feature requests, please open an issue or submit a pull request.

View File

@@ -51,18 +51,11 @@ nhost up
nhost up --ui nhost
```
## MCP Server
The Nhost CLI ships with an MCP server that lets you interact with your Nhost projects through AI assistants using the Model Context Protocol. It provides secure, controlled access to your GraphQL data, project configuration, and documentation—with granular permissions that let you specify exactly which queries and mutations an LLM can execute. For development, it streamlines your workflow by enabling AI-assisted schema management, metadata changes, and migrations, while providing direct access to your GraphQL schema for intelligent query building.
You can read more about the MCP server in the [MCP Server documentation](https://docs.nhost.io/platform/cli/mcp/overview).
## Documentation
- [Get started with Nhost CLI (longer version)](https://docs.nhost.io/platform/overview/get-started-with-nhost-cli)
- [Nhost CLI](https://docs.nhost.io/platform/cli)
- [Reference](https://docs.nhost.io/reference/cli)
- [MCP Server](https://docs.nhost.io/platform/cli/mcp/overview)
## Build from Source

View File

@@ -56,7 +56,7 @@ func CommandCloud() *cli.Command {
&cli.StringFlag{ //nolint:exhaustruct
Name: flagDashboardVersion,
Usage: "Dashboard version to use",
Value: "nhost/dashboard:2.42.0",
Value: "nhost/dashboard:2.38.0",
Sources: cli.EnvVars("NHOST_DASHBOARD_VERSION"),
},
&cli.StringFlag{ //nolint:exhaustruct

View File

@@ -111,7 +111,7 @@ func CommandUp() *cli.Command { //nolint:funlen
&cli.StringFlag{ //nolint:exhaustruct
Name: flagDashboardVersion,
Usage: "Dashboard version to use",
Value: "nhost/dashboard:2.42.0",
Value: "nhost/dashboard:2.38.0",
Sources: cli.EnvVars("NHOST_DASHBOARD_VERSION"),
},
&cli.StringFlag{ //nolint:exhaustruct

View File

@@ -17,7 +17,7 @@ func actionDump(_ context.Context, cmd *cli.Command) error {
cfg, err := config.Load(configPath)
if err != nil {
fmt.Println("Please, run `nhost mcp config` to configure the service.") //nolint:forbidigo
fmt.Println("Please, run `mcp-nhost config` to configure the service.") //nolint:forbidigo
return cli.Exit("failed to load config file "+err.Error(), 1)
}

View File

@@ -14,7 +14,6 @@ import (
nhostmcp "github.com/nhost/nhost/cli/cmd/mcp"
"github.com/nhost/nhost/cli/cmd/mcp/start"
"github.com/nhost/nhost/cli/cmd/user"
"github.com/nhost/nhost/cli/mcp/resources"
"github.com/nhost/nhost/cli/mcp/tools/cloud"
"github.com/nhost/nhost/cli/mcp/tools/docs"
"github.com/nhost/nhost/cli/mcp/tools/project"
@@ -97,14 +96,8 @@ func TestStart(t *testing.T) { //nolint:cyclop,maintidx,paralleltest
Experimental: nil,
Logging: nil,
Prompts: nil,
Resources: &struct {
Subscribe bool "json:\"subscribe,omitempty\""
ListChanged bool "json:\"listChanged,omitempty\""
}{
Subscribe: false,
ListChanged: false,
},
Sampling: nil,
Resources: nil,
Sampling: nil,
Tools: &struct {
ListChanged bool "json:\"listChanged,omitempty\""
}{
@@ -115,22 +108,10 @@ func TestStart(t *testing.T) { //nolint:cyclop,maintidx,paralleltest
Name: "mcp",
Version: "",
},
Instructions: start.ServerInstructions + `
Configured projects:
Instructions: start.ServerInstructions + `Configured projects:
- local (local): Local development project running via the Nhost CLI
- asdasdasdasdasd (eu-central-1): Staging project for my awesome app
- qweqweqweqweqwe (us-east-1): Production project for my awesome app
The following resources are available:
- schema://nhost-cloud: Schema to interact with the Nhost Cloud. Projects are equivalent
to apps in the schema. IDs are typically uuids.
- schema://graphql-management: GraphQL's management schema for an Nhost project.
This tool is useful to properly understand how manage hasura metadata, migrations,
permissions, remote schemas, etc.
- schema://nhost.toml: Cuelang schema for the nhost.toml configuration file. Run nhost
config validate after making changes to your nhost.toml file to ensure it is valid.
`,
Result: mcp.Result{
Meta: nil,
@@ -185,31 +166,24 @@ config validate after making changes to your nhost.toml file to ensure it is val
Type: "object",
Properties: map[string]any{
"role": map[string]any{
"description": string("role to use when executing queries. Keep in mind the schema depends on the role so if you retrieved the schema for a different role previously retrieve it for this role beforehand as it might differ"),
"default": string("user"),
"description": string("Role to use when fetching the schema. Useful only services `local` and `project`"),
"type": string("string"),
},
"service": map[string]any{
"enum": []any{
string("nhost"), string("config-schema"), string("graphql-management"),
string("project"),
},
"type": string("string"),
},
"subdomain": map[string]any{
"description": string("Project to get the GraphQL schema for. Required when service is `project`"),
"enum": []any{string("local"), string("asdasdasdasdasd"), string("qweqweqweqweqwe")},
"type": string("string"),
},
"mutations": map[string]any{
"description": string("list of mutations to fetch"),
"type": string("array"),
"items": map[string]any{"type": string("string")},
},
"queries": map[string]any{
"description": string("list of queries to fetch"),
"type": string("array"),
"items": map[string]any{"type": string("string")},
},
"summary": map[string]any{
"default": bool(true),
"description": string("only return a summary of the schema"),
"type": string("boolean"),
},
},
Required: []string{"role", "subdomain"},
Required: []string{"service"},
},
Annotations: mcp.ToolAnnotation{
Title: "Get GraphQL/API schema for various services",
@@ -239,8 +213,9 @@ config validate after making changes to your nhost.toml file to ensure it is val
},
},
"role": map[string]any{
"description": "role to use when executing queries. Keep in mind the schema depends on the role so if you retrieved the schema for a different role previously retrieve it for this role beforehand as it might differ",
"description": "role to use when executing queries. Default to user but make sure the user is aware. Keep in mind the schema depends on the role so if you retrieved the schema for a different role previously retrieve it for this role beforehand as it might differ",
"type": "string",
"default": "user",
},
"userId": map[string]any{
"description": string("Overrides X-Hasura-User-Id in the GraphQL query/mutation. Credentials must allow it (i.e. admin secret must be in use)"),
@@ -251,7 +226,7 @@ config validate after making changes to your nhost.toml file to ensure it is val
"type": "string",
},
},
Required: []string{"query", "subdomain", "role"},
Required: []string{"query", "subdomain"},
},
Annotations: mcp.ToolAnnotation{
Title: "Perform GraphQL Query on Nhost Project running on Nhost Cloud",
@@ -271,10 +246,6 @@ config validate after making changes to your nhost.toml file to ensure it is val
"description": "The body for the HTTP request",
"type": "string",
},
"path": map[string]any{
"description": "The path for the HTTP request",
"type": "string",
},
"subdomain": map[string]any{
"description": "Project to perform the GraphQL management operation against",
"type": "string",
@@ -285,7 +256,7 @@ config validate after making changes to your nhost.toml file to ensure it is val
},
},
},
Required: []string{"subdomain", "path", "body"},
Required: []string{"subdomain", "body"},
},
Annotations: mcp.ToolAnnotation{
Title: "Manage GraphQL's Metadata on an Nhost Development Project",
@@ -325,60 +296,24 @@ config validate after making changes to your nhost.toml file to ensure it is val
t.Errorf("ListToolsResult mismatch (-want +got):\n%s", diff)
}
resourceList, err := mcpClient.ListResources(
context.Background(),
mcp.ListResourcesRequest{}, //nolint:exhaustruct
)
if err != nil {
t.Fatalf("failed to list resources: %v", err)
}
if res.Capabilities.Resources != nil {
resources, err := mcpClient.ListResources(
context.Background(),
mcp.ListResourcesRequest{}, //nolint:exhaustruct
)
if err != nil {
t.Fatalf("failed to list resources: %v", err)
}
if diff := cmp.Diff(
resourceList,
//nolint:exhaustruct
&mcp.ListResourcesResult{
Resources: []mcp.Resource{
{
Annotated: mcp.Annotated{
Annotations: &mcp.Annotations{
Audience: []mcp.Role{"agent"},
Priority: 9,
},
},
URI: "schema://graphql-management",
Name: "graphql-management",
Description: resources.GraphqlManagementDescription,
MIMEType: "text/plain",
},
{
Annotated: mcp.Annotated{
Annotations: &mcp.Annotations{
Audience: []mcp.Role{"agent"},
Priority: 9,
},
},
URI: "schema://nhost-cloud",
Name: "nhost-cloud",
Description: resources.CloudDescription,
MIMEType: "text/plain",
},
{
Annotated: mcp.Annotated{
Annotations: &mcp.Annotations{
Audience: []mcp.Role{"agent"},
Priority: 9,
},
},
URI: "schema://nhost.toml",
Name: "nhost.toml",
Description: resources.NhostTomlResourceDescription,
MIMEType: "text/plain",
},
if diff := cmp.Diff(
resources,
//nolint:exhaustruct
&mcp.ListResourcesResult{
Resources: []mcp.Resource{},
},
},
); diff != "" {
t.Errorf("ListResourcesResult mismatch (-want +got):\n%s", diff)
); diff != "" {
t.Errorf("ListResourcesResult mismatch (-want +got):\n%s", diff)
}
}
if res.Capabilities.Prompts != nil {

View File

@@ -8,7 +8,6 @@ import (
"github.com/nhost/nhost/cli/clienv"
"github.com/nhost/nhost/cli/mcp/config"
"github.com/nhost/nhost/cli/mcp/nhost/auth"
"github.com/nhost/nhost/cli/mcp/resources"
"github.com/nhost/nhost/cli/mcp/tools/cloud"
"github.com/nhost/nhost/cli/mcp/tools/docs"
"github.com/nhost/nhost/cli/mcp/tools/project"
@@ -26,20 +25,21 @@ const (
// this seems to be largely ignored by clients, or at least by cursor.
// we also need to look into roots and resources as those might be helpful.
ServerInstructions = `
This is an MCP server to interact with the Nhost Cloud and with Nhost projects.
This is an MCP server to interact with Nhost Cloud and with projects running on it and
also with Nhost local development projects.
Important notes to anyone using this MCP server. Do not use this MCP server without
following these instructions:
1. Make sure you are clear on which environment the user wants to operate against.
2. Before attempting to call any tool, always make sure you list resources, roots, and
resource templates to understand what is available.
3. Apps and projects are the same and while users may talk about projects in Nhost's GraphQL
2. Before attempting to call any tool *-graphql-query, always make sure you read the various
resources and use the get-schema tool to get the required schemas
3. Apps and projects are the same and while users may talk about projects in the GraphQL
api those are referred as apps.
4. If you have an error querying the GraphQL API, please check the schema again. The schema may
4. IDs are always UUIDs so if you have anything else (like an app/project name) you may need
to first get the ID using the *-graphql-query tool.
5. If you have an error querying the GraphQL API, please check the schema again. The schema may
have changed and the query you are using may be invalid.
5. Always follow the instructions provided by each tool. If you need to deviate from these
instructions, please, confirm with the user before doing so.
`
)
@@ -83,10 +83,7 @@ func action(ctx context.Context, cmd *cli.Command) error {
}
ServerInstructions := ServerInstructions
ServerInstructions += "\n\n"
ServerInstructions += cfg.Projects.Instructions()
ServerInstructions += "\n"
ServerInstructions += resources.Instructions()
mcpServer := server.NewMCPServer(
cmd.Root().Name,
@@ -94,10 +91,6 @@ func action(ctx context.Context, cmd *cli.Command) error {
server.WithInstructions(ServerInstructions),
)
if err := resources.Register(cfg, mcpServer); err != nil {
return cli.Exit(fmt.Sprintf("failed to register resources: %s", err), 1)
}
if cfg.Cloud != nil {
if err := registerCloud(
cmd,
@@ -137,7 +130,7 @@ func getConfig(cmd *cli.Command) (*config.Config, error) {
cfg, err := config.Load(configPath)
if err != nil {
fmt.Println("Please, run `nhost mcp config` to configure the service.") //nolint:forbidigo
fmt.Println("Please, run `mcp-nhost config` to configure the service.") //nolint:forbidigo
return nil, cli.Exit("failed to load config file "+err.Error(), 1)
}

View File

@@ -131,7 +131,7 @@ func initInit(
getclient := &getter.Client{} //nolint:exhaustruct
if _, err := getclient.Get(ctx, &getter.Request{ //nolint:exhaustruct
Src: "git::https://github.com/nhost/nhost.git//services/auth/email-templates",
Src: "git::https://github.com/nhost/hasura-auth.git//email-templates",
Dst: "nhost/emails",
DisableSymlinks: true,
}); err != nil {

View File

@@ -2,13 +2,10 @@ package software
import (
"context"
"errors"
"fmt"
"io"
"os"
"runtime"
"strings"
"syscall"
"github.com/nhost/nhost/cli/clienv"
"github.com/nhost/nhost/cli/software"
@@ -95,8 +92,8 @@ func install(cmd *cli.Command, ce *clienv.CliEnv, tmpFile string) error {
ce.Infoln("Copying to %s...", curBin)
if err := moveOrCopyFile(tmpFile, curBin); err != nil {
return fmt.Errorf("failed to move %s to %s: %w", tmpFile, curBin, err)
if err := os.Rename(tmpFile, curBin); err != nil {
return fmt.Errorf("failed to rename %s to %s: %w", tmpFile, curBin, err)
}
ce.Infoln("Setting permissions...")
@@ -107,55 +104,3 @@ func install(cmd *cli.Command, ce *clienv.CliEnv, tmpFile string) error {
return nil
}
func moveOrCopyFile(src, dst string) error {
if err := os.Rename(src, dst); err != nil {
var linkErr *os.LinkError
// this happens when moving across different filesystems
if errors.As(err, &linkErr) && errors.Is(linkErr.Err, syscall.EXDEV) {
if err := hardMove(src, dst); err != nil {
return fmt.Errorf("failed to hard move: %w", err)
}
return nil
}
return fmt.Errorf("failed to rename: %w", err)
}
return nil
}
func hardMove(src, dst string) error {
srcFile, err := os.Open(src)
if err != nil {
return fmt.Errorf("failed to open source file: %w", err)
}
defer srcFile.Close()
dstFile, err := os.Create(dst)
if err != nil {
return fmt.Errorf("failed to create destination file: %w", err)
}
defer dstFile.Close()
if _, err := io.Copy(dstFile, srcFile); err != nil {
return fmt.Errorf("failed to copy file contents: %w", err)
}
fi, err := os.Stat(src)
if err != nil {
return fmt.Errorf("failed to stat source file: %w", err)
}
err = os.Chmod(dst, fi.Mode())
if err != nil {
return fmt.Errorf("failed to set file permissions: %w", err)
}
if err := os.Remove(src); err != nil {
return fmt.Errorf("failed to remove source file: %w", err)
}
return nil
}

View File

@@ -47,7 +47,7 @@ func auth( //nolint:funlen
&model.ConfigSmtp{
User: "user",
Password: "password",
Sender: "auth@example.com",
Sender: "hasura-auth@example.com",
Host: "mailhog",
Port: 1025, //nolint:mnd
Secure: false,
@@ -56,7 +56,6 @@ func auth( //nolint:funlen
false,
false,
"00000000-0000-0000-0000-000000000000",
"5181f67e2844e4b60d571fa346cac9c37fc00d1ff519212eae6cead138e639ba",
)
if err != nil {
return nil, fmt.Errorf("failed to get hasura env vars: %w", err)

View File

@@ -33,7 +33,6 @@ func expectedAuth() *Service {
"AUTH_DISABLE_SIGNUP": "false",
"AUTH_EMAIL_PASSWORDLESS_ENABLED": "true",
"AUTH_EMAIL_SIGNIN_EMAIL_VERIFIED_REQUIRED": "true",
"AUTH_ENCRYPTION_KEY": "5181f67e2844e4b60d571fa346cac9c37fc00d1ff519212eae6cead138e639ba",
"AUTH_GRAVATAR_DEFAULT": "gravatarDefault",
"AUTH_GRAVATAR_ENABLED": "true",
"AUTH_GRAVATAR_RATING": "gravatarRating",
@@ -53,7 +52,6 @@ func expectedAuth() *Service {
"AUTH_PROVIDER_APPLE_ENABLED": "true",
"AUTH_PROVIDER_APPLE_KEY_ID": "appleKeyId",
"AUTH_PROVIDER_APPLE_PRIVATE_KEY": "applePrivateKey",
"AUTH_PROVIDER_APPLE_SCOPE": "",
"AUTH_PROVIDER_APPLE_TEAM_ID": "appleTeamId",
"AUTH_PROVIDER_AZUREAD_CLIENT_ID": "azureadClientId",
"AUTH_PROVIDER_AZUREAD_CLIENT_SECRET": "azureadClientSecret",
@@ -76,12 +74,9 @@ func expectedAuth() *Service {
"AUTH_PROVIDER_FACEBOOK_CLIENT_SECRET": "facebookClientSecret",
"AUTH_PROVIDER_FACEBOOK_ENABLED": "true",
"AUTH_PROVIDER_FACEBOOK_SCOPE": "email",
"AUTH_PROVIDER_GITHUB_AUDIENCE": "audience",
"AUTH_PROVIDER_GITHUB_CLIENT_ID": "githubClientId",
"AUTH_PROVIDER_GITHUB_CLIENT_SECRET": "githubClientSecret",
"AUTH_PROVIDER_GITHUB_ENABLED": "true",
"AUTH_PROVIDER_GITHUB_SCOPE": "user:email",
"AUTH_PROVIDER_GITLAB_AUDIENCE": "audience",
"AUTH_PROVIDER_GITLAB_CLIENT_ID": "gitlabClientId",
"AUTH_PROVIDER_GITLAB_CLIENT_SECRET": "gitlabClientSecret",
"AUTH_PROVIDER_GITLAB_ENABLED": "true",
@@ -101,7 +96,6 @@ func expectedAuth() *Service {
"AUTH_PROVIDER_SPOTIFY_CLIENT_SECRET": "spotifyClientSecret",
"AUTH_PROVIDER_SPOTIFY_ENABLED": "true",
"AUTH_PROVIDER_SPOTIFY_SCOPE": "user-read-email",
"AUTH_PROVIDER_STRAVA_AUDIENCE": "audience",
"AUTH_PROVIDER_STRAVA_CLIENT_ID": "stravaClientId",
"AUTH_PROVIDER_STRAVA_CLIENT_SECRET": "stravaClientSecret",
"AUTH_PROVIDER_STRAVA_ENABLED": "true",
@@ -149,7 +143,7 @@ func expectedAuth() *Service {
"AUTH_SMTP_PASS": "password",
"AUTH_SMTP_PORT": "1025",
"AUTH_SMTP_SECURE": "false",
"AUTH_SMTP_SENDER": "auth@example.com",
"AUTH_SMTP_SENDER": "hasura-auth@example.com",
"AUTH_SMTP_USER": "user",
"AUTH_USER_DEFAULT_ALLOWED_ROLES": "user,admin",
"AUTH_USER_DEFAULT_ROLE": "user",

View File

@@ -344,7 +344,7 @@ func dashboard(
subdomain, "hasura", httpPort, useTLS,
) + "/console",
"NEXT_PUBLIC_NHOST_HASURA_MIGRATIONS_API_URL": URL(
subdomain, "hasura", httpPort, useTLS) + "/apis/migrate",
subdomain, "hasura", httpPort, useTLS),
"NEXT_PUBLIC_NHOST_STORAGE_URL": URL(
subdomain, "storage", httpPort, useTLS) + "/v1",
},
@@ -459,7 +459,7 @@ func mailhog(subdomain, volumeName string, useTLS bool) *Service {
"SMTP_PASS": "password",
"SMTP_PORT": "1025",
"SMTP_SECURE": "false",
"SMTP_SENDER": "auth@example.com",
"SMTP_SENDER": "hasura-auth@example.com",
"SMTP_USER": "user",
},
ExtraHosts: extraHosts(subdomain),

80
cli/docs/mcp/CONFIG.md Normal file
View File

@@ -0,0 +1,80 @@
# Configuration
This document describes all available configuration options for the Nhost MCP tool. The configuration file uses TOML format.
## TOML
```toml
# Cloud configuration for managing Nhost Cloud projects and organizations
# Remove section to disable this access
[cloud]
# Personal Access Token (PAT) for Nhost Cloud API authentication
# Get one at: https://app.nhost.io/account
pat = "your-pat-here"
# Enable mutations on Nhost Cloud configurations
# When false, only queries are allowed
enable_mutations = true
# Local configuration for interacting with Nhost CLI projects
# Remove section to disable access
[local]
# Admin secret for local project authentication
admin_secret = "your-admin-secret"
# Optional: Custom config server URL
# Default: https://local.dashboard.local.nhost.run/v1/configserver/graphql
config_server_url = "your-custom-url"
# Optional: Custom GraphQL URL
# Default: https://local.graphql.local.nhost.run/v1
graphql_url = "your-custom-url"
# Project-specific configurations
[[projects]]
# Project subdomain (required)
subdomain = "your-project-subdomain"
# Project region (required)
region = "your-project-region"
# Authentication: Use either admin_secret or pat
# Admin secret for project access
admin_secret = "your-project-admin-secret"
# OR
# Project-specific PAT
pat = "your-project-pat"
# List of allowed GraphQL queries
# Use ["*"] to allow all queries, [] to disable all
allow_queries = ["*"]
# List of allowed GraphQL mutations
# Use ["*"] to allow all mutations, [] to disable all
# Only effective if mutations are enabled for the project
allow_mutations = ["*"]
```
## Example Configuration
```toml
[cloud]
pat = "1234567890abcdef"
enable_mutations = true
[local]
admin_secret = "nhost-admin-secret"
[[projects]]
subdomain = "my-app"
region = "eu-central-1"
admin_secret = "project-admin-secret"
allow_queries = ["*"]
allow_mutations = ["createUser", "updateUser"]
[[projects]]
subdomain = "another-app"
region = "us-east-1"
pat = "nhp_project_specific_pat"
allow_queries = ["getUsers", "getPosts"]
allow_mutations = []
```

96
cli/docs/mcp/USAGE.md Normal file
View File

@@ -0,0 +1,96 @@
# NAME
nhost-mcp - Nhost's Model Context Protocol (MCP) server
# SYNOPSIS
nhost-mcp
```
[--help|-h]
[--version|-v]
```
**Usage**:
```
nhost-mcp [GLOBAL OPTIONS] [command [COMMAND OPTIONS]] [ARGUMENTS...]
```
# GLOBAL OPTIONS
**--help, -h**: show help
**--version, -v**: print the version
# COMMANDS
## docs
Generate markdown documentation for the CLI
**--help, -h**: show help
### help, h
Shows a list of commands or help for one command
## config
Generate and save configuration file
**--config-file**="": Configuration file path (default: /Users/dbarroso/.config/nhost/mcp-nhost.toml)
**--confirm**: Skip confirmation prompt
**--help, -h**: show help
### help, h
Shows a list of commands or help for one command
## start
Starts the MCP server
**--bind**="": Bind address in the form <host>:<port>. If omitted use stdio
**--config-file**="": Path to the config file (default: /Users/dbarroso/.config/nhost/mcp-nhost.toml)
**--help, -h**: show help
### help, h
Shows a list of commands or help for one command
## gen
Generate GraphQL schema for Nhost Cloud
**--help, -h**: show help
**--nhost-pat**="": Personal Access Token
**--with-mutations**: Include mutations in the generated schema
### help, h
Shows a list of commands or help for one command
## upgrade
Checks if there is a new version and upgrades it
**--confirm**: Confirm the upgrade without prompting
**--help, -h**: show help
### help, h
Shows a list of commands or help for one command
## help, h
Shows a list of commands or help for one command

View File

@@ -0,0 +1,49 @@
# Screenshots
Listing cloud projects:
<img src="screenshots/101-cloud-projects.png" width="600" alt="listing cloud projects">
Changing cloud project's configuration:
<img src="screenshots/102-cloud-project-config.png" width="600" alt="changing cloud project's configuration">
Querying cloud project's configuration:
<img src="screenshots/103-cloud-project-config2.png" width="600" alt="querying cloud project's configuration">
Querying local project's schema:
<img src="screenshots/201-local-schema.png" width="600" alt="querying local project's schema">
Generating code from local project's schema:
<img src="screenshots/202-local-code.png" alt="generating code from local project's schema">
Resulting code:
<img src="screenshots/203-result.png" alt="resulting code">
Querying local project's configuration:
<img src="screenshots/204-local-config-query.png" width="600" alt="querying local project's configuration">
Modifying local project's configuration:
<img src="screenshots/205-local-config-change.png" width="600" alt="modifying local project's configuration">
Querying cloud project's schema:
<img src="screenshots/301-project-schema.png" width="600" alt="project schema">
Querying cloud project's data:
<img src="screenshots/302-project-query.png" width="600" alt="project data">
Managing cloud project's data:
<img src="screenshots/303-project-mutation.png" width="600" alt="project mutation">
Analysing cloud project's data:
<img src="screenshots/304-project-data-analysis.png" width="600" alt="project data analysis">

Binary file not shown.

After

Width:  |  Height:  |  Size: 136 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 278 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 145 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 296 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 819 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 235 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 294 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 270 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 152 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 107 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 181 KiB

View File

@@ -3,10 +3,6 @@
name = 'GREET'
value = 'Sayonara'
[[global.environment]]
name = 'NODE_ENV'
value = 'production'
[hasura]
version = 'v2.46.0-ce'
adminSecret = '{{ secrets.HASURA_GRAPHQL_ADMIN_SECRET }}'

View File

@@ -44,7 +44,7 @@ if [[ "$version" == "latest" ]]; then
release=$(curl --silent https://api.github.com/repos/nhost/nhost/releases\?per_page=100 | grep tag_name | grep \"cli\@ | head -n 1 | sed 's/.*"tag_name": "\([^"]*\)".*/\1/')
version=$( echo $release | sed 's/.*@//')
else
release="cli@$version"
release="cli@$release"
fi
# check version exists

View File

@@ -1,7 +1,6 @@
package graphql
import (
"encoding/json"
"fmt"
"sort"
"strings"
@@ -87,30 +86,6 @@ func ParseSchema(response ResponseIntrospection, filter Filter) string { //nolin
return render(neededQueries, neededMutations, neededTypes)
}
func SummarizeSchema(response ResponseIntrospection) string {
summary := map[string][]string{
"query": make([]string, len(response.Data.Schema.QueryType.Fields)),
}
for i, query := range response.Data.Schema.QueryType.Fields {
summary["query"][i] = query.Name
}
if response.Data.Schema.MutationType != nil {
summary["mutation"] = make([]string, len(response.Data.Schema.MutationType.Fields))
for _, mutation := range response.Data.Schema.MutationType.Fields {
summary["mutation"] = append(summary["mutation"], mutation.Name)
}
}
b, err := json.MarshalIndent(summary, "", " ")
if err != nil {
return fmt.Sprintf("failed to marshal summary: %v", err)
}
return string(b)
}
func filterNestedArgs(
args []InputValue, neededTypes map[string]Type,
) []InputValue {

View File

@@ -24,10 +24,6 @@ func checkAllowedOperation(
selectionSet ast.SelectionSet,
allowed []string,
) error {
if slices.Contains(allowed, "*") {
return nil
}
for _, v := range selectionSet {
if v, ok := v.(*ast.Field); ok {
if len(v.SelectionSet) > 0 && !slices.Contains(allowed, v.Name) {

View File

@@ -1,6 +1,6 @@
// Package auth provides primitives to interact with the openapi HTTP API.
//
// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version 2.5.0 DO NOT EDIT.
// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version 2.4.1 DO NOT EDIT.
package auth
import (

View File

@@ -1,6 +1,6 @@
// Package graphql provides primitives to interact with the openapi HTTP API.
//
// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version 2.5.0 DO NOT EDIT.
// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version 2.4.1 DO NOT EDIT.
package graphql
import (

View File

@@ -1,69 +0,0 @@
package resources
import (
"context"
_ "embed"
"github.com/mark3labs/mcp-go/mcp"
"github.com/mark3labs/mcp-go/server"
"github.com/nhost/nhost/cli/mcp/config"
)
//go:embed cloud_schema.graphql
var schemaGraphql string
//go:embed cloud_schema-with-mutations.graphql
var schemaGraphqlWithMutations string
const (
CloudResourceURI = "schema://nhost-cloud"
CloudDescription = `Schema to interact with the Nhost Cloud. Projects are equivalent
to apps in the schema. IDs are typically uuids.`
)
type Cloud struct {
schema string
}
func NewCloud(cfg *config.Config) *Cloud {
schema := schemaGraphql
if cfg.Cloud.EnableMutations {
schema = schemaGraphqlWithMutations
}
return &Cloud{
schema: schema,
}
}
func (t *Cloud) Register(server *server.MCPServer) {
server.AddResource(
mcp.Resource{
URI: CloudResourceURI,
Name: "nhost-cloud",
Annotated: mcp.Annotated{
Annotations: &mcp.Annotations{
Audience: []mcp.Role{"agent"},
Priority: 9.0, //nolint:mnd
},
},
Description: CloudDescription,
MIMEType: "text/plain",
Meta: nil,
},
t.handle,
)
}
func (t *Cloud) handle(
_ context.Context, request mcp.ReadResourceRequest,
) ([]mcp.ResourceContents, error) {
return []mcp.ResourceContents{
mcp.TextResourceContents{
URI: request.Params.URI,
MIMEType: "text/plain",
Text: t.schema,
Meta: nil,
},
}, nil
}

View File

@@ -0,0 +1,5 @@
package resources
import "errors"
var ErrParameterRequired = errors.New("parameter required")

View File

@@ -1,54 +0,0 @@
package resources
import (
"context"
"github.com/mark3labs/mcp-go/mcp"
"github.com/mark3labs/mcp-go/server"
"github.com/nhost/nhost/cli/mcp/nhost/graphql"
)
const (
GraphqlManagementResourceURI = "schema://graphql-management"
GraphqlManagementDescription = `GraphQL's management schema for an Nhost project.
This tool is useful to properly understand how manage hasura metadata, migrations,
permissions, remote schemas, etc.`
)
type GraphqlManagement struct{}
func NewGraphqlManagement() *GraphqlManagement {
return &GraphqlManagement{}
}
func (t *GraphqlManagement) Register(server *server.MCPServer) {
server.AddResource(
mcp.Resource{
URI: GraphqlManagementResourceURI,
Name: "graphql-management",
Annotated: mcp.Annotated{
Annotations: &mcp.Annotations{
Audience: []mcp.Role{"agent"},
Priority: 9.0, //nolint:mnd
},
},
Description: GraphqlManagementDescription,
MIMEType: "text/plain",
Meta: nil,
},
t.handle,
)
}
func (t *GraphqlManagement) handle(
_ context.Context, request mcp.ReadResourceRequest,
) ([]mcp.ResourceContents, error) {
return []mcp.ResourceContents{
mcp.TextResourceContents{
URI: request.Params.URI,
MIMEType: "text/plain",
Text: graphql.Schema,
Meta: nil,
},
}, nil
}

View File

@@ -1,57 +0,0 @@
package resources
import (
"context"
_ "embed"
"github.com/mark3labs/mcp-go/mcp"
"github.com/mark3labs/mcp-go/server"
)
//go:embed nhost_toml_schema.cue
var schemaNhostToml string
const (
NhostTomlResourceURI = "schema://nhost.toml"
NhostTomlResourceDescription = `Cuelang schema for the nhost.toml configuration file. Run nhost
config validate after making changes to your nhost.toml file to ensure it is valid.`
)
type NhostToml struct{}
func NewNhostToml() *NhostToml {
return &NhostToml{}
}
func (t *NhostToml) Register(server *server.MCPServer) {
server.AddResource(
mcp.Resource{
URI: NhostTomlResourceURI,
Name: "nhost.toml",
Annotated: mcp.Annotated{
Annotations: &mcp.Annotations{
Audience: []mcp.Role{"agent"},
Priority: 9.0, //nolint:mnd
},
},
Description: NhostTomlResourceDescription,
MIMEType: "text/plain",
Meta: nil,
},
t.handle,
)
}
//go:generate cp ../../../vendor/github.com/nhost/be/services/mimir/schema/schema.cue nhost_toml_schema.cue
func (t *NhostToml) handle(
_ context.Context, request mcp.ReadResourceRequest,
) ([]mcp.ResourceContents, error) {
return []mcp.ResourceContents{
mcp.TextResourceContents{
URI: request.Params.URI,
MIMEType: "text/plain",
Text: schemaNhostToml,
Meta: nil,
},
}, nil
}

View File

@@ -1,40 +0,0 @@
package resources
import (
"fmt"
"github.com/mark3labs/mcp-go/server"
"github.com/nhost/nhost/cli/mcp/config"
)
func Instructions() string {
return "The following resources are available:\n\n" +
fmt.Sprintf("- %s: %s\n", CloudResourceURI, CloudDescription) +
fmt.Sprintf("- %s: %s\n", GraphqlManagementResourceURI, GraphqlManagementDescription) +
fmt.Sprintf("- %s: %s\n", NhostTomlResourceURI, NhostTomlResourceDescription)
}
func Register(cfg *config.Config, server *server.MCPServer) error {
nt := NewNhostToml()
nt.Register(server)
if cfg.Cloud != nil {
ct := NewCloud(cfg)
ct.Register(server)
}
enableGraphlManagement := false
for _, project := range cfg.Projects {
if project.ManageMetadata {
enableGraphlManagement = true
break
}
}
if enableGraphlManagement {
gmt := NewGraphqlManagement()
gmt.Register(server)
}
return nil
}

View File

@@ -12,7 +12,7 @@ import (
const (
ToolGraphqlQueryName = "cloud-graphql-query"
//nolint:lll
ToolGraphqlQueryInstructions = `Execute a GraphQL query against the Nhost Cloud to perform operations on projects and organizations. It also allows configuring projects hosted on Nhost Cloud. Make sure you got the schema before attempting to execute any query. If you get an error while performing a query refresh the schema in case something has changed or you did something wrong. If you get an error indicating mutations are not allowed the user may have disabled them in the server, don't retry and ask the user they need to pass --with-cloud-mutations when starting nhost's mcp to enable them. Projects are apps.`
ToolGraphqlQueryInstructions = `Execute a GraphQL query against the Nhost Cloud to perform operations on projects and organizations. It also allows configuring projects hosted on Nhost Cloud. Make sure you got the schema before attempting to execute any query. If you get an error while performing a query refresh the schema in case something has changed or you did something wrong. If you get an error indicating mutations are not allowed the user may have disabled them in the server, don't retry and ask the user they need to pass --with-cloud-mutations when starting mcp-nhost to enable them. Projects are apps.`
)
func ptr[T any](v T) *T {
@@ -59,7 +59,7 @@ func (t *Tool) handleGraphqlQuery(
allowedMutations := []string{}
if t.withMutations {
allowedMutations = []string{"*"}
allowedMutations = nil
}
var resp graphql.Response[any]
@@ -69,7 +69,7 @@ func (t *Tool) handleGraphqlQuery(
args.Query,
args.Variables,
&resp,
[]string{"*"},
nil,
allowedMutations,
t.interceptors...,
); err != nil {

View File

@@ -24,7 +24,7 @@ const (
## Metadata changes
* When changing metadata ALWAYS use the /apis/migrate endpoint
* When changing metadata always use the /apis/migrate endpoint
* Always perform a bulk request to avoid
having to perform multiple requests
* The admin user always has full permissions to everything by default, no need to configure
@@ -56,7 +56,6 @@ const (
type ManageGraphqlRequest struct {
Body string `json:"body"`
Subdomain string `json:"subdomain"`
Path string `json:"path"`
}
func (t *Tool) registerManageGraphql(mcpServer *server.MCPServer) {
@@ -78,11 +77,6 @@ func (t *Tool) registerManageGraphql(mcpServer *server.MCPServer) {
mcp.Enum(t.cfg.Projects.Subdomains()...),
mcp.Required(),
),
mcp.WithString(
"path",
mcp.Description("The path for the HTTP request"),
mcp.Required(),
),
mcp.WithString(
"body",
mcp.Description("The body for the HTTP request"),
@@ -171,7 +165,7 @@ func (t *Tool) handleManageGraphql(
}
response, err := genericQuery(
ctx, project.GetHasuraURL()+args.Path, args.Body, http.MethodPost, headers, interceptors,
ctx, project.GetHasuraURL(), args.Body, http.MethodPost, headers, interceptors,
)
if err != nil {
return mcp.NewToolResultErrorFromErr("failed to execute query", err), nil

View File

@@ -14,7 +14,7 @@ import (
const (
ToolGraphqlQueryName = "graphql-query"
//nolint:lll
ToolGraphqlQueryInstructions = `Execute a GraphQL query against a Nhost project. This tool is useful to query and mutate data. If you run into issues executing queries, retrieve the schema again in case the schema has changed. If you get an error indicating the query or mutation is not allowed the user may have disabled them in the server, don't retry and tell the user they need to enable them when starting nhost's mcp`
ToolGraphqlQueryInstructions = `Execute a GraphQL query against a Nhost project. This tool is useful to query and mutate data. If you run into issues executing queries, retrieve the schema again in case the schema has changed. If you get an error indicating the query or mutation is not allowed the user may have disabled them in the server, don't retry and tell the user they need to enable them when starting mcp-nhost`
)
func ptr[T any](v T) *T {
@@ -69,9 +69,9 @@ func (t *Tool) registerGraphqlQuery(mcpServer *server.MCPServer) {
mcp.WithString(
"role",
mcp.Description(
"role to use when executing queries. Keep in mind the schema depends on the role so if you retrieved the schema for a different role previously retrieve it for this role beforehand as it might differ", //nolint:lll
"role to use when executing queries. Default to user but make sure the user is aware. Keep in mind the schema depends on the role so if you retrieved the schema for a different role previously retrieve it for this role beforehand as it might differ", //nolint:lll
),
mcp.Required(),
mcp.DefaultString("user"),
),
mcp.WithString(
"userId",

View File

@@ -0,0 +1,25 @@
package schemas
import (
_ "embed"
"errors"
)
//go:embed cloud_schema.graphql
var schemaGraphql string
//go:embed cloud_schema-with-mutations.graphql
var schemaGraphqlWithMutations string
func (t *Tool) handleResourceCloud() (string, error) {
if t.cfg.Cloud == nil {
return "", errors.New("nhost cloud is not configured") //nolint:err113
}
schema := schemaGraphql
if t.cfg.Cloud.EnableMutations {
schema = schemaGraphqlWithMutations
}
return schema, nil
}

View File

@@ -0,0 +1,9 @@
package schemas
import (
"github.com/nhost/nhost/cli/mcp/nhost/graphql"
)
func (t *Tool) handleGraphqlManagementSchema() string {
return graphql.Schema
}

View File

@@ -0,0 +1,13 @@
package schemas
import (
_ "embed"
)
//go:embed nhost_toml_schema.cue
var schemaNhostToml string
//go:generate cp ../../../../vendor/github.com/nhost/be/services/mimir/schema/schema.cue nhost_toml_schema.cue
func (t *Tool) handleSchemaNhostToml() string {
return schemaNhostToml
}

View File

@@ -148,7 +148,7 @@ import (
#Hasura: {
// Version of hasura, you can see available versions in the URL below:
// https://hub.docker.com/r/hasura/graphql-engine/tags
version: string | *"v2.48.5-ce"
version: string | *"v2.46.0-ce"
// JWT Secrets configuration
jwtSecrets: [#JWTSecret]
@@ -223,7 +223,7 @@ import (
// Releases:
//
// https://github.com/nhost/hasura-storage/releases
version: string | *"0.9.1"
version: string | *"0.7.2"
// Networking (custom domains at the moment) are not allowed as we need to do further
// configurations in the CDN. We will enable it again in the future.
@@ -311,7 +311,7 @@ import (
// Releases:
//
// https://github.com/nhost/hasura-auth/releases
version: string | *"0.43.0"
version: string | *"0.38.1"
// Resources for the service
resources?: #Resources
@@ -651,9 +651,6 @@ import (
iops: uint32 | *3000
tput: uint32 | *125
}
encryptColumnKey?: string & =~"^[0-9a-fA-F]{64}$" // 32 bytes hex-encoded key
oldEncryptColumnKey?: string & =~"^[0-9a-fA-F]{64}$" // for key rotation
}
persistentVolumesEncrypted: bool | *false

View File

@@ -25,28 +25,8 @@ type GetGraphqlSchemaRequest struct {
ProjectSubdomain string `json:"projectSubdomain"`
}
func toQueries(q []string) []graphql.Queries {
if q == nil {
return nil
}
queries := make([]graphql.Queries, len(q))
for i, v := range q {
queries[i] = graphql.Queries{
Name: v,
DisableNesting: false,
}
}
return queries
}
func (t *Tool) handleProjectGraphqlSchema(
ctx context.Context,
role string,
subdomain string,
summary bool,
queries, mutations []string,
ctx context.Context, role string, subdomain string,
) (string, error) {
project, err := t.cfg.Projects.Get(subdomain)
if err != nil {
@@ -70,25 +50,20 @@ func (t *Tool) handleProjectGraphqlSchema(
graphql.IntrospectionQuery,
nil,
&introspection,
[]string{"*"},
nil,
nil,
interceptors...,
); err != nil {
return "", fmt.Errorf("failed to query GraphQL schema: %w", err)
}
var schema string
if summary {
schema = graphql.SummarizeSchema(introspection)
} else {
schema = graphql.ParseSchema(
introspection,
graphql.Filter{
AllowQueries: toQueries(queries),
AllowMutations: toQueries(mutations),
},
)
}
schema := graphql.ParseSchema(
introspection,
graphql.Filter{
AllowQueries: nil,
AllowMutations: nil,
},
)
return schema, nil
}

View File

@@ -14,6 +14,13 @@ const (
Get GraphQL/API schemas to interact with various services. Use the "service" parameter to
specify which schema you want. Supported services are:
- nhost: This is the schema to interact with the Nhost Cloud. Projects are equivalent
to apps in the schema. IDs are typically uuids.
- config-schema: Get Cuelang schema for the nhost.toml configuration file. Run nhost
config validate after making changes to your nhost.toml file to ensure it is valid.
- graphql-management: GraphQL's management schema for an Nhost development project
running locally via the Nhost CLI. This tool is useful to properly understand how
manage hasura metadata, migrations, permissions, remote schemas, etc.
- project: Get GraphQL schema for an Nhost project. The "subdomain"
parameter is required to specify which project to get the schema for. The "role"
parameter can be passed to specify the role to use when fetching the schema (defaults
@@ -46,12 +53,17 @@ func (t *Tool) Register(mcpServer *server.MCPServer) {
OpenWorldHint: ptr(true),
},
),
mcp.WithString(
"service",
mcp.Enum("nhost", "config-schema", "graphql-management", "project"),
mcp.Required(),
),
mcp.WithString(
"role",
mcp.Description(
"role to use when executing queries. Keep in mind the schema depends on the role so if you retrieved the schema for a different role previously retrieve it for this role beforehand as it might differ", //nolint:lll
"Role to use when fetching the schema. Useful only services `local` and `project`",
),
mcp.Required(),
mcp.DefaultString("user"),
),
mcp.WithString(
"subdomain",
@@ -59,22 +71,6 @@ func (t *Tool) Register(mcpServer *server.MCPServer) {
"Project to get the GraphQL schema for. Required when service is `project`",
),
mcp.Enum(t.cfg.Projects.Subdomains()...),
mcp.Required(),
),
mcp.WithBoolean(
"summary",
mcp.Description("only return a summary of the schema"),
mcp.DefaultBool(true),
),
mcp.WithArray(
"queries",
mcp.WithStringItems(),
mcp.Description("list of queries to fetch"),
),
mcp.WithArray(
"mutations",
mcp.WithStringItems(),
mcp.Description("list of mutations to fetch"),
),
)
@@ -82,19 +78,31 @@ func (t *Tool) Register(mcpServer *server.MCPServer) {
}
type HandleRequest struct {
Role string `json:"role,omitempty"`
Subdomain string `json:"subdomain,omitempty"`
Summary bool `json:"summary,omitempty"`
Queries []string `json:"queries,omitempty"`
Mutations []string `json:"mutations,omitempty"`
Service string `json:"service"`
Role string `json:"role,omitempty"`
Subdomain string `json:"subdomain,omitempty"`
}
func (t *Tool) handle(
ctx context.Context, _ mcp.CallToolRequest, args HandleRequest,
) (*mcp.CallToolResult, error) {
schema, err := t.handleProjectGraphqlSchema(
ctx, args.Role, args.Subdomain, args.Summary, args.Queries, args.Mutations,
var (
schema string
err error
)
switch args.Service {
case "nhost":
schema, err = t.handleResourceCloud()
case "local-config-server":
schema = t.handleSchemaNhostToml()
case "graphql-management":
schema = t.handleGraphqlManagementSchema()
case "project":
schema, err = t.handleProjectGraphqlSchema(ctx, args.Role, args.Subdomain)
default:
return mcp.NewToolResultError("unknown service: " + args.Service), nil
}
if err != nil {
return mcp.NewToolResultError(err.Error()), nil
}

View File

@@ -70,28 +70,18 @@ type ConfigAIUpdateInput struct {
WebhookSecret *string `json:"webhookSecret,omitempty"`
}
// Configuration for auth service
// You can find more information about the configuration here:
// https://github.com/nhost/hasura-auth/blob/main/docs/environment-variables.md
type ConfigAuth struct {
ElevatedPrivileges *ConfigAuthElevatedPrivileges `json:"elevatedPrivileges,omitempty"`
Method *ConfigAuthMethod `json:"method,omitempty"`
Misc *ConfigAuthMisc `json:"misc,omitempty"`
RateLimit *ConfigAuthRateLimit `json:"rateLimit,omitempty"`
Redirections *ConfigAuthRedirections `json:"redirections,omitempty"`
// Resources for the service
Resources *ConfigResources `json:"resources,omitempty"`
Session *ConfigAuthSession `json:"session,omitempty"`
SignUp *ConfigAuthSignUp `json:"signUp,omitempty"`
Totp *ConfigAuthTotp `json:"totp,omitempty"`
User *ConfigAuthUser `json:"user,omitempty"`
// Version of auth, you can see available versions in the URL below:
// https://hub.docker.com/r/nhost/hasura-auth/tags
//
// Releases:
//
// https://github.com/nhost/hasura-auth/releases
Version *string `json:"version,omitempty"`
Resources *ConfigResources `json:"resources,omitempty"`
Session *ConfigAuthSession `json:"session,omitempty"`
SignUp *ConfigAuthSignUp `json:"signUp,omitempty"`
Totp *ConfigAuthTotp `json:"totp,omitempty"`
User *ConfigAuthUser `json:"user,omitempty"`
Version *string `json:"version,omitempty"`
}
type ConfigAuthElevatedPrivileges struct {
@@ -121,11 +111,9 @@ type ConfigAuthMethodAnonymousUpdateInput struct {
}
type ConfigAuthMethodEmailPassword struct {
EmailVerificationRequired *bool `json:"emailVerificationRequired,omitempty"`
// Disabling email+password sign in is not implmented yet
// enabled: bool | *true
HibpEnabled *bool `json:"hibpEnabled,omitempty"`
PasswordMinLength *uint32 `json:"passwordMinLength,omitempty"`
EmailVerificationRequired *bool `json:"emailVerificationRequired,omitempty"`
HibpEnabled *bool `json:"hibpEnabled,omitempty"`
PasswordMinLength *uint32 `json:"passwordMinLength,omitempty"`
}
type ConfigAuthMethodEmailPasswordUpdateInput struct {
@@ -347,10 +335,8 @@ type ConfigAuthRateLimitUpdateInput struct {
}
type ConfigAuthRedirections struct {
// AUTH_ACCESS_CONTROL_ALLOWED_REDIRECT_URLS
AllowedUrls []string `json:"allowedUrls,omitempty"`
// AUTH_CLIENT_URL
ClientURL *string `json:"clientUrl,omitempty"`
ClientURL *string `json:"clientUrl,omitempty"`
}
type ConfigAuthRedirectionsUpdateInput struct {
@@ -364,10 +350,8 @@ type ConfigAuthSession struct {
}
type ConfigAuthSessionAccessToken struct {
// AUTH_JWT_CUSTOM_CLAIMS
CustomClaims []*ConfigAuthsessionaccessTokenCustomClaims `json:"customClaims,omitempty"`
// AUTH_ACCESS_TOKEN_EXPIRES_IN
ExpiresIn *uint32 `json:"expiresIn,omitempty"`
ExpiresIn *uint32 `json:"expiresIn,omitempty"`
}
type ConfigAuthSessionAccessTokenUpdateInput struct {
@@ -376,7 +360,6 @@ type ConfigAuthSessionAccessTokenUpdateInput struct {
}
type ConfigAuthSessionRefreshToken struct {
// AUTH_REFRESH_TOKEN_EXPIRES_IN
ExpiresIn *uint32 `json:"expiresIn,omitempty"`
}
@@ -390,11 +373,9 @@ type ConfigAuthSessionUpdateInput struct {
}
type ConfigAuthSignUp struct {
// AUTH_DISABLE_NEW_USERS
DisableNewUsers *bool `json:"disableNewUsers,omitempty"`
// Inverse of AUTH_DISABLE_SIGNUP
Enabled *bool `json:"enabled,omitempty"`
Turnstile *ConfigAuthSignUpTurnstile `json:"turnstile,omitempty"`
DisableNewUsers *bool `json:"disableNewUsers,omitempty"`
Enabled *bool `json:"enabled,omitempty"`
Turnstile *ConfigAuthSignUpTurnstile `json:"turnstile,omitempty"`
}
type ConfigAuthSignUpTurnstile struct {
@@ -444,16 +425,12 @@ type ConfigAuthUser struct {
}
type ConfigAuthUserEmail struct {
// AUTH_ACCESS_CONTROL_ALLOWED_EMAILS
Allowed []string `json:"allowed,omitempty"`
// AUTH_ACCESS_CONTROL_BLOCKED_EMAILS
Blocked []string `json:"blocked,omitempty"`
}
type ConfigAuthUserEmailDomains struct {
// AUTH_ACCESS_CONTROL_ALLOWED_EMAIL_DOMAINS
Allowed []string `json:"allowed,omitempty"`
// AUTH_ACCESS_CONTROL_BLOCKED_EMAIL_DOMAINS
Blocked []string `json:"blocked,omitempty"`
}
@@ -469,7 +446,6 @@ type ConfigAuthUserEmailUpdateInput struct {
type ConfigAuthUserGravatar struct {
Default *string `json:"default,omitempty"`
// AUTH_GRAVATAR_ENABLED
Enabled *bool `json:"enabled,omitempty"`
Rating *string `json:"rating,omitempty"`
}
@@ -481,10 +457,8 @@ type ConfigAuthUserGravatarUpdateInput struct {
}
type ConfigAuthUserLocale struct {
// AUTH_LOCALE_ALLOWED_LOCALES
Allowed []string `json:"allowed,omitempty"`
// AUTH_LOCALE_DEFAULT
Default *string `json:"default,omitempty"`
Default *string `json:"default,omitempty"`
}
type ConfigAuthUserLocaleUpdateInput struct {
@@ -493,10 +467,8 @@ type ConfigAuthUserLocaleUpdateInput struct {
}
type ConfigAuthUserRoles struct {
// AUTH_USER_DEFAULT_ALLOWED_ROLES
Allowed []string `json:"allowed,omitempty"`
// AUTH_USER_DEFAULT_ROLE
Default *string `json:"default,omitempty"`
Default *string `json:"default,omitempty"`
}
type ConfigAuthUserRolesUpdateInput struct {
@@ -512,7 +484,6 @@ type ConfigAuthUserUpdateInput struct {
Roles *ConfigAuthUserRolesUpdateInput `json:"roles,omitempty"`
}
// AUTH_JWT_CUSTOM_CLAIMS
type ConfigAuthsessionaccessTokenCustomClaims struct {
Default *string `json:"default,omitempty"`
Key string `json:"key"`
@@ -551,11 +522,8 @@ type ConfigClaimMapUpdateInput struct {
Value *string `json:"value,omitempty"`
}
// Resource configuration for a service
type ConfigComputeResources struct {
// milicpus, 1000 milicpus = 1 cpu
CPU uint32 `json:"cpu"`
// MiB: 128MiB to 30GiB
CPU uint32 `json:"cpu"`
Memory uint32 `json:"memory"`
}
@@ -569,28 +537,17 @@ type ConfigComputeResourcesUpdateInput struct {
Memory *uint32 `json:"memory,omitempty"`
}
// main entrypoint to the configuration
type ConfigConfig struct {
// Configuration for graphite service
Ai *ConfigAi `json:"ai,omitempty"`
// Configuration for auth service
Auth *ConfigAuth `json:"auth,omitempty"`
// Configuration for functions service
Functions *ConfigFunctions `json:"functions,omitempty"`
// Global configuration that applies to all services
Global *ConfigGlobal `json:"global,omitempty"`
// Advanced configuration for GraphQL
Graphql *ConfigGraphql `json:"graphql,omitempty"`
// Configuration for hasura
Hasura *ConfigHasura `json:"hasura"`
// Configuration for observability service
Ai *ConfigAi `json:"ai,omitempty"`
Auth *ConfigAuth `json:"auth,omitempty"`
Functions *ConfigFunctions `json:"functions,omitempty"`
Global *ConfigGlobal `json:"global,omitempty"`
Graphql *ConfigGraphql `json:"graphql,omitempty"`
Hasura *ConfigHasura `json:"hasura"`
Observability *ConfigObservability `json:"observability"`
// Configuration for postgres service
Postgres *ConfigPostgres `json:"postgres"`
// Configuration for third party providers like SMTP, SMS, etc.
Provider *ConfigProvider `json:"provider,omitempty"`
// Configuration for storage service
Storage *ConfigStorage `json:"storage,omitempty"`
Postgres *ConfigPostgres `json:"postgres"`
Provider *ConfigProvider `json:"provider,omitempty"`
Storage *ConfigStorage `json:"storage,omitempty"`
}
type ConfigConfigUpdateInput struct {
@@ -607,8 +564,7 @@ type ConfigConfigUpdateInput struct {
}
type ConfigEnvironmentVariable struct {
Name string `json:"name"`
// Value of the environment variable
Name string `json:"name"`
Value string `json:"value"`
}
@@ -622,7 +578,6 @@ type ConfigEnvironmentVariableUpdateInput struct {
Value *string `json:"value,omitempty"`
}
// Configuration for functions service
type ConfigFunctions struct {
Node *ConfigFunctionsNode `json:"node,omitempty"`
RateLimit *ConfigRateLimit `json:"rateLimit,omitempty"`
@@ -651,15 +606,12 @@ type ConfigFunctionsUpdateInput struct {
Resources *ConfigFunctionsResourcesUpdateInput `json:"resources,omitempty"`
}
// Global configuration that applies to all services
type ConfigGlobal struct {
// User-defined environment variables that are spread over all services
Environment []*ConfigGlobalEnvironmentVariable `json:"environment,omitempty"`
}
type ConfigGlobalEnvironmentVariable struct {
Name string `json:"name"`
// Value of the environment variable
Name string `json:"name"`
Value string `json:"value"`
}
@@ -816,34 +768,23 @@ type ConfigGraphqlUpdateInput struct {
Security *ConfigGraphqlSecurityUpdateInput `json:"security,omitempty"`
}
// Configuration for hasura service
type ConfigHasura struct {
// Admin secret
AdminSecret string `json:"adminSecret"`
AuthHook *ConfigHasuraAuthHook `json:"authHook,omitempty"`
Events *ConfigHasuraEvents `json:"events,omitempty"`
// JWT Secrets configuration
JwtSecrets []*ConfigJWTSecret `json:"jwtSecrets,omitempty"`
Logs *ConfigHasuraLogs `json:"logs,omitempty"`
RateLimit *ConfigRateLimit `json:"rateLimit,omitempty"`
// Resources for the service
Resources *ConfigResources `json:"resources,omitempty"`
// Configuration for hasura services
// Reference: https://hasura.io/docs/latest/deployment/graphql-engine-flags/reference/
Settings *ConfigHasuraSettings `json:"settings,omitempty"`
// Version of hasura, you can see available versions in the URL below:
// https://hub.docker.com/r/hasura/graphql-engine/tags
Version *string `json:"version,omitempty"`
// Webhook secret
WebhookSecret string `json:"webhookSecret"`
AdminSecret string `json:"adminSecret"`
AuthHook *ConfigHasuraAuthHook `json:"authHook,omitempty"`
Events *ConfigHasuraEvents `json:"events,omitempty"`
JwtSecrets []*ConfigJWTSecret `json:"jwtSecrets,omitempty"`
Logs *ConfigHasuraLogs `json:"logs,omitempty"`
RateLimit *ConfigRateLimit `json:"rateLimit,omitempty"`
Resources *ConfigResources `json:"resources,omitempty"`
Settings *ConfigHasuraSettings `json:"settings,omitempty"`
Version *string `json:"version,omitempty"`
WebhookSecret string `json:"webhookSecret"`
}
type ConfigHasuraAuthHook struct {
Mode *string `json:"mode,omitempty"`
// HASURA_GRAPHQL_AUTH_HOOK_SEND_REQUEST_BODY
SendRequestBody *bool `json:"sendRequestBody,omitempty"`
// HASURA_GRAPHQL_AUTH_HOOK
URL string `json:"url"`
Mode *string `json:"mode,omitempty"`
SendRequestBody *bool `json:"sendRequestBody,omitempty"`
URL string `json:"url"`
}
type ConfigHasuraAuthHookUpdateInput struct {
@@ -853,7 +794,6 @@ type ConfigHasuraAuthHookUpdateInput struct {
}
type ConfigHasuraEvents struct {
// HASURA_GRAPHQL_EVENTS_HTTP_POOL_SIZE
HTTPPoolSize *uint32 `json:"httpPoolSize,omitempty"`
}
@@ -869,27 +809,16 @@ type ConfigHasuraLogsUpdateInput struct {
Level *string `json:"level,omitempty"`
}
// Configuration for hasura services
// Reference: https://hasura.io/docs/latest/deployment/graphql-engine-flags/reference/
type ConfigHasuraSettings struct {
// HASURA_GRAPHQL_CORS_DOMAIN
CorsDomain []string `json:"corsDomain,omitempty"`
// HASURA_GRAPHQL_DEV_MODE
DevMode *bool `json:"devMode,omitempty"`
// HASURA_GRAPHQL_ENABLE_ALLOWLIST
EnableAllowList *bool `json:"enableAllowList,omitempty"`
// HASURA_GRAPHQL_ENABLE_CONSOLE
EnableConsole *bool `json:"enableConsole,omitempty"`
// HASURA_GRAPHQL_ENABLE_REMOTE_SCHEMA_PERMISSIONS
EnableRemoteSchemaPermissions *bool `json:"enableRemoteSchemaPermissions,omitempty"`
// HASURA_GRAPHQL_ENABLED_APIS
EnabledAPIs []string `json:"enabledAPIs,omitempty"`
// HASURA_GRAPHQL_INFER_FUNCTION_PERMISSIONS
InferFunctionPermissions *bool `json:"inferFunctionPermissions,omitempty"`
// HASURA_GRAPHQL_LIVE_QUERIES_MULTIPLEXED_REFETCH_INTERVAL
LiveQueriesMultiplexedRefetchInterval *uint32 `json:"liveQueriesMultiplexedRefetchInterval,omitempty"`
// HASURA_GRAPHQL_STRINGIFY_NUMERIC_TYPES
StringifyNumericTypes *bool `json:"stringifyNumericTypes,omitempty"`
CorsDomain []string `json:"corsDomain,omitempty"`
DevMode *bool `json:"devMode,omitempty"`
EnableAllowList *bool `json:"enableAllowList,omitempty"`
EnableConsole *bool `json:"enableConsole,omitempty"`
EnableRemoteSchemaPermissions *bool `json:"enableRemoteSchemaPermissions,omitempty"`
EnabledAPIs []string `json:"enabledAPIs,omitempty"`
InferFunctionPermissions *bool `json:"inferFunctionPermissions,omitempty"`
LiveQueriesMultiplexedRefetchInterval *uint32 `json:"liveQueriesMultiplexedRefetchInterval,omitempty"`
StringifyNumericTypes *bool `json:"stringifyNumericTypes,omitempty"`
}
type ConfigHasuraSettingsUpdateInput struct {
@@ -962,7 +891,6 @@ type ConfigIngressUpdateInput struct {
TLS *ConfigIngressTLSUpdateInput `json:"tls,omitempty"`
}
// See https://hasura.io/docs/latest/auth/authentication/jwt/
type ConfigJWTSecret struct {
AllowedSkew *uint32 `json:"allowed_skew,omitempty"`
Audience *string `json:"audience,omitempty"`
@@ -1011,15 +939,11 @@ type ConfigObservabilityUpdateInput struct {
Grafana *ConfigGrafanaUpdateInput `json:"grafana,omitempty"`
}
// Configuration for postgres service
type ConfigPostgres struct {
Pitr *ConfigPostgresPitr `json:"pitr,omitempty"`
// Resources for the service
Pitr *ConfigPostgresPitr `json:"pitr,omitempty"`
Resources *ConfigPostgresResources `json:"resources"`
Settings *ConfigPostgresSettings `json:"settings,omitempty"`
// Version of postgres, you can see available versions in the URL below:
// https://hub.docker.com/r/nhost/postgres/tags
Version *string `json:"version,omitempty"`
Version *string `json:"version,omitempty"`
}
type ConfigPostgresPitr struct {
@@ -1030,7 +954,6 @@ type ConfigPostgresPitrUpdateInput struct {
Retention *uint32 `json:"retention,omitempty"`
}
// Resources for the service
type ConfigPostgresResources struct {
Compute *ConfigResourcesCompute `json:"compute,omitempty"`
EnablePublicAccess *bool `json:"enablePublicAccess,omitempty"`
@@ -1137,19 +1060,15 @@ type ConfigRateLimitUpdateInput struct {
Limit *uint32 `json:"limit,omitempty"`
}
// Resource configuration for a service
type ConfigResources struct {
Autoscaler *ConfigAutoscaler `json:"autoscaler,omitempty"`
Compute *ConfigResourcesCompute `json:"compute,omitempty"`
Networking *ConfigNetworking `json:"networking,omitempty"`
// Number of replicas for a service
Replicas *uint32 `json:"replicas,omitempty"`
Replicas *uint32 `json:"replicas,omitempty"`
}
type ConfigResourcesCompute struct {
// milicpus, 1000 milicpus = 1 cpu
CPU uint32 `json:"cpu"`
// MiB: 128MiB to 30GiB
CPU uint32 `json:"cpu"`
Memory uint32 `json:"memory"`
}
@@ -1201,8 +1120,7 @@ type ConfigRunServiceConfigWithID struct {
}
type ConfigRunServiceImage struct {
Image string `json:"image"`
// content of "auths", i.e., { "auths": $THIS }
Image string `json:"image"`
PullCredentials *string `json:"pullCredentials,omitempty"`
}
@@ -1240,13 +1158,11 @@ type ConfigRunServicePortUpdateInput struct {
Type *string `json:"type,omitempty"`
}
// Resource configuration for a service
type ConfigRunServiceResources struct {
Autoscaler *ConfigAutoscaler `json:"autoscaler,omitempty"`
Compute *ConfigComputeResources `json:"compute"`
// Number of replicas for a service
Replicas uint32 `json:"replicas"`
Storage []*ConfigRunServiceResourcesStorage `json:"storage,omitempty"`
Autoscaler *ConfigAutoscaler `json:"autoscaler,omitempty"`
Compute *ConfigComputeResources `json:"compute"`
Replicas uint32 `json:"replicas"`
Storage []*ConfigRunServiceResourcesStorage `json:"storage,omitempty"`
}
type ConfigRunServiceResourcesInsertInput struct {
@@ -1257,11 +1173,9 @@ type ConfigRunServiceResourcesInsertInput struct {
}
type ConfigRunServiceResourcesStorage struct {
// GiB
Capacity uint32 `json:"capacity"`
// name of the volume, changing it will cause data loss
Name string `json:"name"`
Path string `json:"path"`
Name string `json:"name"`
Path string `json:"path"`
}
type ConfigRunServiceResourcesStorageInsertInput struct {
@@ -1345,20 +1259,11 @@ type ConfigStandardOauthProviderWithScopeUpdateInput struct {
Scope []string `json:"scope,omitempty"`
}
// Configuration for storage service
type ConfigStorage struct {
Antivirus *ConfigStorageAntivirus `json:"antivirus,omitempty"`
RateLimit *ConfigRateLimit `json:"rateLimit,omitempty"`
// Networking (custom domains at the moment) are not allowed as we need to do further
// configurations in the CDN. We will enable it again in the future.
Resources *ConfigResources `json:"resources,omitempty"`
// Version of storage service, you can see available versions in the URL below:
// https://hub.docker.com/r/nhost/hasura-storage/tags
//
// Releases:
//
// https://github.com/nhost/hasura-storage/releases
Version *string `json:"version,omitempty"`
Resources *ConfigResources `json:"resources,omitempty"`
Version *string `json:"version,omitempty"`
}
type ConfigStorageAntivirus struct {
@@ -1396,8 +1301,6 @@ type ConfigSystemConfigAuthEmailTemplates struct {
}
type ConfigSystemConfigGraphql struct {
// manually enable graphi on a per-service basis
// by default it follows the plan
FeatureAdvancedGraphql *bool `json:"featureAdvancedGraphql,omitempty"`
}
@@ -1815,8 +1718,7 @@ type Apps struct {
AppStates []*AppStateHistory `json:"appStates"`
AutomaticDeploys bool `json:"automaticDeploys"`
// An array relationship
Backups []*Backups `json:"backups"`
// main entrypoint to the configuration
Backups []*Backups `json:"backups"`
Config *ConfigConfig `json:"config,omitempty"`
CreatedAt time.Time `json:"createdAt"`
// An object relationship
@@ -2247,14 +2149,6 @@ type AuthUserProvidersMinOrderBy struct {
ProviderID *OrderBy `json:"providerId,omitempty"`
}
// response of any mutation on the table "auth.user_providers"
type AuthUserProvidersMutationResponse struct {
// number of rows affected by the mutation
AffectedRows int64 `json:"affected_rows"`
// data from the rows affected by the mutation
Returning []*AuthUserProviders `json:"returning"`
}
// Ordering options when selecting data from "auth.user_providers".
type AuthUserProvidersOrderBy struct {
ID *OrderBy `json:"id,omitempty"`
@@ -2831,7 +2725,6 @@ type Deployments struct {
CommitSha string `json:"commitSHA"`
CommitUserAvatarURL *string `json:"commitUserAvatarUrl,omitempty"`
CommitUserName *string `json:"commitUserName,omitempty"`
CreatedAt time.Time `json:"createdAt"`
DeploymentEndedAt *time.Time `json:"deploymentEndedAt,omitempty"`
// An array relationship
DeploymentLogs []*DeploymentLogs `json:"deploymentLogs"`
@@ -2874,7 +2767,6 @@ type DeploymentsBoolExp struct {
CommitSha *StringComparisonExp `json:"commitSHA,omitempty"`
CommitUserAvatarURL *StringComparisonExp `json:"commitUserAvatarUrl,omitempty"`
CommitUserName *StringComparisonExp `json:"commitUserName,omitempty"`
CreatedAt *TimestamptzComparisonExp `json:"createdAt,omitempty"`
DeploymentEndedAt *TimestamptzComparisonExp `json:"deploymentEndedAt,omitempty"`
DeploymentLogs *DeploymentLogsBoolExp `json:"deploymentLogs,omitempty"`
DeploymentStartedAt *TimestamptzComparisonExp `json:"deploymentStartedAt,omitempty"`
@@ -2909,7 +2801,6 @@ type DeploymentsMaxOrderBy struct {
CommitSha *OrderBy `json:"commitSHA,omitempty"`
CommitUserAvatarURL *OrderBy `json:"commitUserAvatarUrl,omitempty"`
CommitUserName *OrderBy `json:"commitUserName,omitempty"`
CreatedAt *OrderBy `json:"createdAt,omitempty"`
DeploymentEndedAt *OrderBy `json:"deploymentEndedAt,omitempty"`
DeploymentStartedAt *OrderBy `json:"deploymentStartedAt,omitempty"`
DeploymentStatus *OrderBy `json:"deploymentStatus,omitempty"`
@@ -2932,7 +2823,6 @@ type DeploymentsMinOrderBy struct {
CommitSha *OrderBy `json:"commitSHA,omitempty"`
CommitUserAvatarURL *OrderBy `json:"commitUserAvatarUrl,omitempty"`
CommitUserName *OrderBy `json:"commitUserName,omitempty"`
CreatedAt *OrderBy `json:"createdAt,omitempty"`
DeploymentEndedAt *OrderBy `json:"deploymentEndedAt,omitempty"`
DeploymentStartedAt *OrderBy `json:"deploymentStartedAt,omitempty"`
DeploymentStatus *OrderBy `json:"deploymentStatus,omitempty"`
@@ -2971,7 +2861,6 @@ type DeploymentsOrderBy struct {
CommitSha *OrderBy `json:"commitSHA,omitempty"`
CommitUserAvatarURL *OrderBy `json:"commitUserAvatarUrl,omitempty"`
CommitUserName *OrderBy `json:"commitUserName,omitempty"`
CreatedAt *OrderBy `json:"createdAt,omitempty"`
DeploymentEndedAt *OrderBy `json:"deploymentEndedAt,omitempty"`
DeploymentLogsAggregate *DeploymentLogsAggregateOrderBy `json:"deploymentLogs_aggregate,omitempty"`
DeploymentStartedAt *OrderBy `json:"deploymentStartedAt,omitempty"`
@@ -3003,7 +2892,6 @@ type DeploymentsStreamCursorValueInput struct {
CommitSha *string `json:"commitSHA,omitempty"`
CommitUserAvatarURL *string `json:"commitUserAvatarUrl,omitempty"`
CommitUserName *string `json:"commitUserName,omitempty"`
CreatedAt *time.Time `json:"createdAt,omitempty"`
DeploymentEndedAt *time.Time `json:"deploymentEndedAt,omitempty"`
DeploymentStartedAt *time.Time `json:"deploymentStartedAt,omitempty"`
DeploymentStatus *string `json:"deploymentStatus,omitempty"`
@@ -6997,8 +6885,6 @@ const (
// column name
DeploymentsSelectColumnCommitUserName DeploymentsSelectColumn = "commitUserName"
// column name
DeploymentsSelectColumnCreatedAt DeploymentsSelectColumn = "createdAt"
// column name
DeploymentsSelectColumnDeploymentEndedAt DeploymentsSelectColumn = "deploymentEndedAt"
// column name
DeploymentsSelectColumnDeploymentStartedAt DeploymentsSelectColumn = "deploymentStartedAt"
@@ -7032,7 +6918,6 @@ var AllDeploymentsSelectColumn = []DeploymentsSelectColumn{
DeploymentsSelectColumnCommitSha,
DeploymentsSelectColumnCommitUserAvatarURL,
DeploymentsSelectColumnCommitUserName,
DeploymentsSelectColumnCreatedAt,
DeploymentsSelectColumnDeploymentEndedAt,
DeploymentsSelectColumnDeploymentStartedAt,
DeploymentsSelectColumnDeploymentStatus,
@@ -7050,7 +6935,7 @@ var AllDeploymentsSelectColumn = []DeploymentsSelectColumn{
func (e DeploymentsSelectColumn) IsValid() bool {
switch e {
case DeploymentsSelectColumnAppID, DeploymentsSelectColumnCommitMessage, DeploymentsSelectColumnCommitSha, DeploymentsSelectColumnCommitUserAvatarURL, DeploymentsSelectColumnCommitUserName, DeploymentsSelectColumnCreatedAt, DeploymentsSelectColumnDeploymentEndedAt, DeploymentsSelectColumnDeploymentStartedAt, DeploymentsSelectColumnDeploymentStatus, DeploymentsSelectColumnFunctionsEndedAt, DeploymentsSelectColumnFunctionsStartedAt, DeploymentsSelectColumnFunctionsStatus, DeploymentsSelectColumnID, DeploymentsSelectColumnMetadataEndedAt, DeploymentsSelectColumnMetadataStartedAt, DeploymentsSelectColumnMetadataStatus, DeploymentsSelectColumnMigrationsEndedAt, DeploymentsSelectColumnMigrationsStartedAt, DeploymentsSelectColumnMigrationsStatus:
case DeploymentsSelectColumnAppID, DeploymentsSelectColumnCommitMessage, DeploymentsSelectColumnCommitSha, DeploymentsSelectColumnCommitUserAvatarURL, DeploymentsSelectColumnCommitUserName, DeploymentsSelectColumnDeploymentEndedAt, DeploymentsSelectColumnDeploymentStartedAt, DeploymentsSelectColumnDeploymentStatus, DeploymentsSelectColumnFunctionsEndedAt, DeploymentsSelectColumnFunctionsStartedAt, DeploymentsSelectColumnFunctionsStatus, DeploymentsSelectColumnID, DeploymentsSelectColumnMetadataEndedAt, DeploymentsSelectColumnMetadataStartedAt, DeploymentsSelectColumnMetadataStatus, DeploymentsSelectColumnMigrationsEndedAt, DeploymentsSelectColumnMigrationsStartedAt, DeploymentsSelectColumnMigrationsStatus:
return true
}
return false

View File

@@ -27,9 +27,9 @@ let
"${submodule}/mcp/nhost/auth/openapi.yaml"
"${submodule}/mcp/nhost/graphql/openapi.yaml"
"${submodule}/mcp/resources/cloud_schema.graphql"
"${submodule}/mcp/resources/cloud_schema-with-mutations.graphql"
"${submodule}/mcp/resources/nhost_toml_schema.cue"
"${submodule}/mcp/tools/schemas/cloud_schema.graphql"
"${submodule}/mcp/tools/schemas/cloud_schema-with-mutations.graphql"
"${submodule}/mcp/tools/schemas/nhost_toml_schema.cue"
(inDirectory "${submodule}/cmd/mcp/testdata")
(inDirectory "${submodule}/mcp/graphql/testdata")
];

View File

@@ -1,27 +1,27 @@
-----BEGIN CERTIFICATE-----
MIIERTCCA8ugAwIBAgISBWD/E+b14mP5jv4DGWRVYv8fMAoGCCqGSM49BAMDMDIx
MIIERDCCA8mgAwIBAgISBmRex3kpZ4Mz1/1kq05iqja/MAoGCCqGSM49BAMDMDIx
CzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1MZXQncyBFbmNyeXB0MQswCQYDVQQDEwJF
ODAeFw0yNTExMDYxMDUxMTBaFw0yNjAyMDQxMDUxMDlaMB8xHTAbBgNVBAMTFGxv
Y2FsLmF1dGgubmhvc3QucnVuMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEOah5
ZLuUQp3pdMBxBWnT6E6/amW9LerKKEEdy3Nc8iAwG9LlnPH0z3m7a9wgEhpFEdlL
Rr+qO+NhSRnv6+UF5KOCAtIwggLOMA4GA1UdDwEB/wQEAwIHgDAdBgNVHSUEFjAU
BggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUGyb1
TVK/0vf3uHO4x3R094aG2rEwHwYDVR0jBBgwFoAUjw0TovYuftFQbDMYOF1ZjiNy
ODAeFw0yNTEwMDIxMDUxNDBaFw0yNTEyMzExMDUxMzlaMB8xHTAbBgNVBAMTFGxv
Y2FsLmF1dGgubmhvc3QucnVuMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2cVM
ojf8iXZGLneNfnke5LMJIxyTEeGbNOfCv4SOR4K/N4OkpvkUVbH2bRvX99uE9jaK
515Y48PzPA/4+W1zTKOCAtAwggLMMA4GA1UdDwEB/wQEAwIHgDAdBgNVHSUEFjAU
BggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUQqan
raZoU5klAxsgkEVEMIkxmMQwHwYDVR0jBBgwFoAUjw0TovYuftFQbDMYOF1ZjiNy
kcowMgYIKwYBBQUHAQEEJjAkMCIGCCsGAQUFBzAChhZodHRwOi8vZTguaS5sZW5j
ci5vcmcvMIHOBgNVHREEgcYwgcOCFGxvY2FsLmF1dGgubmhvc3QucnVughlsb2Nh
bC5kYXNoYm9hcmQubmhvc3QucnVughJsb2NhbC5kYi5uaG9zdC5ydW6CGWxvY2Fs
LmZ1bmN0aW9ucy5uaG9zdC5ydW6CF2xvY2FsLmdyYXBocWwubmhvc3QucnVughZs
b2NhbC5oYXN1cmEubmhvc3QucnVughdsb2NhbC5tYWlsaG9nLm5ob3N0LnJ1boIX
bG9jYWwuc3RvcmFnZS5uaG9zdC5ydW4wEwYDVR0gBAwwCjAIBgZngQwBAgEwLQYD
VR0fBCYwJDAioCCgHoYcaHR0cDovL2U4LmMubGVuY3Iub3JnLzMyLmNybDCCAQQG
CisGAQQB1nkCBAIEgfUEgfIA8AB2ABmG1Mcoqm/+ugNveCpNAZGqzi1yMQ+uzl1w
QS0lTMfUAAABmlkAQokAAAQDAEcwRQIgWDtSxJfM2xcjvScVHOkn8bipzBhNhTnm
B89TDh1/4XUCIQDe08W33PCx2D+akCdW9U9mZKQpIW6deLZSI3ZWpSNKMAB2AA5X
lLzzrqk+MxssmQez95Dfm8I9cTIl3SGpJaxhxU4hAAABmlkAQn8AAAQDAEcwRQIg
KnojmNTpNk1OFTQI0EnlPa2bpwqmUgmUCLeqE6SWfgoCIQCrhZbxYPHbGLF/HpRq
vCTcOh24SRCuxlkqtaowbbfmKjAKBggqhkjOPQQDAwNoADBlAjEArstFIC+KAsfQ
nLhtqsaNzkhftN5adDyr2CoE0WUPF1sLDi+xDnDO+JgIPL0YKAFNAjATJ4omhpc+
I6/kWcef2RyO9YCGQQE9pdez5CYKb9o8YAntDSHM3b5nXXj3AX/USdQ=
VR0fBCYwJDAioCCgHoYcaHR0cDovL2U4LmMubGVuY3Iub3JnLzY0LmNybDCCAQIG
CisGAQQB1nkCBAIEgfMEgfAA7gB1AO08S9boBsKkogBX28sk4jgB31Ev7cSGxXAP
IN23Pj/gAAABmaTCI4YAAAQDAEYwRAIgXLRFL1EAXfvN6kd5m6udqlxfz4+5B6rq
Cdhp/ZwDAZ8CIFYvalTkl5NEBEMD3vpPvrj8s1Yy2xsropEh/AvpavvLAHUAGYbU
xyiqb/66A294Kk0BkarOLXIxD67OXXBBLSVMx9QAAAGZpMIjhwAABAMARjBEAiBk
H1vqU9HNuBcf4UYL/xZ42BeUAARHStiFaIZtnR1kEgIgbIJ0CGqIpxmWuwCunl9p
ar+rGLdQrCk9BZXq/VjPPAAwCgYIKoZIzj0EAwMDaQAwZgIxAKvk5a2zQsv7JLNj
NO1ly+DI8qiy5nf4HQrOrHOjtmx5RUu0HSO9P0J0u069qAqXMgIxAMLdME9JUo2c
TJo3pwWv5MRyg/MkOJ4ImKdDJXfIZNkEIUyP3vwTqImvZe07gJDsYg==
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIEVjCCAj6gAwIBAgIQY5WTY8JOcIJxWRi/w9ftVjANBgkqhkiG9w0BAQsFADBP

View File

@@ -1,5 +1,5 @@
-----BEGIN PRIVATE KEY-----
MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgInXN4JRnXNTjx7rM
avurZrN1EV1iebQeNUlMlFp7VJ+hRANCAAQ5qHlku5RCnel0wHEFadPoTr9qZb0t
6sooQR3Lc1zyIDAb0uWc8fTPebtr3CASGkUR2UtGv6o742FJGe/r5QXk
MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgfJZOkvawA0vBMw9W
ph8i1Z+SJQrFscPbqSYpxngzEDahRANCAATZxUyiN/yJdkYud41+eR7kswkjHJMR
4Zs058K/hI5Hgr83g6Sm+RRVsfZtG9f324T2NornXljjw/M8D/j5bXNM
-----END PRIVATE KEY-----

View File

@@ -1,52 +1,52 @@
-----BEGIN CERTIFICATE-----
MIIEVzCCA92gAwIBAgISBm54VdkoqD8s8efq7ceHaTihMAoGCCqGSM49BAMDMDIx
MIIEWDCCA96gAwIBAgISBbvrSsjDQm4zevwwjxFGmeTMMAoGCCqGSM49BAMDMDIx
CzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1MZXQncyBFbmNyeXB0MQswCQYDVQQDEwJF
ODAeFw0yNTExMDYxMDUyMjBaFw0yNjAyMDQxMDUyMTlaMCExHzAdBgNVBAMMFiou
YXV0aC5sb2NhbC5uaG9zdC5ydW4wWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAASI
rTkZOM4ip42DCyDADXGc7oV3+OkimyTM3st2RIZWG28rFRwH0LebJV2cduq1Hdtl
VxIEr+RhvyIL7gllueXUo4IC4jCCAt4wDgYDVR0PAQH/BAQDAgeAMB0GA1UdJQQW
MBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNVHRMBAf8EAjAAMB0GA1UdDgQWBBTw
bM86O381+aljU3oTUvwhZ90PCDAfBgNVHSMEGDAWgBSPDROi9i5+0VBsMxg4XVmO
I3KRyjAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAKGFmh0dHA6Ly9lOC5pLmxl
NzAeFw0yNTEwMDIxMDUyNTdaFw0yNTEyMzExMDUyNTZaMCExHzAdBgNVBAMMFiou
YXV0aC5sb2NhbC5uaG9zdC5ydW4wWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAATG
x0o7t0pSrOoFc+pljtqJVxgaSW+w9D9C2WdysMeSKKOU+0MzaM4ynLUhETOpBs8E
612mdcoeak+G1Emj6UVwo4IC4zCCAt8wDgYDVR0PAQH/BAQDAgeAMB0GA1UdJQQW
MBQGCCsGAQUFBwMBBggrBgEFBQcDAjAMBgNVHRMBAf8EAjAAMB0GA1UdDgQWBBQ+
lVsLiXSRLAECs9OgkCEBS7jMmzAfBgNVHSMEGDAWgBSuSJ7chx1EoG/aouVgdAR4
wpwAgDAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAKGFmh0dHA6Ly9lNy5pLmxl
bmNyLm9yZy8wgd4GA1UdEQSB1jCB04IWKi5hdXRoLmxvY2FsLm5ob3N0LnJ1boIb
Ki5kYXNoYm9hcmQubG9jYWwubmhvc3QucnVughQqLmRiLmxvY2FsLm5ob3N0LnJ1
boIbKi5mdW5jdGlvbnMubG9jYWwubmhvc3QucnVughkqLmdyYXBocWwubG9jYWwu
bmhvc3QucnVughgqLmhhc3VyYS5sb2NhbC5uaG9zdC5ydW6CGSoubWFpbGhvZy5s
b2NhbC5uaG9zdC5ydW6CGSouc3RvcmFnZS5sb2NhbC5uaG9zdC5ydW4wEwYDVR0g
BAwwCjAIBgZngQwBAgEwLQYDVR0fBCYwJDAioCCgHoYcaHR0cDovL2U4LmMubGVu
Y3Iub3JnLzM0LmNybDCCAQQGCisGAQQB1nkCBAIEgfUEgfIA8AB2AEmcm2neHXzs
/DbezYdkprhbrwqHgBnRVVL76esp3fjDAAABmlkBVgkAAAQDAEcwRQIhANH6Ml3u
IM4nAzwAIjIjBjn8EWbn1ZHfgwO+rlSo5rzpAiATPKE8Mx5LK1IayG5VCK1eCDyc
rzt1HNbP9WSrpuHx+gB2ABmG1Mcoqm/+ugNveCpNAZGqzi1yMQ+uzl1wQS0lTMfU
AAABmlkBVgcAAAQDAEcwRQIgIT/DhsIj9Aw7qf/2lknJCr907dEqC3/+QN3zlcOj
iKoCIQCTguinYjJPZwU2dblaRQ2q7MTCMT2ZENExltxwYG3GzjAKBggqhkjOPQQD
AwNoADBlAjEA5nFoNrLyeC079YpRvdah/HZIA/lUBh+LOo/NcEBD3aTGs2z8hU8z
H4vMy3OnfQ9TAjBxigm7zE5/3CAcGoSOr/P0TL52nh+lO4SUVxcbKgYB8A2yo6o/
kUkG7PiRB0uUpNw=
BAwwCjAIBgZngQwBAgEwLQYDVR0fBCYwJDAioCCgHoYcaHR0cDovL2U3LmMubGVu
Y3Iub3JnLzc3LmNybDCCAQUGCisGAQQB1nkCBAIEgfYEgfMA8QB2AN3cyjSV1+EW
BeeVMvrHn/g9HFDf2wA6FBJ2Ciysu8gqAAABmaTDUHkAAAQDAEcwRQIgWudJ8XKA
BT5jq5Tl0xQLNb953pBi22Tb0TIWk+RSqHgCIQDsTrLVMFaQTV7EFCY1tFhi5qae
SCpEwwdFcnom/nz6EAB3AO08S9boBsKkogBX28sk4jgB31Ev7cSGxXAPIN23Pj/g
AAABmaTDWAsAAAQDAEgwRgIhALxIgIiutEwgNcGw7/cAdjFqUugct4HlZezIOLLP
rg69AiEA8YCaK41rJDYztEKUIJEq2J2ktSqGYcl9gNKC+SiR4acwCgYIKoZIzj0E
AwMDaAAwZQIwVG9yOiMRfKFFyFj1R8X/5U67QD84OhZ0oM0SZsVhezLedG5b8eFf
/cWraREi8xbFAjEA/6RXweGzl08F7EtqBDoiqitScI2rbwGtP6s/evL0zXTABZD2
ih7AGxjtg80IqIRe
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIEVjCCAj6gAwIBAgIQY5WTY8JOcIJxWRi/w9ftVjANBgkqhkiG9w0BAQsFADBP
MQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFy
Y2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMTAeFw0yNDAzMTMwMDAwMDBa
Fw0yNzAzMTIyMzU5NTlaMDIxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1MZXQncyBF
bmNyeXB0MQswCQYDVQQDEwJFODB2MBAGByqGSM49AgEGBSuBBAAiA2IABNFl8l7c
S7QMApzSsvru6WyrOq44ofTUOTIzxULUzDMMNMchIJBwXOhiLxxxs0LXeb5GDcHb
R6EToMffgSZjO9SNHfY9gjMy9vQr5/WWOrQTZxh7az6NSNnq3u2ubT6HTKOB+DCB
9TAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0lBBYwFAYIKwYBBQUHAwIGCCsGAQUFBwMB
MBIGA1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFI8NE6L2Ln7RUGwzGDhdWY4j
cpHKMB8GA1UdIwQYMBaAFHm0WeZ7tuXkAXOACIjIGlj26ZtuMDIGCCsGAQUFBwEB
BCYwJDAiBggrBgEFBQcwAoYWaHR0cDovL3gxLmkubGVuY3Iub3JnLzATBgNVHSAE
DDAKMAgGBmeBDAECATAnBgNVHR8EIDAeMBygGqAYhhZodHRwOi8veDEuYy5sZW5j
ci5vcmcvMA0GCSqGSIb3DQEBCwUAA4ICAQBnE0hGINKsCYWi0Xx1ygxD5qihEjZ0
RI3tTZz1wuATH3ZwYPIp97kWEayanD1j0cDhIYzy4CkDo2jB8D5t0a6zZWzlr98d
AQFNh8uKJkIHdLShy+nUyeZxc5bNeMp1Lu0gSzE4McqfmNMvIpeiwWSYO9w82Ob8
otvXcO2JUYi3svHIWRm3+707DUbL51XMcY2iZdlCq4Wa9nbuk3WTU4gr6LY8MzVA
aDQG2+4U3eJ6qUF10bBnR1uuVyDYs9RhrwucRVnfuDj29CMLTsplM5f5wSV5hUpm
Uwp/vV7M4w4aGunt74koX71n4EdagCsL/Yk5+mAQU0+tue0JOfAV/R6t1k+Xk9s2
HMQFeoxppfzAVC04FdG9M+AC2JWxmFSt6BCuh3CEey3fE52Qrj9YM75rtvIjsm/1
Hl+u//Wqxnu1ZQ4jpa+VpuZiGOlWrqSP9eogdOhCGisnyewWJwRQOqK16wiGyZeR
xs/Bekw65vwSIaVkBruPiTfMOo0Zh4gVa8/qJgMbJbyrwwG97z/PRgmLKCDl8z3d
tA0Z7qq7fta0Gl24uyuB05dqI5J1LvAzKuWdIjT1tP8qCoxSE/xpix8hX2dt3h+/
jujUgFPFZ0EVZ0xSyBNRF3MboGZnYXFUxpNjTWPKpagDHJQmqrAcDmWJnMsFY3jS
u1igv3OefnWjSQ==
MIIEVzCCAj+gAwIBAgIRAKp18eYrjwoiCWbTi7/UuqEwDQYJKoZIhvcNAQELBQAw
TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMjQwMzEzMDAwMDAw
WhcNMjcwMzEyMjM1OTU5WjAyMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNTGV0J3Mg
RW5jcnlwdDELMAkGA1UEAxMCRTcwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARB6AST
CFh/vjcwDMCgQer+VtqEkz7JANurZxLP+U9TCeioL6sp5Z8VRvRbYk4P1INBmbef
QHJFHCxcSjKmwtvGBWpl/9ra8HW0QDsUaJW2qOJqceJ0ZVFT3hbUHifBM/2jgfgw
gfUwDgYDVR0PAQH/BAQDAgGGMB0GA1UdJQQWMBQGCCsGAQUFBwMCBggrBgEFBQcD
ATASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBSuSJ7chx1EoG/aouVgdAR4
wpwAgDAfBgNVHSMEGDAWgBR5tFnme7bl5AFzgAiIyBpY9umbbjAyBggrBgEFBQcB
AQQmMCQwIgYIKwYBBQUHMAKGFmh0dHA6Ly94MS5pLmxlbmNyLm9yZy8wEwYDVR0g
BAwwCjAIBgZngQwBAgEwJwYDVR0fBCAwHjAcoBqgGIYWaHR0cDovL3gxLmMubGVu
Y3Iub3JnLzANBgkqhkiG9w0BAQsFAAOCAgEAjx66fDdLk5ywFn3CzA1w1qfylHUD
aEf0QZpXcJseddJGSfbUUOvbNR9N/QQ16K1lXl4VFyhmGXDT5Kdfcr0RvIIVrNxF
h4lqHtRRCP6RBRstqbZ2zURgqakn/Xip0iaQL0IdfHBZr396FgknniRYFckKORPG
yM3QKnd66gtMst8I5nkRQlAg/Jb+Gc3egIvuGKWboE1G89NTsN9LTDD3PLj0dUMr
OIuqVjLB8pEC6yk9enrlrqjXQgkLEYhXzq7dLafv5Vkig6Gl0nuuqjqfp0Q1bi1o
yVNAlXe6aUXw92CcghC9bNsKEO1+M52YY5+ofIXlS/SEQbvVYYBLZ5yeiglV6t3S
M6H+vTG0aP9YHzLn/KVOHzGQfXDP7qM5tkf+7diZe7o2fw6O7IvN6fsQXEQQj8TJ
UXJxv2/uJhcuy/tSDgXwHM8Uk34WNbRT7zGTGkQRX0gsbjAea/jYAoWv0ZvQRwpq
Pe79D/i7Cep8qWnA+7AE/3B3S/3dEEYmc0lpe1366A/6GEgk3ktr9PEoQrLChs6I
tu3wnNLB2euC8IKGLQFpGtOO/2/hiAKjyajaBP25w1jF0Wl8Bbqne3uZ2q1GyPFJ
YRmT7/OXpmOH/FVLtwS+8ng1cAmpCujPwteJZNcDG0sF2n/sc0+SQf49fdyUK0ty
+VUwFj9tmWxyR/M=
-----END CERTIFICATE-----

View File

@@ -1,5 +1,5 @@
-----BEGIN PRIVATE KEY-----
MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgcrhROXQT85e+S8h8
RE3Z7TPo3+WA2RmzJsXJbXkbi5qhRANCAASIrTkZOM4ip42DCyDADXGc7oV3+Oki
myTM3st2RIZWG28rFRwH0LebJV2cduq1HdtlVxIEr+RhvyIL7gllueXU
MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgrfNUSjLV/7j7LSBf
zL/hvGEuv+uvf3/aimqjecO7vcShRANCAATGx0o7t0pSrOoFc+pljtqJVxgaSW+w
9D9C2WdysMeSKKOU+0MzaM4ynLUhETOpBs8E612mdcoeak+G1Emj6UVw
-----END PRIVATE KEY-----

View File

@@ -3,13 +3,12 @@ NEXT_PUBLIC_ENV=dev
NEXT_PUBLIC_NHOST_PLATFORM=false
# Environment Variables for Self Hosting and Local Development
NEXT_PUBLIC_NHOST_AUTH_URL=https://local.auth.local.nhost.run/v1
NEXT_PUBLIC_NHOST_CONFIGSERVER_URL=https://local.dashboard.local.nhost.run/v1/configserver/graphql
NEXT_PUBLIC_NHOST_AUTH_URL=https://local.auth.nhost.local.run/v1
NEXT_PUBLIC_NHOST_FUNCTIONS_URL=https://local.functions.local.nhost.run/v1
NEXT_PUBLIC_NHOST_GRAPHQL_URL=https://local.graphql.local.nhost.run/v1
NEXT_PUBLIC_NHOST_STORAGE_URL=https://local.storage.local.nhost.run/v1
NEXT_PUBLIC_NHOST_HASURA_CONSOLE_URL=https://local.hasura.local.nhost.run/console
NEXT_PUBLIC_NHOST_HASURA_MIGRATIONS_API_URL=https://local.hasura.local.nhost.run/apis/migrate
NEXT_PUBLIC_NHOST_HASURA_CONSOLE_URL=https://local.hasura.local.nhost.run
NEXT_PUBLIC_NHOST_HASURA_MIGRATIONS_API_URL=https://local.hasura.local.nhost.run/v1/migrations
NEXT_PUBLIC_NHOST_HASURA_API_URL=https://local.hasura.local.nhost.run
# Environment Variables when running the Nhost Dashboard against the Nhost Backend
@@ -19,13 +18,13 @@ NEXT_PUBLIC_ANALYTICS_WRITE_KEY=<analytics_write_key>
NEXT_PUBLIC_SEGMENT_CDN_URL=<segment_cdn_url>
NEXT_PUBLIC_NHOST_BRAGI_WEBSOCKET=<nhost_bragi_websocket>
NEXT_ZENDESK_URL=
NEXT_ZENDESK_API_KEY=
NEXT_ZENDESK_USER_EMAIL=
NEXT_PUBLIC_ZENDESK_URL=
NEXT_PUBLIC_ZENDESK_API_KEY=
NEXT_PUBLIC_ZENDESK_USER_EMAIL=
CODEGEN_GRAPHQL_URL=https://local.graphql.local.nhost.run/v1
CODEGEN_HASURA_ADMIN_SECRET=nhost-admin-secret
NEXT_PUBLIC_TURNSTILE_SITE_KEY=FIXME
NEXT_PUBLIC_SOC2_REPORT_FILE_ID=
NEXT_PUBLIC_SOC2_REPORT_FILE_ID=

View File

@@ -0,0 +1,47 @@
const TsconfigPathsPlugin = require('tsconfig-paths-webpack-plugin');
module.exports = {
stories: ['../src/**/*.stories.mdx', '../src/**/*.stories.@(js|jsx|ts|tsx)'],
addons: [
'@storybook/addon-links',
'@storybook/addon-essentials',
'@storybook/addon-interactions',
'storybook-addon-next-router',
{
/**
* Fix Storybook issue with PostCSS@8
* @see https://github.com/storybookjs/storybook/issues/12668#issuecomment-773958085
*/
name: '@storybook/addon-postcss',
options: {
postcssLoaderOptions: {
implementation: require('postcss'),
},
},
},
],
framework: '@storybook/react',
core: {
builder: '@storybook/builder-webpack5',
},
features: {
emotionAlias: true,
},
webpackFinal: async (config) => {
return {
...config,
resolve: {
...config?.resolve,
plugins: [
...(config?.resolve?.plugins || []),
new TsconfigPathsPlugin(),
],
},
};
},
env: (config) => ({
...config,
NEXT_PUBLIC_ENV: 'dev',
NEXT_PUBLIC_NHOST_PLATFORM: 'false',
}),
};

View File

@@ -0,0 +1,69 @@
import { NhostProvider } from '@/providers/nhost';
import '@fontsource/inter';
import '@fontsource/inter/500.css';
import '@fontsource/inter/700.css';
import { CssBaseline, ThemeProvider } from '@mui/material';
import { createClient } from '@nhost/nhost-js-beta';
import { NhostApolloProvider } from '@nhost/react-apollo';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import { Buffer } from 'buffer';
import { initialize, mswDecorator } from 'msw-storybook-addon';
import { RouterContext } from 'next/dist/shared/lib/router-context';
import { createTheme } from '../src/components/ui/v2/createTheme';
import '../src/styles/globals.css';
global.Buffer = Buffer;
initialize({ onUnhandledRequest: 'bypass' });
const queryClient = new QueryClient();
export const parameters = {
nextRouter: {
Provider: RouterContext.Provider,
isReady: true,
},
actions: { argTypesRegex: '^on[A-Z].*' },
controls: {
matchers: {
color: /(background|color)$/i,
date: /Date$/,
},
},
};
export const decorators = [
(Story, context) => {
const isDarkMode = !context.globals?.backgrounds?.value
?.toLowerCase()
?.startsWith('#f');
return (
<ThemeProvider theme={createTheme(isDarkMode ? 'dark' : 'light')}>
<CssBaseline />
<Story />
</ThemeProvider>
);
},
(Story) => (
<QueryClientProvider client={queryClient}>
<Story />
</QueryClientProvider>
),
(Story) => (
<NhostApolloProvider
fetchPolicy="cache-first"
graphqlUrl="https://local.graphql.nhost.run/v1"
>
<Story />
</NhostApolloProvider>
),
(Story) => (
<NhostProvider
nhost={createClient({ subdomain: 'local', region: 'local' })}
>
<Story />
</NhostProvider>
),
mswDecorator,
];

View File

@@ -1,73 +1,7 @@
## [@nhost/dashboard@2.42.0] - 2025-11-12
### 🚀 Features
- *(dashboard)* Datatable design improvements (#3657)
### ⚙️ Miscellaneous Tasks
- *(dashboard)* Remove v2 ui components from datatable (#3568)
## [@nhost/dashboard@2.41.0] - 2025-11-04
### 🚀 Features
- *(auth)* Added endpoints to retrieve and refresh oauth2 providers' tokens (#3614)
- *(dashboard)* Get github repositories from github itself (#3640)
### 🐛 Bug Fixes
- *(dashboard)* Update SQL editor to use correct hasura migrations API URL (#3645)
# Changelog
All notable changes to this project will be documented in this file.
## [@nhost/dashboard@2.40.0] - 2025-10-27
### 🚀 Features
- *(dashboard)* Allow configuring CSP header (#3627)
### ⚙️ Miscellaneous Tasks
- *(dashboard)* Various improvements to support ticket page (#3630)
## [@nhost/dashboard@2.39.0] - 2025-10-22
### 🚀 Features
- *(dashboard)* Move zendesk request to API route (#3628)
### 🐛 Bug Fixes
- *(dashboard)* Fix flaky e2e tests (#3536)
- *(dashboard)* Run audit and lint in dashboard (#3578)
### ⚙️ Miscellaneous Tasks
- *(dashboard)* Cleanup e2e remote schemas test before run (#3581)
## [@nhost/dashboard@2.38.4] - 2025-10-09
### 🐛 Bug Fixes
- *(dashboard)* Remove NODE_ENV from restricted env vars (#3573)
## [@nhost/dashboard@2.38.3] - 2025-10-07
### 🐛 Bug Fixes
- *(dashboard)* Show paused banner in remote schemas/database page if project is paused (#3557)
- *(dashboard)* Show paused banner in Run page (#3564)
- *(dashboard)* Remote schema edit graphql customizations, default value for root field namespace is empty (#3565)
- *(dashboard)* Improve remote schema preview search (#3558)
## [@nhost/dashboard@2.38.2] - 2025-09-30
### 🐛 Bug Fixes

View File

@@ -62,6 +62,20 @@ NEXT_PUBLIC_NHOST_HASURA_API_URL=https://local.hasura.local.nhost.run
This will connect the Nhost Dashboard to your locally running Nhost backend.
### Storybook
Components are documented using [Storybook](https://storybook.js.org/). To run Storybook, run the following command:
```bash
pnpm storybook
```
By default, Storybook will run on port `6006`. You can change this by passing the `--port` flag:
```bash
pnpm storybook --port 6007
```
### General Environment Variables
| Name | Description |
@@ -82,15 +96,6 @@ This will connect the Nhost Dashboard to your locally running Nhost backend.
| `NEXT_PUBLIC_NHOST_HASURA_MIGRATIONS_API_URL` | The URL of Hasura's Migrations service. When working locally, point it to the Migrations service started by the CLI. |
| `NEXT_PUBLIC_NHOST_HASURA_API_URL` | The URL of Hasura's Schema and Metadata API. When working locally, point it to the Schema and Metadata API started by the CLI. When self-hosting, point it to the self-hosted Schema and Metadata API. |
### Content Security Policy (CSP) Configuration
The dashboard supports build-time CSP configuration to enable self-hosted deployments on custom domains.
| Name | Description |
| ------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `CSP_MODE` | Controls CSP behavior. Options: `nhost` (default, uses Nhost Cloud CSP), `disabled` (no CSP headers), `custom` (use custom CSP via `CSP_HEADER`). For self-hosted deployments on custom domains, set to `disabled` or `custom`. |
| `CSP_HEADER` | Custom Content Security Policy header value. Only used when `CSP_MODE=custom`. Should be a complete CSP string (e.g., `default-src 'self'; script-src 'self' 'unsafe-eval'; ...`). |
### Other Environment Variables
| Name | Description |

View File

@@ -102,11 +102,11 @@ test('should create a table with nullable columns', async ({
page.getByRole('link', { name: tableName, exact: true }),
).toBeVisible();
await page
.locator(`li:has-text("${tableName}") #table-management-menu-${tableName}`)
.locator(`li:has-text("${tableName}") #table-management-menu button`)
.click();
await page.getByText('Edit Table').click();
await expect(page.locator('h2:has-text("Edit Table")')).toBeVisible();
await expect(page.locator('div[data-testid="id"]')).toBeVisible();
expect(page.locator('h2:has-text("Edit Table")')).toBeVisible();
expect(page.locator('div[data-testid="id"]')).toBeVisible();
});
test('should create a table with an identity column', async ({
@@ -143,16 +143,16 @@ test('should create a table with an identity column', async ({
page.getByRole('link', { name: tableName, exact: true }),
).toBeVisible();
await page
.locator(`li:has-text("${tableName}") #table-management-menu-${tableName}`)
.locator(`li:has-text("${tableName}") #table-management-menu button`)
.click();
await page.getByText('Edit Table').click();
await expect(page.locator('h2:has-text("Edit Table")')).toBeVisible();
await expect(
expect(page.locator('h2:has-text("Edit Table")')).toBeVisible();
expect(
page.locator('button#identityColumnIndex :has-text("identity_column")'),
).toBeVisible();
await expect(page.locator('[id="columns.3.defaultValue"]')).toBeDisabled();
await expect(page.locator('[name="columns.3.isNullable"]')).toBeDisabled();
await expect(page.locator('[name="columns.3.isUnique"]')).toBeDisabled();
expect(page.locator('[id="columns.3.defaultValue"]')).toBeDisabled();
expect(page.locator('[name="columns.3.isNullable"]')).toBeDisabled();
expect(page.locator('[name="columns.3.isUnique"]')).toBeDisabled();
});
test('should create table with foreign key constraint', async ({
@@ -234,46 +234,6 @@ test('should create table with foreign key constraint', async ({
).toBeVisible();
});
test('should be able to create a table with a composite key', async ({
authenticatedNhostPage: page,
}) => {
await page.getByRole('button', { name: /new table/i }).click();
await expect(page.getByText(/create a new table/i)).toBeVisible();
const tableName = snakeCase(faker.lorem.words(3));
await prepareTable({
page,
name: tableName,
primaryKeys: ['id', 'second_id'],
columns: [
{ name: 'id', type: 'uuid', defaultValue: 'gen_random_uuid()' },
{ name: 'second_id', type: 'uuid', defaultValue: 'gen_random_uuid()' },
{ name: 'name', type: 'text' },
],
});
await expect(page.locator('div[data-testid="id"]')).toBeVisible();
await expect(page.locator('div[data-testid="second_id"]')).toBeVisible();
await page.getByRole('button', { name: /create/i }).click();
await page.waitForURL(
`/orgs/${TEST_ORGANIZATION_SLUG}/projects/${TEST_PROJECT_SUBDOMAIN}/database/browser/default/public/${tableName}`,
);
await expect(
page.getByRole('link', { name: tableName, exact: true }),
).toBeVisible();
await page
.locator(`li:has-text("${tableName}") #table-management-menu-${tableName}`)
.click();
await page.getByText('Edit Table').click();
await expect(page.locator('div[data-testid="id"]')).toBeVisible();
await expect(page.locator('div[data-testid="second_id"]')).toBeVisible();
});
test('should not be able to create a table with a name that already exists', async ({
authenticatedNhostPage: page,
}) => {
@@ -320,3 +280,40 @@ test('should not be able to create a table with a name that already exists', asy
page.getByText(/error: a table with this name already exists/i),
).toBeVisible();
});
// Creates a table whose primary key spans two uuid columns and verifies,
// via the Edit Table dialog, that both key columns were persisted.
test('should be able to create a table with a composite key', async ({
  authenticatedNhostPage: page,
}) => {
  await page.getByRole('button', { name: /new table/i }).click();
  await expect(page.getByText(/create a new table/i)).toBeVisible();

  // Random snake_case name avoids collisions with tables left over from
  // earlier runs.
  const tableName = snakeCase(faker.lorem.words(3));

  await prepareTable({
    page,
    name: tableName,
    primaryKeys: ['id', 'second_id'],
    columns: [
      { name: 'id', type: 'uuid', defaultValue: 'gen_random_uuid()' },
      { name: 'second_id', type: 'uuid', defaultValue: 'gen_random_uuid()' },
      { name: 'name', type: 'text' },
    ],
  });

  await page.getByRole('button', { name: /create/i }).click();

  await page.waitForURL(
    `/orgs/${TEST_ORGANIZATION_SLUG}/projects/${TEST_PROJECT_SUBDOMAIN}/database/browser/default/public/${tableName}`,
  );

  await expect(
    page.getByRole('link', { name: tableName, exact: true }),
  ).toBeVisible();

  await page
    .locator(`li:has-text("${tableName}") #table-management-menu button`)
    .click();

  await page.getByText('Edit Table').click();

  // Playwright's web-first assertions are async and must be awaited;
  // without `await`, a failing `toBeVisible()` surfaces as an unhandled
  // promise rejection instead of failing the test at this line.
  await expect(page.locator('div[data-testid="id"]')).toBeVisible();
  await expect(page.locator('div[data-testid="second_id"]')).toBeVisible();
});

View File

@@ -41,7 +41,7 @@ test('should create a table with role permissions to select row', async ({
// Press three horizontal dots more options button next to the table name
await page
.locator(`li:has-text("${tableName}") #table-management-menu-${tableName}`)
.locator(`li:has-text("${tableName}") #table-management-menu button`)
.click();
await page.getByRole('menuitem', { name: /edit permissions/i }).click();
@@ -89,7 +89,7 @@ test('should create a table with role permissions and a custom check to select r
// Press three horizontal dots more options button next to the table name
await page
.locator(`li:has-text("${tableName}") #table-management-menu-${tableName}`)
.locator(`li:has-text("${tableName}") #table-management-menu button`)
.click();
await page.getByRole('menuitem', { name: /edit permissions/i }).click();
@@ -114,7 +114,7 @@ test('should create a table with role permissions and a custom check to select r
await page.getByText('Select variable...', { exact: true }).click();
const variableSelector = page.locator('input[role="combobox"]');
const variableSelector = await page.locator('input[role="combobox"]');
await variableSelector.fill('X-Hasura-User-Id');

View File

@@ -41,13 +41,12 @@ export const TEST_USER_PASSWORD = process.env.NHOST_TEST_USER_PASSWORD!;
export const TEST_PERSONAL_ORG_SLUG = process.env.NHOST_TEST_PERSONAL_ORG_SLUG!;
export const TEST_ONBOARDING_USER = process.env.NHOST_TEST_ONBOARDING_USER!;
const freeUserEmails = process.env.NHOST_TEST_FREE_USER_EMAILS!;
export const TEST_FREE_USER_EMAILS: string[] = JSON.parse(freeUserEmails);
/**
* Name of the remote schema serverless function to test against.
*/
export const TEST_PROJECT_REMOTE_SCHEMA_NAME =
process.env.NHOST_TEST_PROJECT_REMOTE_SCHEMA_NAME!;
export const TEST_STAGING_SUBDOMAIN = process.env.NHOST_TEST_STAGING_SUBDOMAIN!;
export const TEST_STAGING_REGION = process.env.NHOST_TEST_STAGING_REGION!;

View File

@@ -1,10 +1,13 @@
import { expect, test } from '@/e2e/fixtures/auth-hook';
import {
cleanupOnboardingTestIfNeeded,
getCardExpiration,
getOrgSlugFromUrl,
getProjectSlugFromUrl,
gotoUrl,
loginWithFreeUser,
setFreeUserStarterOrgSlug,
setNewProjectName,
setNewProjectSlug,
} from '@/e2e/utils';
import { faker } from '@faker-js/faker';
import type { Page } from '@playwright/test';
@@ -12,15 +15,13 @@ import type { Page } from '@playwright/test';
let page: Page;
// One-time setup for this file's ordered tests: clears any leftover state
// from a previous onboarding run, then opens and logs in the module-level
// `page` that every test below reuses.
test.beforeAll(async ({ browser }) => {
  await cleanupOnboardingTestIfNeeded();
  page = await browser.newPage();
  await loginWithFreeUser(page);
});
test('user should be able to finish onboarding', async () => {
await gotoUrl(page, `/onboarding`);
await expect(page.getByText('Welcome to Nhost!')).toBeVisible();
expect(page.getByText('Welcome to Nhost!')).toBeVisible();
const organizationName = faker.lorem.words(3).slice(0, 32);
await page.getByLabel('Organization Name').fill(organizationName);
@@ -67,28 +68,34 @@ test('user should be able to finish onboarding', async () => {
.getByTestId('hosted-payment-submit-button')
.click({ force: true });
await expect(
expect(
page.getByText('Processing new organization request').first(),
).toBeVisible();
await page.waitForSelector(
'div:has-text("Organization created successfully. Redirecting...")',
);
await expect(page.getByText('Create Your First Project')).toBeVisible();
expect(page.getByText('Create Your First Project')).toBeVisible();
const projectName = faker.lorem.words(3).slice(0, 32);
await page.getByLabel('Project Name').fill(projectName);
await page.getByText('Create Project', { exact: true }).click();
await expect(page.getByText('Creating your project...')).toBeVisible();
await expect(page.getByText('Project created successfully!')).toBeVisible();
expect(page.getByText('Creating your project...')).toBeVisible();
expect(page.getByText('Project created successfully!')).toBeVisible();
await expect(page.getByText('Internal info')).toBeVisible();
expect(page.getByText('Internal info')).toBeVisible();
await page.waitForSelector('h3:has-text("Project Health")', {
timeout: 180000,
});
const newProjectSlug = getProjectSlugFromUrl(page.url());
setNewProjectSlug(newProjectSlug);
setNewProjectName(organizationName);
const newOrgSlug = getOrgSlugFromUrl(page.url());
setFreeUserStarterOrgSlug(newOrgSlug);
});
test('should delete the new organization', async () => {
@@ -100,12 +107,12 @@ test('should delete the new organization', async () => {
await page.getByRole('button', { name: 'Delete' }).click();
await page.waitForSelector('h2:has-text("Delete Organization")');
await expect(page.getByTestId('deleteOrgButton')).toBeDisabled();
expect(page.getByTestId('deleteOrgButton')).toBeDisabled();
await page.getByLabel("I'm sure I want to delete this Organization").click();
await expect(page.getByTestId('deleteOrgButton')).toBeDisabled();
expect(page.getByTestId('deleteOrgButton')).toBeDisabled();
await page.getByLabel('I understand this action cannot be undone').click();
await expect(page.getByTestId('deleteOrgButton')).not.toBeDisabled();
expect(page.getByTestId('deleteOrgButton')).not.toBeDisabled();
await page.getByTestId('deleteOrgButton').click();
@@ -138,7 +145,7 @@ test('should be able to upgrade an organization', async () => {
await page.getByRole('link', { name: 'Billing' }).click();
await page.waitForSelector('h4:has-text("Subscription plan")');
await expect(page.getByText('Upgrade')).toBeEnabled();
expect(page.getByText('Upgrade')).toBeEnabled();
await page.getByText('Upgrade').click();
await page.waitForSelector('h2:has-text("Upgrade Organization")');
@@ -198,12 +205,12 @@ test('should be able to upgrade an organization', async () => {
await page.getByRole('button', { name: 'Delete' }).click();
await page.waitForSelector('h2:has-text("Delete Organization")');
await expect(page.getByTestId('deleteOrgButton')).toBeDisabled();
expect(page.getByTestId('deleteOrgButton')).toBeDisabled();
await page.getByLabel("I'm sure I want to delete this Organization").click();
await expect(page.getByTestId('deleteOrgButton')).toBeDisabled();
expect(page.getByTestId('deleteOrgButton')).toBeDisabled();
await page.getByLabel('I understand this action cannot be undone').click();
await expect(page.getByTestId('deleteOrgButton')).not.toBeDisabled();
expect(page.getByTestId('deleteOrgButton')).not.toBeDisabled();
await page.getByTestId('deleteOrgButton').click();

Some files were not shown because too many files have changed in this diff Show More