Compare commits
6 commits: cli@1.34.5...feat/add-e

| Author | SHA1 | Date |
| --- | --- | --- |
|  | f539713e10 |  |
|  | 9b150c786e |  |
|  | 3d917b6e12 |  |
|  | 7574504cd5 |  |
|  | be9f6b6967 |  |
|  | affe9db42a |  |

.changeset/README.md (new file, 9 additions)
@@ -0,0 +1,9 @@
# Changesets

Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool
that works with multi-package repos, or single-package repos to help you version and publish your
code. You can find the full documentation for it
[in our repository](https://github.com/changesets/changesets)

We have a quick list of common questions to get you started engaging with this project in
[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)

.changeset/config.json (new file, 9 additions)
@@ -0,0 +1,9 @@
{
  "$schema": "https://unpkg.com/@changesets/config@1.6.0/schema.json",
  "changelog": "@changesets/cli/changelog",
  "commit": false,
  "linked": [],
  "access": "restricted",
  "baseBranch": "main",
  "updateInternalDependencies": "patch"
}

.changeset/dry-kids-chew.md (new file, 5 additions)
@@ -0,0 +1,5 @@
---
'@nhost/dashboard': minor
---

feat: add empty string as default value for text in databases

.changeset/quick-keys-carry.md (new file, 5 additions)
@@ -0,0 +1,5 @@
---
'@nhost/dashboard': minor
---

fix: update babel dependencies to address security audit vulnerabilities

.github/CODEOWNERS (new file, 14 additions)
@@ -0,0 +1,14 @@
# Documentation
# https://help.github.com/en/articles/about-code-owners

/packages @nunopato @onehassan
/packages/docgen @nunopato @onehassan
/integrations/stripe-graphql-js @nunopato @onehassan
/.github @nunopato @onehassan
/dashboard/ @nunopato @onehassan
/docs/ @nunopato @onehassan
/config/ @nunopato @onehassan
/examples/ @nunopato @onehassan
/examples/codegen-react-apollo @nunopato @onehassan
/examples/codegen-react-query @nunopato @onehassan
/examples/react-apollo-crm @nunopato @onehassan

.github/ISSUE_TEMPLATE/bug_report.md (2 changes)
@@ -7,8 +7,6 @@ assignees: ''

---

> **Note:** Bug reports that are clearly AI-generated will not be accepted and will be closed immediately. Please write your bug report in your own words.

**Describe the bug**
A clear and concise description of what the bug is.


.github/ISSUE_TEMPLATE/feature_request.md (2 changes)
@@ -7,8 +7,6 @@ assignees: ''

---

> **Note:** Feature requests that are clearly AI-generated will not be accepted and will be closed immediately. Please write your feature request in your own words.

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]


.github/PULL_REQUEST_TEMPLATE.md (deleted, 46 deletions)
@@ -1,46 +0,0 @@
### Checklist

- [ ] No breaking changes
- [ ] Tests pass
- [ ] New features have new tests
- [ ] Documentation is updated (if applicable)
- [ ] Title of the PR is in the correct format (see below)

--- Delete everything below this line before submitting your PR ---

> **Note on AI-assisted contributions:** Contributions with the help of AI are permitted, but you are ultimately responsible for the quality of your submission and for ensuring it follows our contributing guidelines. **The PR description must be written in your own words and be clear and concise**. Please ensure you remove any superfluous code comments introduced by AI tools before submitting. PRs that clearly violate this rule will be closed without further review.

### PR title format

The PR title must follow the following pattern:

`TYPE(PKG): SUMMARY`

Where `TYPE` is:

- feat: mark this pull request as a feature
- fix: mark this pull request as a bug fix
- chore: mark this pull request as a maintenance item

Where `PKG` is:

- `auth`: For changes to the Nhost Auth service
- `ci`: For general changes to the build and/or CI/CD pipeline
- `cli`: For changes to the Nhost CLI
- `codegen`: For changes to the code generator
- `dashboard`: For changes to the Nhost Dashboard
- `deps`: For changes to dependencies
- `docs`: For changes to the documentation
- `examples`: For changes to the examples
- `internal/lib`: For changes to Nhost's common libraries (internal)
- `mintlify-openapi`: For changes to the Mintlify OpenAPI tool
- `nhost-js`: For changes to the Nhost JavaScript SDK
- `nixops`: For changes to the NixOps
- `storage`: For changes to the Nhost Storage service

Where `SUMMARY` is a short description of what the PR does.

### Tests

- please make sure your changes pass the current tests (Use the `make test`
- if you are introducing a new feature, please write as much tests as possible.

.github/actions/cache-nix/action.yml (deleted, 15 deletions)
@@ -1,15 +0,0 @@
name: 'Cache Nix to S3'
description: 'Copy Nix store to S3-backed cache'
inputs:
  NIX_CACHE_PRIV_KEY:
    description: 'Nix cache private key'
    required: true

runs:
  using: 'composite'
  steps:
    - name: "Cache build"
      shell: bash
      run: |
        nix store sign --key-file <(echo "${{ inputs.NIX_CACHE_PRIV_KEY }}") --all
        nix copy --to 's3://nhost-nix-cache?region=eu-central-1' --substitute-on-destination --all

.github/actions/discord-notification/action.yml (deleted, 29 deletions)
@@ -1,29 +0,0 @@
name: 'Discord Notification'
description: 'Send a Discord notification with conditional check'

inputs:
  webhook-url:
    description: 'Discord webhook URL'
    required: true
  title:
    description: 'Embed title'
    required: true
  description:
    description: 'Embed description'
    required: true
  color:
    description: 'Embed color (decimal number)'
    required: false
    default: '5763719'

runs:
  using: 'composite'
  steps:
    - name: Send Discord notification
      if: ${{ inputs.webhook-url }}
      uses: tsickert/discord-webhook@v7.0.0
      with:
        webhook-url: ${{ inputs.webhook-url }}
        embed-title: ${{ inputs.title }}
        embed-description: ${{ inputs.description }}
        embed-color: ${{ inputs.color }}

.github/actions/install-dependencies/action.yaml (new file, 59 additions)
@@ -0,0 +1,59 @@
name: Install Node and package dependencies
description: 'Install Node dependencies with pnpm'
inputs:
  TURBO_TOKEN:
    description: 'Turborepo token'
  TURBO_TEAM:
    description: 'Turborepo team'
  BUILD:
    description: 'Build packages'
    default: 'default'

runs:
  using: 'composite'
  steps:
    - uses: pnpm/action-setup@v4
      with:
        version: 10.1.0
        run_install: false
    - name: Get pnpm cache directory
      id: pnpm-cache-dir
      shell: bash
      run: echo "dir=$(pnpm store path)" >> $GITHUB_OUTPUT
    - uses: actions/cache@v4
      id: pnpm-cache
      with:
        path: ${{ steps.pnpm-cache-dir.outputs.dir }}
        key: ${{ runner.os }}-node-${{ hashFiles('pnpm-lock.yaml') }}
        restore-keys: ${{ runner.os }}-node-
    - name: Use Node.js v20
      uses: actions/setup-node@v3
      with:
        node-version: 20
    - shell: bash
      name: Use Latest Corepack
      run: |
        echo "Before: corepack version => $(corepack --version || echo 'not installed')"
        npm install -g corepack@latest
        echo "After : corepack version => $(corepack --version)"
        corepack enable
        pnpm --version
    - shell: bash
      name: Install packages
      run: pnpm install --frozen-lockfile
    # * Build all Nhost packages as they are all supposed to be tested.
    # * They are reused through the Turborepo cache
    - shell: bash
      name: Build packages
      if: ${{ inputs.BUILD == 'all' }}
      run: pnpm run build:all
      env:
        TURBO_TOKEN: ${{ inputs.TURBO_TOKEN }}
        TURBO_TEAM: ${{ inputs.TURBO_TEAM }}
    - shell: bash
      name: Build everything in the monorepo
      if: ${{ inputs.BUILD == 'default' }}
      run: pnpm run build
      env:
        TURBO_TOKEN: ${{ inputs.TURBO_TOKEN }}
        TURBO_TEAM: ${{ inputs.TURBO_TEAM }}

.github/actions/nhost-cli/README.md (new file, 108 additions)
@@ -0,0 +1,108 @@
# Nhost CLI GitHub Action

## Usage

```yaml
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install the Nhost CLI
        uses: ./.github/actions/nhost-cli
```

### Install the CLI and start the app

```yaml
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install Nhost CLI and start the application
        uses: ./.github/actions/nhost-cli
        with:
          start: true
```

### Set another working directory

```yaml
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install Nhost CLI
        uses: ./.github/actions/nhost-cli
        with:
          path: examples/react-apollo
          start: true
```

### Don't wait for the app to be ready

```yaml
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install Nhost CLI and start app
        uses: ./.github/actions/nhost-cli
        with:
          start: true
          wait: false
```

### Stop the app

```yaml
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Start app
        uses: ./.github/actions/nhost-cli
        with:
          start: true
      - name: Do something
        cmd: echo "do something"
      - name: Stop
        uses: ./.github/actions/nhost-cli
        with:
          stop: true
```

### Install a given value of the CLI

```yaml
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install Nhost CLI
        uses: ./.github/actions/nhost-cli
        with:
          version: v0.8.10
```

### Inject values into nhost/config.yaml

```yaml
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install Nhost CLI
        uses: ./.github/actions/nhost-cli
        with:
          config: |
            services:
              auth:
                image: nhost/hasura-auth:0.16.1
```

.github/actions/nhost-cli/action.yaml (new file, 84 additions)
@@ -0,0 +1,84 @@
name: Nhost CLI
description: 'Action to install the Nhost CLI and to run an application'
inputs:
  init:
    description: 'Initialize the application'
    default: 'false'
  start:
    description: "Start the application. If false, the application won't be started"
    default: 'false'
  wait:
    description: 'If starting the application, wait until it is ready'
    default: 'true'
  stop:
    description: 'Stop the application'
    default: 'false'
  path:
    description: 'Path to the application'
    default: '.'
  version:
    description: 'Version of the Nhost CLI'
    default: 'latest'
  dashboard-image:
    description: 'Image of the dashboard'
    default: 'nhost/dashboard:latest'
  config:
    description: 'Values to be injected into nhost/config.yaml'

runs:
  using: 'composite'
  steps:
    - name: Check if Nhost CLI is already installed
      id: check-nhost-cli
      shell: bash
      # TODO check if the version is the same
      run: |
        if [ -z "$(which nhost)" ]
        then
          echo "installed=false" >> $GITHUB_OUTPUT
        else
          echo "installed=true" >> $GITHUB_OUTPUT
        fi
    - name: Install Nhost CLI
      if: ${{ steps.check-nhost-cli.outputs.installed == 'false' }}
      uses: nick-fields/retry@v2
      with:
        timeout_minutes: 3
        max_attempts: 10
        command: bash <(curl --silent -L https://raw.githubusercontent.com/nhost/cli/main/get.sh) ${{ inputs.version }}
    - name: Initialize a new project from scratch
      if: ${{ inputs.init == 'true' }}
      shell: bash
      working-directory: ${{ inputs.path }}
      run: |
        rm -rf ./*
        nhost init
    - name: Set custom configuration
      if: ${{ inputs.config }}
      shell: bash
      working-directory: ${{ inputs.path }}
      run: config="${{ inputs.config }}" yq -i '. *= env(config)' nhost/config.yaml
    - name: Start the application
      if: ${{ inputs.start == 'true' }}
      shell: bash
      working-directory: ${{ inputs.path }}
      run: |
        if [ -n "${{ inputs.dashboard-image }}" ]; then
          export NHOST_DASHBOARD_VERSION=${{ inputs.dashboard-image }}
        fi
        if [ -f .secrets.example ]; then
          cp .secrets.example .secrets
        fi
        nhost up
    - name: Log on failure
      if: steps.wait.outcome == 'failure'
      shell: bash
      working-directory: ${{ inputs.path }}
      run: |
        nhost logs
        exit 1
    - name: Stop the application
      if: ${{ inputs.stop == 'true' }}
      shell: bash
      working-directory: ${{ inputs.path }}
      run: nhost down

.github/actions/setup-nix/action.yml (deleted, 51 deletions)
@@ -1,51 +0,0 @@
name: 'Setup Nix'
description: 'Install Nix and setup caching for Nhost projects'
inputs:
  NAME:
    description: 'Project name for cache key'
    required: true
  NIX_CACHE_PUB_KEY:
    description: 'Nix cache public key'
    required: true
  GITHUB_TOKEN:
    description: 'GitHub token for Nix access'
    required: true

runs:
  using: 'composite'
  steps:
    - name: Install Nix
      uses: cachix/install-nix-action@v31
      with:
        install_url: "https://releases.nixos.org/nix/nix-2.28.4/install"
        install_options: "--no-daemon"
        extra_nix_config: |
          experimental-features = nix-command flakes
          sandbox = false
          access-tokens = github.com=${{ inputs.GITHUB_TOKEN }}
          substituters = https://cache.nixos.org/?priority=40 s3://nhost-nix-cache?region=eu-central-1&priority=50
          trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= ${{ inputs.NIX_CACHE_PUB_KEY }}
          keep-env-derivations = true
          keep-outputs = true

    - name: Restore and save Nix store
      uses: nix-community/cache-nix-action@v6
      with:
        primary-key: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/*.nix', '**/flake.lock') }}
        restore-prefixes-first-match: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}}-
        gc-max-store-size-linux: 2G
        purge: true
        purge-prefixes: nix-${{ inputs.NAME }}-
        purge-created: 0
        purge-last-accessed: 0
        purge-primary-key: never

    # - name: "Verify if nixops is pre-built"
    #   id: verify-nixops-build
    #   run: |
    #     export drvPath=$(make build-nixops-dry-run)
    #     echo "Derivation path: $drvPath"
    #     nix path-info --store s3://nhost-nix-cache\?region=eu-central-1 $drvPath \
    #       || (echo "Wait until nixops is already built and cached and run again" && exit 1)
    #   if: ${{ inputs.NAME != 'nixops' }}


.github/actions/validate-pr-title/action.yaml (deleted, 41 deletions)
@@ -1,41 +0,0 @@
---
name: "Validate PR Title"
description: "Validates that PR title follows the required format: TYPE(PKG): SUMMARY"
inputs:
  pr_title:
    description: "The PR title to validate"
    required: true
runs:
  using: "composite"
  steps:
    - name: "Validate PR title format"
      shell: bash
      run: |
        PR_TITLE="${{ inputs.pr_title }}"

        echo "Validating PR title: $PR_TITLE"

        # Define valid types and packages
        VALID_TYPES="feat|fix|chore"
        VALID_PKGS="auth|ci|cli|codegen|dashboard|deps|docs|examples|internal\/lib|mintlify-openapi|nhost-js|nixops|storage"

        # Check if title matches the pattern TYPE(PKG): SUMMARY
        if [[ ! "$PR_TITLE" =~ ^(${VALID_TYPES})\((${VALID_PKGS})\):\ .+ ]]; then
          echo "❌ PR title does not follow the required format!"
          echo ""
          echo "Expected format: TYPE(PKG): SUMMARY"
          echo ""
          echo "Valid TYPEs:"
          echo "  - feat: mark this pull request as a feature"
          echo "  - fix: mark this pull request as a bug fix"
          echo "  - chore: mark this pull request as a maintenance item"
          echo ""
          echo "Valid PKGs:"
          echo "  - auth, ci, cli, codegen, dashboard, deps, docs, examples,"
          echo "  - mintlify-openapi, nhost-js, nixops, storage"
          echo ""
          echo "Example: feat(cli): add new command for database migrations"
          exit 1
        fi

        echo "✅ PR title is valid!"

.github/dependabot.yaml (deleted, 14 deletions)
@@ -1,14 +0,0 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    open-pull-requests-limit: 10
    directory: "/"
    schedule:
      interval: "daily"
      time: "04:00"
    commit-message:
      prefix: "chore(ci)"
    labels:
      - "dependencies"
      - "github_actions"
      - "chore"

.github/labeler.yml (new file, 24 additions)
@@ -0,0 +1,24 @@
dashboard:
  - dashboard/**/*

documentation:
  - any:
      - docs/**/*

examples:
  - examples/**/*

sdk:
  - packages/**/*

integrations:
  - integrations/**/*

react:
  - '{packages,examples,integrations}/*react*/**/*'

nextjs:
  - '{packages,examples}/*next*/**/*'

vue:
  - '{packages,examples,integrations}/*vue*/**/*'

.github/workflows/auth_checks.yaml (deleted, 84 deletions)
@@ -1,84 +0,0 @@
---
name: "auth: check and build"
on:
  pull_request_target:
    paths:
      - '.github/workflows/auth_checks.yaml'
      - '.github/workflows/wf_check.yaml'
      - '.github/workflows/wf_build_artifacts.yaml'

      # common build
      - 'flake.nix'
      - 'flake.lock'
      - 'nixops/**'
      - 'build/**'

      # common go
      - '.golangci.yaml'
      - 'go.mod'
      - 'go.sum'
      - 'internal/lib/**'
      - 'vendor/**'

      # auth
      - 'services/auth/**'
  push:
    branches:
      - main

concurrency:
  group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.pull_request.number) || format('push-{0}', github.sha) }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

jobs:
  check-permissions:
    runs-on: ubuntu-latest
    steps:
      - run: |
          echo "github.event_name: ${{ github.event_name }}"
          echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
      - name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
        if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
        run: |
          exit 1

  tests:
    uses: ./.github/workflows/wf_check.yaml
    needs:
      - check-permissions
    with:
      NAME: auth
      PATH: services/auth
      GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}

    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
      NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
      NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
      NHOST_PAT: ${{ secrets.NHOST_PAT }}

  build_artifacts:
    uses: ./.github/workflows/wf_build_artifacts.yaml
    needs:
      - check-permissions
    with:
      NAME: auth
      PATH: services/auth
      GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
      VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
      DOCKER: true
    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
      NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
      NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}

  remove_label:
    runs-on: ubuntu-latest
    needs:
      - check-permissions
    steps:
      - uses: actions-ecosystem/action-remove-labels@v1
        with:
          labels: |
            safe_to_test
        if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')

.github/workflows/auth_wf_release.yaml (deleted, 60 deletions)
@@ -1,60 +0,0 @@
---
name: "auth: release"
on:
  workflow_call:
    inputs:
      GIT_REF:
        required: true
        type: string
      VERSION:
        required: true
        type: string
    secrets:
      AWS_ACCOUNT_ID:
        required: true
      NIX_CACHE_PUB_KEY:
        required: true
      NIX_CACHE_PRIV_KEY:
        required: true
      DOCKER_USERNAME:
        required: true
      DOCKER_PASSWORD:
        required: true

jobs:
  build_artifacts:
    uses: ./.github/workflows/wf_build_artifacts.yaml
    with:
      NAME: auth
      PATH: services/auth
      GIT_REF: ${{ inputs.GIT_REF }}
      VERSION: ${{ inputs.VERSION }}
      DOCKER: true
    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
      NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
      NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}

  push-docker-hub:
    uses: ./.github/workflows/wf_docker_push_image.yaml
    needs:
      - build_artifacts
    with:
      NAME: auth
      PATH: services/auth
      VERSION: ${{ inputs.VERSION }}
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}

  push-docker-ecr:
    uses: ./.github/workflows/wf_docker_push_image_ecr.yaml
    needs:
      - build_artifacts
    with:
      NAME: auth
      PATH: services/auth
      VERSION: ${{ inputs.VERSION }}
    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
      CONTAINER_REGISTRY: ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.eu-central-1.amazonaws.com

.github/workflows/changesets.yaml (new file, 157 additions)
@@ -0,0 +1,157 @@
name: Release

on:
  push:
    branches: [main]
    paths-ignore:
      - 'docs/**'
      - 'examples/**'
      - 'assets/**'
      - '**.md'
      - '!.changeset/**'
      - 'LICENSE'
  workflow_dispatch:

env:
  TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
  TURBO_TEAM: nhost
  DASHBOARD_PACKAGE: '@nhost/dashboard'

jobs:
  version:
    name: Version
    runs-on: ubuntu-latest
    outputs:
      hasChangesets: ${{ steps.changesets.outputs.hasChangesets }}
      dashboardVersion: ${{ steps.dashboard.outputs.dashboardVersion }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Install Node and dependencies
        uses: ./.github/actions/install-dependencies
        with:
          TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
          TURBO_TEAM: ${{ env.TURBO_TEAM }}
      - name: Create PR or Publish release
        id: changesets
        uses: changesets/action@v1
        with:
          version: pnpm run ci:version
          commit: 'chore: update versions'
          title: 'chore: update versions'
          publish: pnpm run release
          createGithubReleases: false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
      - name: Check Dashboard tag
        id: dashboard
        if: steps.changesets.outputs.hasChangesets == 'false'
        run: |
          DASHBOARD_VERSION=$(jq -r .version dashboard/package.json)
          GIT_TAG="${{ env.DASHBOARD_PACKAGE}}@$DASHBOARD_VERSION"
          if [ -z "$(git tag -l | grep $GIT_TAG)" ]; then
            echo "dashboardVersion=$DASHBOARD_VERSION" >> $GITHUB_OUTPUT
          fi

  test:
    needs: version
    name: Dashboard
    if: needs.version.outputs.dashboardVersion != ''
    uses: ./.github/workflows/dashboard.yaml
    secrets: inherit

  publish-vercel:
    name: Publish to Vercel
    needs:
      - test
    uses: ./.github/workflows/deploy-dashboard.yaml
    with:
      git_ref: ${{ github.ref_name }}
      environment: production
    secrets: inherit

  publish-docker:
    name: Publish to Docker Hub
    runs-on: ubuntu-latest
    needs:
      - test
      - version
      - publish-vercel
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Add git tag
        run: |
          git tag "${{ env.DASHBOARD_PACKAGE }}@${{ needs.version.outputs.dashboardVersion }}"
          git push origin --tags
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: |
            nhost/dashboard
          tags: |
            type=raw,value=latest,enable=true
            type=semver,pattern={{version}},value=v${{ needs.version.outputs.dashboardVersion }}
            type=semver,pattern={{major}}.{{minor}},value=v${{ needs.version.outputs.dashboardVersion }}
            type=semver,pattern={{major}},value=v${{ needs.version.outputs.dashboardVersion }}
            type=sha
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Build and push to Docker Hub
        uses: docker/build-push-action@v4
        timeout-minutes: 90
        with:
          context: .
          file: ./dashboard/Dockerfile
          platforms: linux/amd64,linux/arm64
          cache-from: type=gha
          cache-to: type=gha,mode=max
          build-args: |
            TURBO_TOKEN=${{ env.TURBO_TOKEN }}
            TURBO_TEAM=${{ env.TURBO_TEAM }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          push: true

  bump-cli:
    name: Bump Dashboard version in the Nhost CLI
    runs-on: ubuntu-latest
    needs:
      - version
      - publish-docker
    steps:
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          repository: nhost/cli
          token: ${{ secrets.GH_PAT }}
          fetch-depth: 0
      - name: Bump version in source code
        run: |
          IMAGE=$(echo ${{ env.DASHBOARD_PACKAGE }} | sed 's/@\(.\+\)\/\(.\+\)/\1\\\/\2/g')
          VERSION="${{ needs.version.outputs.dashboardVersion }}"
          EXPRESSION='s/"'$IMAGE':[0-9]\+\.[0-9]\+\.[0-9]\+"/"'$IMAGE':'$VERSION'"/g'
          find ./ -type f -exec sed -i -e $EXPRESSION {} \;
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v5
        with:
          token: ${{ secrets.GH_PAT }}
          commit-message: 'chore: bump nhost/dashboard to ${{ needs.version.outputs.dashboardVersion }}'
          branch: bump-dashboard-version
          delete-branch: true
          title: 'chore: bump nhost/dashboard to ${{ needs.version.outputs.dashboardVersion }}'
          body: |
            This PR bumps the Nhost Dashboard Docker image to version ${{ needs.version.outputs.dashboardVersion }}.

.github/workflows/ci.yaml (new file, 200 additions)
@@ -0,0 +1,200 @@
name: Continuous Integration

on:
  push:
    branches: [main]
    paths-ignore:
      - 'assets/**'
      - '**.md'
      - 'LICENSE'
  pull_request:
    types: [opened, synchronize]
    paths-ignore:
      - 'assets/**'
      - '**.md'
      - 'LICENSE'
env:
  TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
  TURBO_TEAM: nhost
  NEXT_PUBLIC_ENV: dev
  NEXT_TELEMETRY_DISABLED: 1
  NHOST_TEST_DASHBOARD_URL: ${{ vars.NHOST_TEST_DASHBOARD_URL }}
  NHOST_TEST_WORKSPACE_NAME: ${{ vars.NHOST_TEST_WORKSPACE_NAME }}
  NHOST_TEST_PROJECT_NAME: ${{ vars.NHOST_TEST_PROJECT_NAME }}
  NHOST_TEST_ORGANIZATION_NAME: ${{ vars.NHOST_TEST_ORGANIZATION_NAME }}
  NHOST_TEST_ORGANIZATION_SLUG: ${{ vars.NHOST_TEST_ORGANIZATION_SLUG }}
  NHOST_TEST_PERSONAL_ORG_SLUG: ${{ vars.NHOST_TEST_PERSONAL_ORG_SLUG }}
  NHOST_TEST_PROJECT_SUBDOMAIN: ${{ vars.NHOST_TEST_PROJECT_SUBDOMAIN }}
  NHOST_PRO_TEST_PROJECT_NAME: ${{ vars.NHOST_PRO_TEST_PROJECT_NAME }}
  NHOST_TEST_USER_EMAIL: ${{ secrets.NHOST_TEST_USER_EMAIL }}
  NHOST_TEST_USER_PASSWORD: ${{ secrets.NHOST_TEST_USER_PASSWORD }}
  NHOST_TEST_PROJECT_ADMIN_SECRET: ${{ secrets.NHOST_TEST_PROJECT_ADMIN_SECRET }}

jobs:
  build:
    name: Build @nhost packages
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      # * Install Node and dependencies. Package downloads will be cached for the next jobs.
      - name: Install Node and dependencies
        uses: ./.github/actions/install-dependencies
        with:
          TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
          TURBO_TEAM: ${{ env.TURBO_TEAM }}
          BUILD: 'all'
      - name: Check if the pnpm lockfile changed
        id: changed-lockfile
        uses: tj-actions/changed-files@v37
        with:
          files: pnpm-lock.yaml
      # * Determine a pnpm filter argument for packages that have been modified.
      # * If the lockfile has changed, we don't filter anything in order to run all the e2e tests.
      - name: filter packages
        id: filter-packages
        if: steps.changed-lockfile.outputs.any_changed != 'true' && github.event_name == 'pull_request'
        run: echo "filter=${{ format('--filter=...[origin/{0}]', github.base_ref) }}" >> $GITHUB_OUTPUT
      # * List packages that has an `e2e` script, except the root, and return an array of their name and path
      # * In a PR, only include packages that have been modified, and their dependencies
      - name: List examples with an e2e script
        id: set-matrix
        run: |
          PACKAGES=$(pnpm recursive list --depth -1 --parseable --filter='!nhost-root' ${{ steps.filter-packages.outputs.filter }} \
            | xargs -I@ realpath --relative-to=$PWD @ \
            | xargs -I@ jq "if (.scripts.e2e | length) != 0 then {name: .name, path: \"@\"} else null end" @/package.json \
            | awk "!/null/" \
            | jq -c --slurp 'map(select(length > 0))')
          echo "matrix=$PACKAGES" >> $GITHUB_OUTPUT
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}

  unit:
    name: Unit tests
    needs: build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      # * Install Node and dependencies. Package dependencies won't be downloaded again as they have been cached by the `build` job.
      - name: Install Node and dependencies
        uses: ./.github/actions/install-dependencies
        with:
          TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
          TURBO_TEAM: ${{ env.TURBO_TEAM }}
      # * Run every `test` script in the workspace. Dependencies build is cached by Turborepo
      - name: Run unit tests
        run: pnpm run test:all
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          files: '**/coverage/coverage-final.json'
          name: codecov-umbrella
      - name: Create summary
        run: |
          echo '### Code coverage' >> $GITHUB_STEP_SUMMARY
          echo 'Visit [codecov](https://app.codecov.io/gh/nhost/nhost/) to see the code coverage reports' >> $GITHUB_STEP_SUMMARY

  lint:
    name: Lint
    needs: build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      # * Install Node and dependencies. Package dependencies won't be downloaded again as they have been cached by the `build` job.
      - name: Install Node and dependencies
        uses: ./.github/actions/install-dependencies
        with:
          TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
          TURBO_TEAM: ${{ env.TURBO_TEAM }}
      - name: Enforce Prettier formatting in dashboard
        working-directory: ./dashboard
        run: pnpm prettier --check "./**/*.tsx" --config prettier.config.js
      # * Run every `lint` script in the workspace. Dependencies build is cached by Turborepo
      - name: Lint
        run: pnpm run lint:all
      - name: Audit for vulnerabilities
        run: pnpx audit-ci --config ./audit-ci.jsonc

  e2e:
    name: 'E2E (Package: ${{ matrix.package.path }})'
    needs: build
    if: ${{ needs.build.outputs.matrix != '[]' && needs.build.outputs.matrix != '' }}
    strategy:
      # * Don't cancel other matrices when one fails
      fail-fast: false
      matrix:
        package: ${{ fromJson(needs.build.outputs.matrix) }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      # * Install Node and dependencies. Package dependencies won't be downloaded again as they have been cached by the `build` job.
      - name: Install Node and dependencies
        uses: ./.github/actions/install-dependencies
        with:
          TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
          TURBO_TEAM: ${{ env.TURBO_TEAM }}
      # * Build Dashboard image to test it locally
      - name: Build Dashboard local image
        if: matrix.package.path == 'dashboard'
        run: |
          docker build -t nhost/dashboard:0.0.0-dev -f ${{ matrix.package.path }}/Dockerfile .
          mkdir -p nhost-test-project
      # * Install Nhost CLI if a `nhost/config.yaml` file is found
      - name: Install Nhost CLI
        if: hashFiles(format('{0}/nhost/config.yaml', matrix.package.path)) != '' && matrix.package.path != 'dashboard'
        uses: ./.github/actions/nhost-cli
      # * Install Nhost CLI to test Dashboard locally
      - name: Install Nhost CLI (Local Dashboard tests)
        timeout-minutes: 5
        if: matrix.package.path == 'dashboard'
        uses: ./.github/actions/nhost-cli
        with:
          init: 'true' # Initialize the application
          start: 'true' # Start the application
          path: ./nhost-test-project
          wait: 'true' # Wait until the application is ready
          dashboard-image: 'nhost/dashboard:0.0.0-dev'
      - name: Fetch Dashboard Preview URL
        id: fetch-dashboard-preview-url
        uses: zentered/vercel-preview-url@v1.1.9
        if: github.ref_name != 'main'
        env:
          VERCEL_TOKEN: ${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
          GITHUB_REF: ${{ github.ref_name }}
          GITHUB_REPOSITORY: ${{ github.repository }}
        with:
          vercel_team_id: ${{ secrets.DASHBOARD_VERCEL_TEAM_ID }}
          vercel_project_id: ${{ secrets.DASHBOARD_STAGING_VERCEL_PROJECT_ID }}
          vercel_state: BUILDING,READY,INITIALIZING
      - name: Set Dashboard Preview URL
        if: steps.fetch-dashboard-preview-url.outputs.preview_url != ''
        run: echo "NHOST_TEST_DASHBOARD_URL=https://${{ steps.fetch-dashboard-preview-url.outputs.preview_url }}" >> $GITHUB_ENV
      # * Run the `ci` script of the current package of the matrix. Dependencies build is cached by Turborepo
      - name: Run e2e tests
        timeout-minutes: 20
        run: pnpm --filter="${{ matrix.package.name }}" run e2e
      # * Run the `e2e-local` script of the dashboard
      - name: Run Local Dashboard e2e tests
        if: matrix.package.path == 'dashboard'
        timeout-minutes: 5
        run: |
          pnpm --filter="${{ matrix.package.name }}" run e2e-local

      - name: Stop Nhost CLI
        if: matrix.package.path == 'dashboard'
        working-directory: ./nhost-test-project
        run: nhost down
      - id: file-name
        if: ${{ failure() }}
        name: Transform package name into a valid file name
        run: |
          PACKAGE_FILE_NAME=$(echo "${{ matrix.package.name }}" | sed 's/@//g; s/\//-/g')
          echo "fileName=$PACKAGE_FILE_NAME" >> $GITHUB_OUTPUT
      # * Run this step only if the previous step failed, and Playwright generated a report
      - name: Upload Playwright Report
        if: ${{ failure() && hashFiles(format('{0}/playwright-report/**', matrix.package.path)) != ''}}
        uses: actions/upload-artifact@v4
        with:
          name: playwright-${{ steps.file-name.outputs.fileName }}
          path: ${{format('{0}/playwright-report/**', matrix.package.path)}}

.github/workflows/ci_create_release.yaml (deleted, 77 deletions)
@@ -1,77 +0,0 @@
---
name: "ci: create release"
on:
  pull_request:
    types: [closed]
    branches:
      - main

jobs:
  create-release:
    if: github.event.pull_request.merged == true && startsWith(github.event.pull_request.title, 'release(')
    runs-on: blacksmith-2vcpu-ubuntu-2404
    timeout-minutes: 30

    permissions:
      id-token: write
      contents: write
      pull-requests: read
      actions: write

    steps:
      - name: "Check out repository"
        uses: actions/checkout@v5
        with:
          fetch-depth: 0

      - name: Setup Nix with Cache
        uses: ./.github/actions/setup-nix
        with:
          NAME: ${{ inputs.NAME }}
          NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: "Extract project and version from PR title"
        id: extract
        run: |
          TITLE="${{ github.event.pull_request.title }}"

          PROJECT=$(echo "${TITLE}" | sed 's/release(\([^)]*\)).*/\1/')
          if [ -z "$PROJECT" ]; then
            echo "Error: Could not extract project name from PR title"
            exit 1
          fi

          VERSION=$(echo "${TITLE}" | sed 's/.*release([^)]*):\W*\(.*\).*/\1/')
          if [ -z "$VERSION" ]; then
            echo "Error: Could not extract version from PR title"
            exit 1
          fi

          cd $PROJECT

          PROJECT_NAME=$(make release-tag-name)

          echo "project=$PROJECT" >> $GITHUB_OUTPUT
          echo "project_name=$PROJECT_NAME" >> $GITHUB_OUTPUT
          echo "version=$VERSION" >> $GITHUB_OUTPUT
          echo "tag=$PROJECT_NAME@$VERSION" >> $GITHUB_OUTPUT

      - name: "Get unreleased changelog content"
        id: changelog
        run: |
          cd ${{ steps.extract.outputs.project }}
          CHANGELOG_CONTENT=$(nix develop .#cliff -c make changelog-get-unreleased)
          echo "content<<EOF" >> $GITHUB_OUTPUT
          echo "$CHANGELOG_CONTENT" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT

      - name: "Create GitHub Release"
        run: |
          gh release create "${{ steps.extract.outputs.tag }}" \
            --title "${{ steps.extract.outputs.tag }}" \
            --notes "${{ steps.changelog.outputs.content }}" \
            --target main
        env:
          # We need to use a PAT because GITHUB_TOKEN does not trigger workflows on releases
          GH_TOKEN: ${{ secrets.GH_PAT }}

.github/workflows/ci_release.yaml (deleted, 111 deletions)
@@ -1,111 +0,0 @@
---
name: "ci: release"
on:
  release:
    types: [published]

jobs:
  extract-project:
    runs-on: blacksmith-2vcpu-ubuntu-2404
    timeout-minutes: 5
    outputs:
      project: ${{ steps.extract.outputs.project }}
      version: ${{ steps.extract.outputs.version }}
    steps:
      - name: "Extract project and version from tag"
        id: extract
        run: |
          TAG="${{ github.event.release.tag_name }}"

          PROJECT=$(echo "${TAG}" | sed 's/@[^@]*$//')
          if [ -z "$PROJECT" ]; then
            echo "Error: Could not extract project name from tag"
            exit 1
          fi

          VERSION=$(echo "${TAG}" | sed 's/.*@//')
          if [ -z "$VERSION" ]; then
            echo "Error: Could not extract version from tag"
            exit 1
          fi

          echo "project=$PROJECT" >> $GITHUB_OUTPUT
          echo "version=$VERSION" >> $GITHUB_OUTPUT
          echo "Extracted project: $PROJECT, version: $VERSION"

  auth:
    needs: extract-project
    if: needs.extract-project.outputs.project == 'auth'
    uses: ./.github/workflows/auth_wf_release.yaml
    with:
      GIT_REF: ${{ github.sha }}
      VERSION: ${{ needs.extract-project.outputs.version }}
    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
      NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
      NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}

  cli:
    needs: extract-project
    if: needs.extract-project.outputs.project == 'cli'
    uses: ./.github/workflows/cli_wf_release.yaml
    with:
      GIT_REF: ${{ github.sha }}
      VERSION: ${{ needs.extract-project.outputs.version }}
    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
      NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
      NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
      GH_PAT: ${{ secrets.GH_PAT }}
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}

  dashboard:
    needs: extract-project
    if: needs.extract-project.outputs.project == '@nhost/dashboard'
    uses: ./.github/workflows/dashboard_wf_release.yaml
    with:
      GIT_REF: ${{ github.sha }}
      VERSION: ${{ needs.extract-project.outputs.version }}
    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
      NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
      NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
      VERCEL_TEAM_ID: ${{ secrets.DASHBOARD_VERCEL_TEAM_ID }}
      VERCEL_PROJECT_ID: ${{ secrets.DASHBOARD_VERCEL_PROJECT_ID }}
      VERCEL_DEPLOY_TOKEN: ${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
      DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_PRODUCTION }}
      GH_PAT: ${{ secrets.GH_PAT }}
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}

  nhost-js:
    needs: extract-project
    if: needs.extract-project.outputs.project == '@nhost/nhost-js'
    uses: ./.github/workflows/wf_release_npm.yaml
    with:
      NAME: nhost-js
      PATH: packages/nhost-js
      VERSION: ${{ needs.extract-project.outputs.version }}
    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
      NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
      NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
      NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
      DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_PRODUCTION }}

  storage:
    needs: extract-project
    if: needs.extract-project.outputs.project == 'storage'
    uses: ./.github/workflows/storage_wf_release.yaml
    with:
      GIT_REF: ${{ github.sha }}
      VERSION: ${{ needs.extract-project.outputs.version }}
    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
      NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
      NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}

.github/workflows/ci_update_changelog.yaml (deleted, 73 deletions)
@@ -1,73 +0,0 @@
---
name: "ci: update changelog"
on:
  push:
    branches:
      - main

jobs:
  update-changelog:
    if: ${{ !startsWith(github.event.head_commit.message, 'release(') }}
    runs-on: blacksmith-2vcpu-ubuntu-2404
    timeout-minutes: 30

    strategy:
      matrix:
        project: [cli, dashboard, packages/nhost-js, services/auth, services/storage]

    permissions:
      id-token: write
      contents: write
      pull-requests: write
      actions: write

    steps:
      - name: "Check out repository"
        uses: actions/checkout@v5
        with:
          fetch-depth: 0

      - name: Setup Nix with Cache
        uses: ./.github/actions/setup-nix
        with:
          NAME: ${{ inputs.NAME }}
          NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: "Get next version"
        id: version
        run: |
          cd ${{ matrix.project }}
          TAG_NAME=$(make release-tag-name)
          VERSION=$(nix develop .\#cliff -c make changelog-next-version)
          if git tag | grep -qx "$TAG_NAME@$VERSION"; then
            echo "Tag $TAG_NAME@$VERSION already exists, skipping release preparation"
          else
            echo "Tag $TAG_NAME@$VERSION does not exist, proceeding with release preparation"
            echo "version=$VERSION" >> $GITHUB_OUTPUT
          fi

      - name: "Update changelog"
        if: steps.version.outputs.version != ''
        run: |
          cd ${{ matrix.project }}
          nix develop .\#cliff -c make changelog-update

      - name: "Create Pull Request"
        if: steps.version.outputs.version != ''
        uses: peter-evans/create-pull-request@v7
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: "release(${{ matrix.project }}): ${{ steps.version.outputs.version }}"
          title: "release(${{ matrix.project }}): ${{ steps.version.outputs.version }}"
          committer: GitHub <noreply@github.com>
          author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
          body: |
            Automated release preparation for ${{ matrix.project }} version ${{ steps.version.outputs.version }}

            Changes:
            - Updated CHANGELOG.md
          branch: release/${{ matrix.project }}
          delete-branch: true
          labels: |
            release,${{ matrix.project }}

.github/workflows/cli_checks.yaml (deleted, 100 deletions)
@@ -1,100 +0,0 @@
---
name: "cli: check and build"
on:
  pull_request_target:
    paths:
      - '.github/workflows/cli_checks.yaml'
      - '.github/workflows/wf_check.yaml'
      - '.github/workflows/wf_build_artifacts.yaml'
      - '.github/workflows/cli_test_new_project.yaml'

      # common build
      - 'flake.nix'
      - 'flake.lock'
      - 'nixops/**'
      - 'build/**'

      # common go
      - '.golangci.yaml'
      - 'go.mod'
      - 'go.sum'
      - 'vendor/**'

      # cli
      - 'cli/**'
  push:
    branches:
      - main

concurrency:
  group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.pull_request.number) || format('push-{0}', github.sha) }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

jobs:
  check-permissions:
    runs-on: ubuntu-latest
    steps:
      - run: |
          echo "github.event_name: ${{ github.event_name }}"
          echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
      - name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
        if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
        run: |
          exit 1

  tests:
    uses: ./.github/workflows/wf_check.yaml
    needs:
      - check-permissions
    with:
      NAME: cli
      PATH: cli
      GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}

    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
      NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
      NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
      NHOST_PAT: ${{ secrets.NHOST_PAT }}

  build_artifacts:
    uses: ./.github/workflows/wf_build_artifacts.yaml
    needs:
      - check-permissions
    with:
      NAME: cli
      PATH: cli
      GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
      VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
      DOCKER: true
    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
      NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
      NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}

  test_cli_build:
    uses: ./.github/workflows/cli_wf_test_new_project.yaml
    needs:
      - check-permissions
      - build_artifacts
    with:
      NAME: cli
      PATH: cli
      GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}

    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
      NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
      NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
      NHOST_PAT: ${{ secrets.NHOST_PAT }}

  remove_label:
    runs-on: ubuntu-latest
    needs:
      - check-permissions
    steps:
      - uses: actions-ecosystem/action-remove-labels@v1
        with:
          labels: |
            safe_to_test
        if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')

.github/workflows/cli_wf_release.yaml (deleted, 122 deletions)
@@ -1,122 +0,0 @@
---
name: "cli: release"
on:
  workflow_call:
    inputs:
      GIT_REF:
        required: true
        type: string
      VERSION:
        required: true
        type: string
    secrets:
      AWS_ACCOUNT_ID:
        required: true
      NIX_CACHE_PUB_KEY:
        required: true
      NIX_CACHE_PRIV_KEY:
        required: true
      GH_PAT:
        required: true
      DOCKER_USERNAME:
        required: true
      DOCKER_PASSWORD:
        required: true

jobs:
  build_artifacts:
    uses: ./.github/workflows/wf_build_artifacts.yaml
    with:
      NAME: cli
      PATH: cli
      GIT_REF: ${{ inputs.GIT_REF }}
      VERSION: ${{ inputs.VERSION }}
      DOCKER: true
    secrets:
      AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
      NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
      NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}

  push-docker:
    uses: ./.github/workflows/wf_docker_push_image.yaml
    needs:
      - build_artifacts
    with:
      NAME: cli
      PATH: cli
      VERSION: ${{ inputs.VERSION }}
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}

  build-multiplatform:
    permissions:
      id-token: write
      contents: write

    defaults:
      run:
        working-directory: cli

    runs-on: blacksmith-2vcpu-ubuntu-2404
    timeout-minutes: 180

    steps:
      - name: "Check out repository"
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.GIT_REF }}

      - name: Configure aws
        uses: aws-actions/configure-aws-credentials@v5
        with:
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
          aws-region: eu-central-1

      - name: Setup Nix with Cache
        uses: ./.github/actions/setup-nix
        with:
          NAME: cli
          NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Compute common env vars
        id: vars
        run: |
          echo "VERSION=$(make get-version VER=${{ inputs.VERSION }})" >> $GITHUB_OUTPUT
          ARCH=$([ "${{ runner.arch }}" == "X64" ] && echo "x86_64" || echo "aarch64")
          echo "ARCH=${ARCH}" >> $GITHUB_OUTPUT

      - name: "Build artifact"
        run: |
          make build-multiplatform

      - name: "Upload assets"
        shell: bash
        env:
          GITHUB_TOKEN: ${{ secrets.GH_PAT }}
        run: |
          export VERSION=${{ steps.vars.outputs.VERSION }}

          mkdir upload

          tar cvzf upload/cli-$VERSION-darwin-amd64.tar.gz -C result/darwin/amd64 cli
          tar cvzf upload/cli-$VERSION-darwin-arm64.tar.gz -C result/darwin/arm64 cli
          tar cvzf upload/cli-$VERSION-linux-amd64.tar.gz -C result/linux/amd64 cli
          tar cvzf upload/cli-$VERSION-linux-arm64.tar.gz -C result/linux/arm64 cli

          cd upload
          find . -type f -exec sha256sum {} + > ../checksums.txt
          cd ..

          cat checksums.txt

          gh release upload \
            --clobber "${{ github.ref_name }}" \
            ./upload/* checksums.txt

      - name: "Store Nix cache"
        uses: ./.github/actions/cache-nix
        with:
          NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
        if: always()

.github/workflows/cli_wf_test_new_project.yaml (deleted, 97 deletions)
@@ -1,97 +0,0 @@
---
on:
  workflow_call:
    inputs:
      NAME:
        type: string
        required: true
      PATH:
        type: string
        required: true
      GIT_REF:
        type: string
        required: false
    secrets:
      AWS_ACCOUNT_ID:
        required: true
      NIX_CACHE_PUB_KEY:
        required: true
      NIX_CACHE_PRIV_KEY:
        required: true
      NHOST_PAT:
        required: true

jobs:
  tests:
    runs-on: blacksmith-2vcpu-ubuntu-2404
    timeout-minutes: 30

    defaults:
      run:
        working-directory: ${{ inputs.PATH }}

    env:
      NHOST_PAT: ${{ secrets.NHOST_PAT }}

    permissions:
      id-token: write
      contents: write
      actions: read

    steps:
      - name: "Check out repository"
        uses: actions/checkout@v5
        with:
          ref: ${{ inputs.GIT_REF }}

      - name: Collect Workflow Telemetry
        uses: catchpoint/workflow-telemetry-action@v2
        with:
          comment_on_pr: false

      - name: Configure aws
        uses: aws-actions/configure-aws-credentials@v5
        with:
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
          aws-region: eu-central-1

      - name: Setup Nix with Cache
        uses: ./.github/actions/setup-nix
        with:
          NAME: ${{ inputs.NAME }}
          NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: "Get artifacts"
        uses: actions/download-artifact@v6
        with:
          path: ~/artifacts

      - name: "Inspect artifacts"
        run: find ~/artifacts

      - name: Load docker image
        run: |
          skopeo copy --insecure-policy \
            dir:/home/runner/artifacts/cli-docker-image-x86_64-0.0.0-dev \
            docker-daemon:cli:0.0.0-dev

      - name: "Create a new project"
        run: |
          export NHOST_DOMAIN=staging.nhost.run
          export NHOST_CONFIGSERVER_IMAGE=cli:0.0.0-dev

          unzip /home/runner/artifacts/cli-artifact-x86_64-0.0.0-dev/result.zip

          mkdir new-project
          cd new-project
          /home/runner/_work/nhost/nhost/cli/result/bin/cli login --pat ${{ secrets.NHOST_PAT }}
          /home/runner/_work/nhost/nhost/cli/result/bin/cli init
          /home/runner/_work/nhost/nhost/cli/result/bin/cli up --down-on-error
          /home/runner/_work/nhost/nhost/cli/result/bin/cli down --volumes

      - name: "Store Nix cache"
        uses: ./.github/actions/cache-nix
        with:
          NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
        if: always()
81
.github/workflows/codegen_checks.yaml
vendored
@@ -1,81 +0,0 @@
|
||||
---
|
||||
name: "codegen: check and build"
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/codegen_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common go
|
||||
- '.golangci.yaml'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'vendor/**'
|
||||
|
||||
# codegen
|
||||
- 'tools/codegen/**'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.pull_request.number) || format('push-{0}', github.sha) }}
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: codegen
|
||||
PATH: tools/codegen
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: codegen
|
||||
PATH: tools/codegen
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: false
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
56
.github/workflows/codeql-analysis.yml
vendored
Normal file
@@ -0,0 +1,56 @@
name: "CodeQL"

on:
  push: {}
  pull_request: {}
  schedule:
    - cron: '20 23 * * 3'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'javascript' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://git.io/codeql-language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      # and modify them (or add more) to build your code if your project
      # uses a compiled language

      #- run: |
      #   make bootstrap
      #   make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
49
.github/workflows/dashboard.yaml
vendored
Normal file
@@ -0,0 +1,49 @@
name: 'Dashboard'

on:
  workflow_call:

env:
  TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
  TURBO_TEAM: nhost
  NEXT_PUBLIC_ENV: dev
  NEXT_TELEMETRY_DISABLED: 1

jobs:
  build:
    name: Build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install Node and dependencies
        uses: ./.github/actions/install-dependencies
        with:
          TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
          TURBO_TEAM: ${{ env.TURBO_TEAM }}
      - name: Build the application
        run: pnpm build:dashboard

  tests:
    name: Tests
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install Node and dependencies
        uses: ./.github/actions/install-dependencies
        with:
          TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
          TURBO_TEAM: ${{ env.TURBO_TEAM }}
      - name: Run tests
        run: pnpm test:dashboard

  lint:
    name: Lint
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install Node and dependencies
        uses: ./.github/actions/install-dependencies
        with:
          TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
          TURBO_TEAM: ${{ env.TURBO_TEAM }}
      - run: pnpm lint:dashboard
143
.github/workflows/dashboard_checks.yaml
vendored
@@ -1,143 +0,0 @@
|
||||
---
|
||||
name: "dashboard: check and build"
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- '.github/workflows/wf_build_artifacts.yaml'
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/dashboard_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
# dashboard
|
||||
- "dashboard/**"
|
||||
|
||||
# nhost-js
|
||||
- packages/nhost-js/**
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.pull_request.number) || format('push-{0}', github.sha) }}
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
deploy-vercel:
|
||||
uses: ./.github/workflows/wf_deploy_vercel.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: dashboard
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
ENVIRONMENT: preview
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
VERCEL_TEAM_ID: ${{ secrets.DASHBOARD_VERCEL_TEAM_ID }}
|
||||
VERCEL_PROJECT_ID: ${{ secrets.DASHBOARD_STAGING_VERCEL_PROJECT_ID }}
|
||||
VERCEL_DEPLOY_TOKEN: ${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: dashboard
|
||||
PATH: dashboard
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: true
|
||||
OS_MATRIX: '["blacksmith-2vcpu-ubuntu-2404"]'
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
- build_artifacts
|
||||
with:
|
||||
NAME: dashboard
|
||||
PATH: dashboard
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
|
||||
e2e_staging:
|
||||
uses: ./.github/workflows/dashboard_wf_e2e_staging.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
- deploy-vercel
|
||||
- build_artifacts
|
||||
with:
|
||||
NAME: dashboard
|
||||
PATH: dashboard
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
NHOST_TEST_DASHBOARD_URL: ${{ needs.deploy-vercel.outputs.preview-url }}
|
||||
NHOST_TEST_PROJECT_NAME: ${{ vars.NHOST_TEST_PROJECT_NAME }}
|
||||
NHOST_TEST_ORGANIZATION_NAME: ${{ vars.NHOST_TEST_ORGANIZATION_NAME }}
|
||||
NHOST_TEST_ORGANIZATION_SLUG: ${{ vars.NHOST_TEST_ORGANIZATION_SLUG }}
|
||||
NHOST_TEST_PERSONAL_ORG_SLUG: ${{ vars.NHOST_TEST_PERSONAL_ORG_SLUG }}
|
||||
NHOST_TEST_PROJECT_SUBDOMAIN: ${{ vars.NHOST_TEST_PROJECT_SUBDOMAIN }}
|
||||
NHOST_TEST_PROJECT_REMOTE_SCHEMA_NAME: ${{ vars.NHOST_TEST_PROJECT_REMOTE_SCHEMA_NAME }}
|
||||
NHOST_PRO_TEST_PROJECT_NAME: ${{ vars.NHOST_PRO_TEST_PROJECT_NAME }}
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
DASHBOARD_VERCEL_DEPLOY_TOKEN: ${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
|
||||
DASHBOARD_VERCEL_TEAM_ID: ${{ secrets.DASHBOARD_VERCEL_TEAM_ID }}
|
||||
DASHBOARD_STAGING_VERCEL_PROJECT_ID: ${{ secrets.DASHBOARD_STAGING_VERCEL_PROJECT_ID }}
|
||||
NHOST_TEST_USER_EMAIL: ${{ secrets.NHOST_TEST_USER_EMAIL }}
|
||||
NHOST_TEST_USER_PASSWORD: ${{ secrets.NHOST_TEST_USER_PASSWORD }}
|
||||
NHOST_TEST_PROJECT_ADMIN_SECRET: ${{ secrets.NHOST_TEST_PROJECT_ADMIN_SECRET }}
|
||||
NHOST_TEST_ONBOARDING_USER: ${{ secrets.NHOST_TEST_ONBOARDING_USER }}
|
||||
PLAYWRIGHT_REPORT_ENCRYPTION_KEY: ${{ secrets.PLAYWRIGHT_REPORT_ENCRYPTION_KEY }}
|
||||
NHOST_TEST_STAGING_SUBDOMAIN: ${{ secrets.NHOST_TEST_STAGING_SUBDOMAIN }}
|
||||
NHOST_TEST_STAGING_REGION: ${{ secrets.NHOST_TEST_STAGING_REGION }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
49
.github/workflows/dashboard_release_staging.yaml
vendored
@@ -1,49 +0,0 @@
|
||||
---
|
||||
name: "dashboard: release staging"
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- '.github/workflows/wf_build_artifacts.yaml'
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/dashboard_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
# dashboard
|
||||
- "dashboard/**"
|
||||
|
||||
# nhost-js
|
||||
- packages/nhost-js/**
|
||||
|
||||
jobs:
|
||||
deploy-vercel:
|
||||
uses: ./.github/workflows/wf_deploy_vercel.yaml
|
||||
with:
|
||||
NAME: dashboard
|
||||
GIT_REF: ${{ github.sha }}
|
||||
ENVIRONMENT: production
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
VERCEL_TEAM_ID: ${{ secrets.DASHBOARD_VERCEL_TEAM_ID }}
|
||||
VERCEL_PROJECT_ID: ${{ secrets.DASHBOARD_STAGING_VERCEL_PROJECT_ID }}
|
||||
VERCEL_DEPLOY_TOKEN: ${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_STAGING }}
|
||||
162
.github/workflows/dashboard_wf_e2e_staging.yaml
vendored
@@ -1,162 +0,0 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
type: string
|
||||
required: true
|
||||
PATH:
|
||||
type: string
|
||||
required: true
|
||||
GIT_REF:
|
||||
type: string
|
||||
required: false
|
||||
NHOST_TEST_DASHBOARD_URL:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_TEST_PROJECT_NAME:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_TEST_ORGANIZATION_NAME:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_TEST_ORGANIZATION_SLUG:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_TEST_PERSONAL_ORG_SLUG:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_TEST_PROJECT_SUBDOMAIN:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_TEST_PROJECT_REMOTE_SCHEMA_NAME:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_PRO_TEST_PROJECT_NAME:
|
||||
type: string
|
||||
required: true
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
DASHBOARD_VERCEL_DEPLOY_TOKEN:
|
||||
required: true
|
||||
DASHBOARD_VERCEL_TEAM_ID:
|
||||
required: true
|
||||
DASHBOARD_STAGING_VERCEL_PROJECT_ID:
|
||||
required: true
|
||||
NHOST_TEST_USER_EMAIL:
|
||||
required: true
|
||||
NHOST_TEST_USER_PASSWORD:
|
||||
required: true
|
||||
NHOST_TEST_ONBOARDING_USER:
|
||||
required: true
|
||||
NHOST_TEST_PROJECT_ADMIN_SECRET:
|
||||
required: true
|
||||
PLAYWRIGHT_REPORT_ENCRYPTION_KEY:
|
||||
required: true
|
||||
NHOST_TEST_STAGING_SUBDOMAIN:
|
||||
required: true
|
||||
NHOST_TEST_STAGING_REGION:
|
||||
required: true
|
||||
|
||||
concurrency:
|
||||
group: dashboard-e2e-staging
|
||||
cancel-in-progress: false
|
||||
|
||||
env:
|
||||
NEXT_PUBLIC_ENV: dev
|
||||
NEXT_TELEMETRY_DISABLED: 1
|
||||
NHOST_TEST_DASHBOARD_URL: ${{ inputs.NHOST_TEST_DASHBOARD_URL }}
|
||||
NHOST_TEST_PROJECT_NAME: ${{ inputs.NHOST_TEST_PROJECT_NAME }}
|
||||
NHOST_TEST_ORGANIZATION_NAME: ${{ inputs.NHOST_TEST_ORGANIZATION_NAME }}
|
||||
NHOST_TEST_ORGANIZATION_SLUG: ${{ inputs.NHOST_TEST_ORGANIZATION_SLUG }}
|
||||
NHOST_TEST_PERSONAL_ORG_SLUG: ${{ inputs.NHOST_TEST_PERSONAL_ORG_SLUG }}
|
||||
NHOST_TEST_PROJECT_SUBDOMAIN: ${{ inputs.NHOST_TEST_PROJECT_SUBDOMAIN }}
|
||||
NHOST_TEST_PROJECT_REMOTE_SCHEMA_NAME: ${{ inputs.NHOST_TEST_PROJECT_REMOTE_SCHEMA_NAME }}
|
||||
NHOST_PRO_TEST_PROJECT_NAME: ${{ inputs.NHOST_PRO_TEST_PROJECT_NAME }}
|
||||
NHOST_TEST_USER_EMAIL: ${{ secrets.NHOST_TEST_USER_EMAIL }}
|
||||
NHOST_TEST_USER_PASSWORD: ${{ secrets.NHOST_TEST_USER_PASSWORD }}
|
||||
NHOST_TEST_PROJECT_ADMIN_SECRET: ${{ secrets.NHOST_TEST_PROJECT_ADMIN_SECRET }}
|
||||
NHOST_TEST_ONBOARDING_USER: ${{ secrets.NHOST_TEST_ONBOARDING_USER }}
|
||||
NHOST_TEST_STAGING_SUBDOMAIN: ${{ secrets.NHOST_TEST_STAGING_SUBDOMAIN }}
|
||||
NHOST_TEST_STAGING_REGION: ${{ secrets.NHOST_TEST_STAGING_REGION }}
|
||||
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 30
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.PATH }}
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
actions: read
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
|
||||
- name: Collect Workflow Telemetry
|
||||
uses: catchpoint/workflow-telemetry-action@v2
|
||||
with:
|
||||
comment_on_pr: false
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v5
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- name: Setup Nix with Cache
|
||||
uses: ./.github/actions/setup-nix
|
||||
with:
|
||||
NAME: ${{ inputs.NAME }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Start CLI
|
||||
run: |
|
||||
nix develop .\#dashboard -c make dev-env-cli-up
|
||||
|
||||
- name: Run e2e tests
|
||||
run: nix develop .\#dashboard -c pnpm e2e
|
||||
|
||||
- name: Run e2e onboarding tests
|
||||
run: nix develop .\#dashboard -c pnpm e2e:onboarding
|
||||
|
||||
- name: Run e2e local tests
|
||||
run: nix develop .\#dashboard -c pnpm e2e:local
|
||||
|
||||
- name: Encrypt Playwright report
|
||||
if: failure()
|
||||
run: |
|
||||
tar -czf playwright-report.tar.gz playwright-report/
|
||||
openssl enc -aes-256-cbc -salt -pbkdf2 -iter 100000 \
|
||||
-in playwright-report.tar.gz \
|
||||
-out playwright-report.tar.gz.enc \
|
||||
-k "${{ secrets.PLAYWRIGHT_REPORT_ENCRYPTION_KEY }}"
|
||||
rm playwright-report.tar.gz
|
||||
|
||||
- name: Upload encrypted Playwright report
|
||||
uses: actions/upload-artifact@v5
|
||||
if: failure()
|
||||
with:
|
||||
name: encrypted-playwright-report-${{ github.run_id }}
|
||||
path: dashboard/playwright-report.tar.gz.enc
|
||||
retention-days: 1
|
||||
|
||||
- name: "Store Nix cache"
|
||||
uses: ./.github/actions/cache-nix
|
||||
with:
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
if: always()
|
||||
105
.github/workflows/dashboard_wf_release.yaml
vendored
@@ -1,105 +0,0 @@
|
||||
---
|
||||
name: 'dashboard: release'
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
GIT_REF:
|
||||
required: true
|
||||
type: string
|
||||
VERSION:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
VERCEL_TEAM_ID:
|
||||
required: true
|
||||
VERCEL_PROJECT_ID:
|
||||
required: true
|
||||
VERCEL_DEPLOY_TOKEN:
|
||||
required: true
|
||||
DISCORD_WEBHOOK:
|
||||
required: false
|
||||
GH_PAT:
|
||||
required: true
|
||||
DOCKER_USERNAME:
|
||||
required: true
|
||||
DOCKER_PASSWORD:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
deploy-vercel:
|
||||
uses: ./.github/workflows/wf_deploy_vercel.yaml
|
||||
with:
|
||||
NAME: dashboard
|
||||
GIT_REF: ${{ inputs.GIT_REF }}
|
||||
ENVIRONMENT: production
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
VERCEL_TEAM_ID: ${{ secrets.VERCEL_TEAM_ID }}
|
||||
VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }}
|
||||
VERCEL_DEPLOY_TOKEN: ${{ secrets.VERCEL_DEPLOY_TOKEN }}
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
with:
|
||||
NAME: dashboard
|
||||
PATH: dashboard
|
||||
GIT_REF: ${{ inputs.GIT_REF }}
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
DOCKER: true
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
push-docker:
|
||||
uses: ./.github/workflows/wf_docker_push_image.yaml
|
||||
needs:
|
||||
- build_artifacts
|
||||
with:
|
||||
NAME: dashboard
|
||||
PATH: dashboard
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
bump-cli:
|
||||
name: Bump Dashboard version in the Nhost CLI
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- push-docker
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
token: ${{ secrets.GH_PAT }}
|
||||
|
||||
- name: Bump version in source code
|
||||
run: |
|
||||
find cli -type f -exec sed -i 's/"nhost\/dashboard:[^"]*"/"nhost\/dashboard:${{ inputs.VERSION }}"/g' {} +
|
||||
sed -i 's/nhost\/dashboard:[^)]*/nhost\/dashboard:${{ inputs.VERSION }}/g' docs/reference/cli/commands.mdx
|
||||
|
||||
- name: "Create Pull Request"
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GH_PAT }}
|
||||
title: "chore(cli): bump nhost/dashboard to ${{ inputs.VERSION }}"
|
||||
commit-message: "chore: bump nhost/dashboard to ${{ inputs.VERSION }}"
|
||||
committer: GitHub <noreply@github.com>
|
||||
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
|
||||
body: |
|
||||
This PR bumps the Nhost Dashboard Docker image to version ${{ inputs.VERSION }}.
|
||||
branch: bump-dashboard-version
|
||||
base: main
|
||||
delete-branch: true
|
||||
58
.github/workflows/deploy-dashboard.yaml
vendored
Normal file
@@ -0,0 +1,58 @@
name: 'dashboard: release form'

on:
  workflow_dispatch:
    inputs:
      git_ref:
        type: string
        description: 'Branch, tag, or commit SHA'
        required: true

      environment:
        type: choice
        description: 'Deployment environment'
        required: true
        default: staging
        options:
          - staging
          - production

  workflow_call:
    inputs:
      git_ref:
        required: true
        type: string
      environment:
        required: true
        type: string

jobs:
  publish-vercel:
    name: Publish to Vercel
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ inputs.git_ref }}
          fetch-depth: 0

      - name: Install Node and dependencies
        uses: ./.github/actions/install-dependencies
        with:
          TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
          TURBO_TEAM: ${{ secrets.TURBO_TEAM }}

      - name: Setup Vercel CLI
        run: pnpm add -g vercel

      - name: Trigger Vercel deployment
        env:
          VERCEL_ORG_ID: ${{ secrets.DASHBOARD_VERCEL_TEAM_ID }}
          VERCEL_PROJECT_ID: ${{ inputs.environment == 'production' && secrets.DASHBOARD_VERCEL_PROJECT_ID || secrets.DASHBOARD_STAGING_VERCEL_PROJECT_ID }}
        run: |
          echo "Deploying to: ${{ inputs.environment }}..."
          vercel pull --environment=production --token=${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
          vercel build --prod --token=${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
          vercel deploy --prebuilt --prod --token=${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
81
.github/workflows/docs_checks.yaml
vendored
@@ -1,81 +0,0 @@
|
||||
---
|
||||
name: "docs: check and build"
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/dashboard_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
# docs
|
||||
- docs/**
|
||||
|
||||
# nhost-js
|
||||
- packages/nhost-js/**
|
||||
|
||||
# apis
|
||||
- 'services/auth/docs/openapi.yaml'
|
||||
- 'services/storage/controller/openapi.yaml'
|
||||
|
||||
# cli
|
||||
- cli/**
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.pull_request.number) || format('push-{0}', github.sha) }}
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: docs
|
||||
PATH: docs
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
98
.github/workflows/examples_demos_checks.yaml
vendored
@@ -1,98 +0,0 @@
|
||||
---
|
||||
name: "examples/demos: check and build"
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/examples_demos_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common go
|
||||
- '.golangci.yaml'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'vendor/**'
|
||||
|
||||
# codegen
|
||||
- 'tools/codegen/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
# nhost-js
|
||||
- 'packages/nhost-js/**'
|
||||
|
||||
# demos
|
||||
- 'examples/demos/**'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.pull_request.number) || format('push-{0}', github.sha) }}
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: demos
|
||||
PATH: examples/demos
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: demos
|
||||
PATH: examples/demos
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: false
|
||||
OS_MATRIX: '["blacksmith-2vcpu-ubuntu-2404"]'
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
98
.github/workflows/examples_guides_checks.yaml
vendored
@@ -1,98 +0,0 @@
|
||||
---
|
||||
name: "examples/guides: check and build"
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/examples_guides_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common go
|
||||
- '.golangci.yaml'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'vendor/**'
|
||||
|
||||
# codegen
|
||||
- 'tools/codegen/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
# nhost-js
|
||||
- 'packages/nhost-js/**'
|
||||
|
||||
# guides
|
||||
- 'examples/guides/**'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.pull_request.number) || format('push-{0}', github.sha) }}
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: guides
|
||||
PATH: examples/guides
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: guides
|
||||
PATH: examples/guides
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: false
|
||||
OS_MATRIX: '["blacksmith-2vcpu-ubuntu-2404"]'
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
98
.github/workflows/examples_tutorials_checks.yaml
vendored
@@ -1,98 +0,0 @@
|
||||
---
|
||||
name: "examples/tutorials: check and build"
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/examples_tutorials_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common go
|
||||
- '.golangci.yaml'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'vendor/**'
|
||||
|
||||
# codegen
|
||||
- 'tools/codegen/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
# nhost-js
|
||||
- 'packages/nhost-js/**'
|
||||
|
||||
# tutorials
|
||||
- 'examples/tutorials/**'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.pull_request.number) || format('push-{0}', github.sha) }}
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: tutorials
|
||||
PATH: examples/tutorials
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: tutorials
|
||||
PATH: examples/tutorials
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: false
|
||||
OS_MATRIX: '["blacksmith-2vcpu-ubuntu-2404"]'
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
15
.github/workflows/gen_ai_review.yaml
vendored
@@ -1,13 +1,13 @@
---
name: "gen: AI review"
on:
  pull_request_target:
  pull_request:
    types: [opened, reopened, ready_for_review]
  issue_comment:
jobs:
  pr_agent_job:
    if: ${{ github.event.sender.type != 'Bot' }}
    runs-on: blacksmith-2vcpu-ubuntu-2404
    runs-on: ubuntu-latest
    timeout-minutes: 10
    permissions:
      issues: write
@@ -16,13 +16,12 @@ jobs:
    steps:
      - name: PR Agent action step
        id: pragent
        uses: Codium-ai/pr-agent@v0.30
        uses: Codium-ai/pr-agent@v0.26
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          OPENAI_KEY: ${{ secrets.OPENAI_API_KEY }}
          ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
          config.model: ${{ vars.GEN_AI_MODEL }}
          config.model_turbo: ${{ vars.GEN_AI_MODEL_TURBO }}
          config.max_model_tokens: 200000
          config.custom_model_max_tokens: 200000
          ignore.glob: "['pnpm-lock.yaml','**/pnpm-lock.yaml', 'vendor/**','**/client_gen.go','**/models_gen.go','**/generated.go','**/*.gen.go']"
          config.max_model_tokens: 100000
          config.model: "anthropic/claude-3-5-sonnet-20240620"
          config.model_turbo: "anthropic/claude-3-5-sonnet-20240620"
          ignore.glob: "['pnpm-lock.yaml','**/pnpm-lock.yaml']"
54
.github/workflows/gen_codeql-analysis.yml
vendored
@@ -1,54 +0,0 @@
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '20 23 * * 3'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'javascript', 'go' ]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Learn more about CodeQL language support at https://git.io/codeql-language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v4
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v4
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
|
||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
||||
# and modify them (or add more) to build your code if your project
|
||||
# uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v4
|
||||
10
.github/workflows/gen_schedule_update_deps.yaml
vendored
@@ -15,15 +15,17 @@ jobs:

    steps:
      - name: Check out repository
        uses: actions/checkout@v5
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Configure aws
        uses: aws-actions/configure-aws-credentials@v5
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
          aws-region: eu-central-1

      - uses: nixbuild/nix-quick-install-action@v34
      - uses: nixbuild/nix-quick-install-action@v26
        with:
          nix_version: 2.16.2
          nix_conf: |
@@ -51,7 +53,7 @@
          "

      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v7
        uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: Update dependencies
15
.github/workflows/labeler.yaml
vendored
Normal file
@@ -0,0 +1,15 @@
name: 'Pull Request Labeler'
on:
  - pull_request_target

jobs:
  triage:
    permissions:
      contents: read
      pull-requests: write
    runs-on: ubuntu-latest
    steps:
      - uses: actions/labeler@v4
        with:
          repo-token: '${{ secrets.GH_PAT }}'
          sync-labels: ''
98
.github/workflows/nhost-js_checks.yaml
vendored
@@ -1,98 +0,0 @@
|
||||
---
|
||||
name: "nhost-js: check and build"
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/nhost-js_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common go
|
||||
- '.golangci.yaml'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'vendor/**'
|
||||
|
||||
# codegen
|
||||
- 'tools/codegen/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
# nhost-js
|
||||
- 'packages/nhost-js/**'
|
||||
|
||||
# apis
|
||||
- 'services/auth/docs/openapi.yaml'
|
||||
- 'services/storage/controller/openapi.yaml'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.pull_request.number) || format('push-{0}', github.sha) }}
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: nhost-js
|
||||
PATH: packages/nhost-js
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: nhost-js
|
||||
PATH: packages/nhost-js
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: false
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
73
.github/workflows/nixops_checks.yaml
vendored
@@ -1,73 +0,0 @@
|
||||
---
|
||||
name: "nixops: check and build"
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/nixops_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.pull_request.number) || format('push-{0}', github.sha) }}
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: nixops
|
||||
PATH: nixops
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: nixops
|
||||
PATH: nixops
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: true
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
35
.github/workflows/nixops_wf_release.yaml
vendored
@@ -1,35 +0,0 @@
|
||||
---
|
||||
name: "nixops: release"
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'flake.lock'
|
||||
- 'nixops/project.nix'
|
||||
|
||||
jobs:
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
with:
|
||||
NAME: nixops
|
||||
PATH: nixops
|
||||
GIT_REF: ${{ inputs.GIT_REF }}
|
||||
VERSION: latest
|
||||
DOCKER: true
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
push-docker:
|
||||
uses: ./.github/workflows/wf_docker_push_image.yaml
|
||||
needs:
|
||||
- build_artifacts
|
||||
with:
|
||||
NAME: nixops
|
||||
PATH: nixops
|
||||
VERSION: latest
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
84
.github/workflows/storage_checks.yaml
vendored
@@ -1,84 +0,0 @@
|
||||
---
|
||||
name: "storage: check and build"
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- '.github/workflows/storage_checks.yaml'
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/wf_build_artifacts.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common go
|
||||
- '.golangci.yaml'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'internal/lib/**'
|
||||
- 'vendor/**'
|
||||
|
||||
# storage
|
||||
- 'services/storage/**'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && format('pr-{0}', github.event.pull_request.number) || format('push-{0}', github.sha) }}
|
||||
cancel-in-progress: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: storage
|
||||
PATH: services/storage
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
NHOST_PAT: ${{ secrets.NHOST_PAT }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: storage
|
||||
PATH: services/storage
|
||||
GIT_REF: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.sha || github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: true
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
60
.github/workflows/storage_wf_release.yaml
vendored
@@ -1,60 +0,0 @@
|
||||
---
|
||||
name: "storage: release"
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
GIT_REF:
|
||||
required: true
|
||||
type: string
|
||||
VERSION:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
DOCKER_USERNAME:
|
||||
required: true
|
||||
DOCKER_PASSWORD:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
with:
|
||||
NAME: storage
|
||||
PATH: services/storage
|
||||
GIT_REF: ${{ inputs.GIT_REF }}
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
DOCKER: true
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
push-docker-hub:
|
||||
uses: ./.github/workflows/wf_docker_push_image.yaml
|
||||
needs:
|
||||
- build_artifacts
|
||||
with:
|
||||
NAME: storage
|
||||
PATH: services/storage
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
push-docker-ecr:
|
||||
uses: ./.github/workflows/wf_docker_push_image_ecr.yaml
|
||||
needs:
|
||||
- build_artifacts
|
||||
with:
|
||||
NAME: storage
|
||||
PATH: services/storage
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
|
||||
CONTAINER_REGISTRY: ${{ secrets.AWS_ACCOUNT_ID }}.dkr.ecr.eu-central-1.amazonaws.com
|
||||
79
.github/workflows/test-nhost-cli-action.yaml
vendored
Normal file
79
.github/workflows/test-nhost-cli-action.yaml
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
name: Test Nhost CLI action
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- '.github/actions/nhost-cli/**'
|
||||
- '!.github/actions/nhost-cli/**/*.md'
|
||||
|
||||
jobs:
|
||||
install:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install the Nhost CLI
|
||||
uses: ./.github/actions/nhost-cli
|
||||
- name: should succeed running the nhost command
|
||||
run: nhost
|
||||
|
||||
start:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install the Nhost CLI and start the application
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
init: true
|
||||
start: true
|
||||
- name: should be running
|
||||
run: curl -sSf 'https://local.hasura.local.nhost.run/' > /dev/null
|
||||
|
||||
stop:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install the Nhost CLI, start and stop the application
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
init: true
|
||||
start: true
|
||||
stop: true
|
||||
- name: should have no live docker container
|
||||
run: |
|
||||
if [ -z "docker ps -q" ]; then
|
||||
echo "Some docker containers are still running"
|
||||
docker ps
|
||||
exit 1
|
||||
fi
|
||||
|
||||
config:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install the Nhost CLI and run the application
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
init: true
|
||||
version: v1.29.3
|
||||
start: true
|
||||
- name: should find the injected hasura-auth version
|
||||
run: |
|
||||
VERSION=$(curl -sSf 'https://local.auth.local.nhost.run/v1/version')
|
||||
EXPECTED_VERSION='{"version":"0.36.1"}'
|
||||
if [ "$VERSION" != "$EXPECTED_VERSION" ]; then
|
||||
echo "Expected version $EXPECTED_VERSION but got $VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
version:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install the Nhost CLI
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
version: v1.27.2
|
||||
- name: should find the correct version
|
||||
run: nhost --version | head -n 1 | grep v1.27.2 || exit 1
|
||||
114
.github/workflows/wf_build_artifacts.yaml
vendored
@@ -1,114 +0,0 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
type: string
|
||||
required: true
|
||||
GIT_REF:
|
||||
type: string
|
||||
required: false
|
||||
VERSION:
|
||||
type: string
|
||||
required: true
|
||||
PATH:
|
||||
type: string
|
||||
required: true
|
||||
DOCKER:
|
||||
type: boolean
|
||||
required: true
|
||||
OS_MATRIX:
|
||||
type: string
|
||||
required: false
|
||||
default: '["blacksmith-4vcpu-ubuntu-2404-arm", "blacksmith-2vcpu-ubuntu-2404"]'
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
artifacts:
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.PATH }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
os: ${{ fromJSON(inputs.OS_MATRIX) }}
|
||||
fail-fast: true
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 180
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
|
||||
- name: "Validate PR title"
|
||||
uses: ./.github/actions/validate-pr-title
|
||||
with:
|
||||
pr_title: ${{ github.event.pull_request.title }}
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v5
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- name: Setup Nix with Cache
|
||||
uses: ./.github/actions/setup-nix
|
||||
with:
|
||||
NAME: ${{ inputs.NAME }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Compute common env vars
|
||||
id: vars
|
||||
run: |
|
||||
echo "VERSION=$(make get-version VER=${{ inputs.VERSION }})" >> $GITHUB_OUTPUT
|
||||
ARCH=$([ "${{ runner.arch }}" == "X64" ] && echo "x86_64" || echo "aarch64")
|
||||
echo "ARCH=${ARCH}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: "Build artifact"
|
||||
run: |
|
||||
make build
|
||||
zip -r result.zip result
|
||||
|
||||
- name: "Push artifact to artifact repository"
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: ${{ inputs.NAME }}-artifact-${{ steps.vars.outputs.ARCH }}-${{ steps.vars.outputs.VERSION }}
|
||||
path: ${{ inputs.PATH }}/result.zip
|
||||
retention-days: 7
|
||||
|
||||
- name: "Build docker image"
|
||||
run: |
|
||||
sudo chmod 755 /run/containers
|
||||
sudo mkdir -p "/run/containers/$(id -u runner)"
|
||||
sudo chown runner: "/run/containers/$(id -u runner)"
|
||||
make build-docker-image
|
||||
if: ${{ ( inputs.DOCKER ) }}
|
||||
|
||||
- name: "Push docker image to artifact repository"
|
||||
uses: actions/upload-artifact@v5
|
||||
with:
|
||||
name: ${{ inputs.NAME }}-docker-image-${{ steps.vars.outputs.ARCH }}-${{ steps.vars.outputs.VERSION }}
|
||||
path: ${{ inputs.PATH }}/result
|
||||
retention-days: 7
|
||||
if: ${{ ( inputs.DOCKER ) }}
|
||||
|
||||
- name: "Store Nix cache"
|
||||
uses: ./.github/actions/cache-nix
|
||||
with:
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
if: always()
|
||||
95
.github/workflows/wf_check.yaml
vendored
@@ -1,95 +0,0 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
type: string
|
||||
required: true
|
||||
PATH:
|
||||
type: string
|
||||
required: true
|
||||
GIT_REF:
|
||||
type: string
|
||||
required: false
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
NHOST_PAT:
|
||||
required: false
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 30
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.PATH }}
|
||||
|
||||
env:
|
||||
NHOST_PAT: ${{ secrets.NHOST_PAT }}
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
actions: read
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
|
||||
- name: "Validate PR title"
|
||||
uses: ./.github/actions/validate-pr-title
|
||||
with:
|
||||
pr_title: ${{ github.event.pull_request.title }}
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
|
||||
- name: Collect Workflow Telemetry
|
||||
uses: catchpoint/workflow-telemetry-action@v2
|
||||
with:
|
||||
comment_on_pr: false
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v5
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- name: Setup Nix with Cache
|
||||
uses: ./.github/actions/setup-nix
|
||||
with:
|
||||
NAME: ${{ inputs.NAME }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: "Verify if we need to build"
|
||||
id: verify-build
|
||||
run: |
|
||||
export drvPath=$(make check-dry-run)
|
||||
echo "Derivation path: $drvPath"
|
||||
nix path-info --store s3://nhost-nix-cache\?region=eu-central-1 $drvPath \
|
||||
&& export BUILD_NEEDED=no \
|
||||
|| export BUILD_NEEDED=yes
|
||||
echo BUILD_NEEDED=$BUILD_NEEDED >> $GITHUB_OUTPUT
|
||||
echo DERIVATION_PATH=$drvPath >> $GITHUB_OUTPUT
|
||||
|
||||
- name: "Start containters for integration tests"
|
||||
run: |
|
||||
nix develop .\#${{ inputs.NAME }} -c make dev-env-up
|
||||
if: ${{ steps.verify-build.outputs.BUILD_NEEDED == 'yes' }}
|
||||
|
||||
- name: "Run checks"
|
||||
run: make check
|
||||
if: ${{ steps.verify-build.outputs.BUILD_NEEDED == 'yes' }}
|
||||
|
||||
- name: "Store Nix cache"
|
||||
uses: ./.github/actions/cache-nix
|
||||
with:
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
if: always()
|
||||
120
.github/workflows/wf_deploy_vercel.yaml
vendored
@@ -1,120 +0,0 @@
|
||||
name: 'deploy to vercel'
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
required: true
|
||||
type: string
|
||||
GIT_REF:
|
||||
required: true
|
||||
type: string
|
||||
ENVIRONMENT:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
VERCEL_TEAM_ID:
|
||||
required: true
|
||||
VERCEL_PROJECT_ID:
|
||||
required: true
|
||||
VERCEL_DEPLOY_TOKEN:
|
||||
required: true
|
||||
DISCORD_WEBHOOK:
|
||||
required: false
|
||||
TURBO_TOKEN:
|
||||
required: true
|
||||
|
||||
outputs:
|
||||
preview-url:
|
||||
description: "The preview URL from Vercel deployment"
|
||||
value: ${{ jobs.publish-vercel.outputs.preview-url }}
|
||||
|
||||
jobs:
|
||||
publish-vercel:
|
||||
name: Publish to Vercel
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
outputs:
|
||||
preview-url: ${{ steps.deploy.outputs.preview-url }}
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
actions: read
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v5
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- name: Setup Nix with Cache
|
||||
uses: ./.github/actions/setup-nix
|
||||
with:
|
||||
NAME: ${{ inputs.NAME }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Trigger Vercel deployment
|
||||
id: deploy
|
||||
env:
|
||||
VERCEL_ORG_ID: ${{ secrets.VERCEL_TEAM_ID }}
|
||||
VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }}
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: nhost
|
||||
run: |
|
||||
TARGET_OPTS="--target=${{ inputs.ENVIRONMENT }}"
|
||||
echo "Deploying to: ${{ inputs.ENVIRONMENT }}..."
|
||||
nix develop .\#vercel -c \
|
||||
vercel pull --environment=${{ inputs.ENVIRONMENT }} --token=${{ secrets.VERCEL_DEPLOY_TOKEN }}
|
||||
nix develop .\#vercel -c \
|
||||
vercel build $TARGET_OPTS --token=${{ secrets.VERCEL_DEPLOY_TOKEN }}
|
||||
nix develop .\#vercel -c \
|
||||
vercel deploy $TARGET_OPTS --prebuilt --token=${{ secrets.VERCEL_DEPLOY_TOKEN }} | tee /tmp/vercel_output
|
||||
|
||||
PREVIEW_URL=$(cat /tmp/vercel_output)
|
||||
echo "\n🔗🔗🔗 Preview URL: $PREVIEW_URL"
|
||||
echo "preview-url=$PREVIEW_URL" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: marocchino/sticky-pull-request-comment@v2
|
||||
with:
|
||||
header: "vercel-${{ inputs.NAME }}-${{ inputs.ENVIRONMENT }}"
|
||||
message: |
|
||||
# Vercel Deployment Info - ${{ inputs.NAME }}
|
||||
|
||||
* URL: ${{ steps.deploy.outputs.preview-url }}
|
||||
* Git Ref: `${{ inputs.GIT_REF }}`
|
||||
* Commit: `${{ github.event.pull_request.head.sha || github.sha }}`
|
||||
|
||||
if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target'
|
||||
|
||||
- name: Send Discord notification
|
||||
if: always()
|
||||
uses: ./.github/actions/discord-notification
|
||||
with:
|
||||
webhook-url: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
title: "Deployed ${{ inputs.NAME }} to Vercel"
|
||||
description: |
|
||||
**Environment**: ${{ inputs.ENVIRONMENT }}
|
||||
**URL**: ${{ steps.deploy.outputs.preview-url }}
|
||||
**Triggered by**: ${{ github.actor }}
|
||||
**Status**: ${{ job.status }}
|
||||
|
||||
**Details**:
|
||||
- Git Ref: ${{ inputs.GIT_REF }}
|
||||
- Commit: ${{ github.event.pull_request.head.sha || github.sha }}
|
||||
color: ${{ job.status == 'success' && '5763719' || '15548997' }}
|
||||
|
||||
- run: rm -rf .vercel
|
||||
if: always()
|
||||
79
.github/workflows/wf_docker_push_image.yaml
vendored
@@ -1,79 +0,0 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
type: string
|
||||
required: true
|
||||
PATH:
|
||||
type: string
|
||||
required: true
|
||||
VERSION:
|
||||
type: string
|
||||
required: true
|
||||
|
||||
secrets:
|
||||
DOCKER_USERNAME:
|
||||
required: true
|
||||
DOCKER_PASSWORD:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
push-to-registry:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.PATH }}
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: true
|
||||
|
||||
- name: "Compute common env vars"
|
||||
id: vars
|
||||
run: |
|
||||
echo "VERSION=$(make get-version VER=${{ inputs.VERSION }})" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: "Get artifacts"
|
||||
uses: actions/download-artifact@v6
|
||||
with:
|
||||
path: ~/artifacts
|
||||
|
||||
- name: "Inspect artifacts"
|
||||
run: find ~/artifacts
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: "Push docker image to docker hub"
|
||||
run: |
|
||||
export NAME=${{ inputs.NAME }}
|
||||
export VERSION=${{ steps.vars.outputs.VERSION }}
|
||||
export CONTAINER_REGISTRY=nhost
|
||||
export CONTAINER_NAME=$CONTAINER_REGISTRY/$NAME
|
||||
|
||||
for ARCH in "x86_64" "aarch64"; do
|
||||
skopeo copy --insecure-policy \
|
||||
dir:/home/runner/artifacts/${{ inputs.NAME }}-docker-image-$ARCH-$VERSION \
|
||||
docker-daemon:$CONTAINER_NAME:$VERSION-$ARCH
|
||||
docker push $CONTAINER_NAME:$VERSION-$ARCH
|
||||
done
|
||||
|
||||
docker manifest create \
|
||||
$CONTAINER_NAME:$VERSION \
|
||||
--amend $CONTAINER_NAME:$VERSION-x86_64 \
|
||||
--amend $CONTAINER_NAME:$VERSION-aarch64
|
||||
|
||||
docker manifest push $CONTAINER_NAME:$VERSION
|
||||
84
.github/workflows/wf_docker_push_image_ecr.yaml
vendored
@@ -1,84 +0,0 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
type: string
|
||||
required: true
|
||||
PATH:
|
||||
type: string
|
||||
required: true
|
||||
VERSION:
|
||||
type: string
|
||||
required: true
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
CONTAINER_REGISTRY:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
push-to-registry:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.PATH }}
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: true
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v5
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- name: "Login to Amazon ECR"
|
||||
uses: aws-actions/amazon-ecr-login@v2
|
||||
with:
|
||||
mask-password: 'true'
|
||||
|
||||
- name: "Compute common env vars"
|
||||
id: vars
|
||||
run: |
|
||||
echo "VERSION=$(make get-version VER=${{ inputs.VERSION }})" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: "Get artifacts"
|
||||
uses: actions/download-artifact@v6
|
||||
with:
|
||||
path: ~/artifacts
|
||||
|
||||
- name: "Inspect artifacts"
|
||||
run: find ~/artifacts
|
||||
|
||||
- name: "Push docker image to docker hub"
|
||||
run: |
|
||||
export NAME=${{ inputs.NAME }}
|
||||
export VERSION=${{ steps.vars.outputs.VERSION }}
|
||||
export CONTAINER_REGISTRY=${{ secrets.CONTAINER_REGISTRY }}
|
||||
export CONTAINER_NAME=$CONTAINER_REGISTRY/$NAME
|
||||
|
||||
for ARCH in "x86_64" "aarch64"; do
|
||||
skopeo copy --insecure-policy \
|
||||
dir:/home/runner/artifacts/${{ inputs.NAME }}-docker-image-$ARCH-$VERSION \
|
||||
docker-daemon:$CONTAINER_NAME:$VERSION-$ARCH
|
||||
docker push $CONTAINER_NAME:$VERSION-$ARCH
|
||||
done
|
||||
|
||||
docker manifest create \
|
||||
$CONTAINER_NAME:$VERSION \
|
||||
--amend $CONTAINER_NAME:$VERSION-x86_64 \
|
||||
--amend $CONTAINER_NAME:$VERSION-aarch64
|
||||
|
||||
docker manifest push $CONTAINER_NAME:$VERSION
|
||||
95
.github/workflows/wf_release_npm.yaml
vendored
@@ -1,95 +0,0 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
type: string
|
||||
required: true
|
||||
PATH:
|
||||
type: string
|
||||
required: true
|
||||
VERSION:
|
||||
type: string
|
||||
required: true
|
||||
secrets:
|
||||
NPM_TOKEN:
|
||||
required: true
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
DISCORD_WEBHOOK:
|
||||
required: false
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 30
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.PATH }}
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
actions: read
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v5
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- name: Setup Nix with Cache
|
||||
uses: ./.github/actions/setup-nix
|
||||
with:
|
||||
NAME: ${{ inputs.NAME }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: "Build package"
|
||||
run: make build
|
||||
|
||||
- name: "Copy build output"
|
||||
run: cp -r result/dist .
|
||||
|
||||
- name: "Set package version"
|
||||
run: |
|
||||
nix develop .#pnpm -c pnpm version ${{ inputs.VERSION }} --no-git-tag-version
|
||||
|
||||
- name: "Determine npm tag"
|
||||
id: npm-tag
|
||||
run: |
|
||||
if [[ "${{ inputs.VERSION }}" =~ (alpha|beta|dev|rc) ]]; then
|
||||
echo "tag=beta" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "tag=latest" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: "Publish to npm"
|
||||
run: |
|
||||
echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc
|
||||
nix develop .#pnpm -c pnpm publish --tag ${{ steps.npm-tag.outputs.tag }} --no-git-checks
|
||||
|
||||
- name: Send Discord notification
|
||||
if: always()
|
||||
uses: ./.github/actions/discord-notification
|
||||
with:
|
||||
webhook-url: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
title: "Published ${{ inputs.NAME }}@${{ inputs.VERSION }} to npm"
|
||||
description: |
|
||||
**Status**: ${{ job.status }}
|
||||
**Tag**: ${{ steps.npm-tag.outputs.tag }}
|
||||
**Triggered by**: ${{ github.actor }}
|
||||
|
||||
**Details**:
|
||||
- Version: ${{ inputs.VERSION }}
|
||||
- Package: ${{ inputs.NAME }}
|
||||
color: ${{ job.status == 'success' && '5763719' || '15548997' }}
|
||||
8
.gitignore
vendored
@@ -65,11 +65,3 @@ out/
|
||||
.direnv/
|
||||
|
||||
/.vscode/
|
||||
|
||||
result
|
||||
|
||||
.vitest
|
||||
|
||||
.claude
|
||||
|
||||
letsencrypt/*
|
||||
|
||||
@@ -1,78 +0,0 @@
|
||||
version: "2"
|
||||
issues:
|
||||
max-issues-per-linter: 0
|
||||
max-same-issues: 0
|
||||
linters:
|
||||
default: all
|
||||
settings:
|
||||
funlen:
|
||||
lines: 65
|
||||
wsl_v5:
|
||||
allow-whole-block: true
|
||||
disable:
|
||||
- canonicalheader
|
||||
- depguard
|
||||
- gomoddirectives
|
||||
- musttag
|
||||
- nlreturn
|
||||
- tagliatelle
|
||||
- varnamelen
|
||||
- wsl
|
||||
- noinlineerr
|
||||
- funcorder
|
||||
exclusions:
|
||||
generated: lax
|
||||
presets:
|
||||
- comments
|
||||
- common-false-positives
|
||||
- legacy
|
||||
- std-error-handling
|
||||
rules:
|
||||
# general rules
|
||||
- linters:
|
||||
- funlen
|
||||
- ireturn
|
||||
- goconst
|
||||
path: _test\.go
|
||||
- linters:
|
||||
- lll
|
||||
source: '^//go:generate '
|
||||
- linters:
|
||||
- gochecknoglobals
|
||||
text: Version is a global variable
|
||||
- linters:
|
||||
- ireturn
|
||||
- lll
|
||||
path: schema\.resolvers\.go
|
||||
|
||||
# storage service specific rules
|
||||
- linters:
|
||||
- gochecknoglobals
|
||||
- gochecknoinits
|
||||
path: services/storage/cmd/
|
||||
- linters:
|
||||
- gochecknoglobals
|
||||
path: services/storage/cmd/controller/version.go
|
||||
- linters:
|
||||
- funlen
|
||||
- ireturn
|
||||
- exhaustruct
|
||||
path: services/storage/.*_test\.go
|
||||
|
||||
paths:
|
||||
- third_party$
|
||||
- builtin$
|
||||
- examples$
|
||||
|
||||
formatters:
|
||||
enable:
|
||||
- gofmt
|
||||
- gofumpt
|
||||
- goimports
|
||||
exclusions:
|
||||
generated: lax
|
||||
paths:
|
||||
- third_party$
|
||||
- builtin$
|
||||
- examples$
|
||||
- schema\.resolvers\.go
|
||||
8
.npmrc
@@ -1,8 +1,2 @@
|
||||
prefer-workspace-packages = true
|
||||
auto-install-peers = true
|
||||
|
||||
# without this setting, pnpm breaks monorepos with multiple versions of the same package
|
||||
shared-workspace-lockfile = false
|
||||
|
||||
# do not enable back, this leads to unlisted dependencies being used
|
||||
hoist = false
|
||||
auto-install-peers = false
|
||||
@@ -16,15 +16,6 @@ Contributions are made to Nhost repos via Issues and Pull Requests (PRs). A few
|
||||
- We work hard to make sure issues are handled on time, but it could take a while to investigate the root cause depending on the impact. A friendly ping in the comment thread to the submitter or a contributor can help draw attention if your issue is blocking.
|
||||
- If you've never contributed before, see [the first-timer's guide](https://github.com/firstcontributions/first-contributions) for resources and tips on getting started.
|
||||
|
||||
### AI-Assisted Contributions
|
||||
|
||||
We have specific policies regarding AI-assisted contributions:
|
||||
|
||||
- **Issues**: Bug reports and feature requests that are clearly AI-generated will not be accepted and will be closed immediately. Please write your issues in your own words to ensure they are clear, specific, and contain the necessary context.
|
||||
- **Pull Requests**: Contributions with the help of AI are permitted, but you are ultimately responsible for the quality of your submission and for ensuring it follows our contributing guidelines. The PR description must be written in your own words. Additionally, please remove any superfluous code comments introduced by AI tools before submitting. PRs that clearly violate this rule will be closed without further review.
|
||||
|
||||
In all cases, contributors must ensure their submissions are thoughtful, well-tested, and meet the project's quality standards.
|
||||
|
||||
### Issues
|
||||
|
||||
Issues should be used to report problems with Nhost, request a new feature, or discuss potential changes before a PR is created.
|
||||
@@ -33,20 +24,28 @@ If you find an Issue that addresses the problem you're having, please add your r
|
||||
|
||||
### Pull Requests
|
||||
|
||||
Please have a look at our [developers guide](https://github.com/nhost/nhost/blob/main/DEVELOPERS.md) to start coding!
|
||||
|
||||
PRs to our libraries are always welcome and can be a quick way to get your fix or improvement slated for the next release. In general, PRs should:
|
||||
|
||||
## Monorepo Structure
|
||||
- Only fix/add the functionality in question **OR** address wide-spread whitespace/style issues, not both.
|
||||
- Add unit or integration tests for fixed or changed functionality (if a test suite exists).
|
||||
- Address a single concern in the least number of changed lines as possible.
|
||||
- Include documentation in the repo or on our [docs site](https://docs.nhost.io/get-started).
|
||||
- Be accompanied by a complete Pull Request template (loaded automatically when a PR is created).
|
||||
|
||||
This repository is a monorepo that contains multiple packages and applications. The structure is as follows:
|
||||
For changes that address core functionality or require breaking changes (e.g., a major release), it's best to open an Issue to discuss your proposal first. This is not required but can save time creating and reviewing changes.
|
||||
|
||||
- `cli` - The Nhost CLI
|
||||
- `dashboard` - The Nhost Dashboard
|
||||
- `docs` - Documentation
|
||||
- `examples` - Various example projects
|
||||
- `packages/nhost-js` - The Nhost JavaScript/TypeScript SDK
|
||||
- `services/auth` - Nhost Authentication service
|
||||
- `services/storage` - Nhost Storage service
|
||||
- `tools/codegen` - Internal code generation tool to build the SDK
|
||||
- `tools/mintlify-openapi` - Internal tool to generate reference documentation for Mintlify from an OpenAPI spec.
|
||||
In general, we follow the ["fork-and-pull" Git workflow](https://github.com/susam/gitpr); a command-line sketch follows the numbered list below.
|
||||
|
||||
For details about those projects and how to contribute, please refer to their respective `README.md` and `CONTRIBUTING.md` files.
|
||||
1. Fork the repository to your own Github account
|
||||
2. Clone the project to your machine
|
||||
3. Create a branch locally with a succinct but descriptive name. All changes should be part of a branch and submitted as a pull request - your branches should be prefixed with one of:
|
||||
- `bug/` for bug fixes
|
||||
- `feat/` for features
|
||||
- `chore/` for configuration changes
|
||||
- `docs/` for documentation changes
|
||||
4. Commit changes to the branch
|
||||
5. Follow any formatting and testing guidelines specific to this repo
|
||||
6. Push changes to your fork
|
||||
7. Open a PR in our repository and follow the PR template to review the changes efficiently.
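
For reference, here is a minimal command-line sketch of that workflow. The GitHub username (`your-username`) and the branch name are placeholders; adapt them to your fork and change.

```sh
# Clone your fork (created through the GitHub UI) and add the upstream remote
git clone https://github.com/your-username/nhost.git
cd nhost
git remote add upstream https://github.com/nhost/nhost.git

# Create a prefixed branch for your change
git checkout -b feat/my-change

# ...edit, test, and format your changes, then commit them...
git add .
git commit -m "feat(scope): short description of the change"

# Push the branch to your fork and open a PR against nhost/nhost on GitHub
git push -u origin feat/my-change
```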
|
||||
|
||||
163
DEVELOPERS.md
Normal file
@@ -0,0 +1,163 @@
|
||||
# Developer Guide
|
||||
|
||||
## Requirements
|
||||
|
||||
### Node.js v18
|
||||
|
||||
_⚠️ Node.js v16 is also supported for the time being but support will be dropped in the near future_.
|
||||
|
||||
### [pnpm](https://pnpm.io/) package manager
|
||||
|
||||
The easiest way to install `pnpm` if it's not installed on your machine yet is to use `npm`:
|
||||
|
||||
```sh
|
||||
$ npm install -g pnpm
|
||||
```
|
||||
|
||||
### [Nhost CLI](https://docs.nhost.io/cli)
|
||||
|
||||
- The CLI is primarily used for running the E2E tests
|
||||
- Please refer to the [installation guide](https://docs.nhost.io/get-started/cli-workflow/install-cli) if you have not installed it yet
|
||||
|
||||
## File Structure
|
||||
|
||||
The repository is organized as a monorepo, with the following structure (only relevant folders are shown):
|
||||
|
||||
```
|
||||
assets/ # Assets used in the README
|
||||
config/ # Configuration files for the monorepo
|
||||
dashboard/ # Dashboard
|
||||
docs/ # Documentation website
|
||||
examples/ # Example projects
|
||||
packages/ # Core packages
|
||||
integrations/ # These are packages that rely on the core packages
|
||||
```
|
||||
|
||||
## Get started
|
||||
|
||||
### Installation
|
||||
|
||||
First, clone this repository:
|
||||
|
||||
```sh
|
||||
git clone https://github.com/nhost/nhost
|
||||
```
|
||||
|
||||
Then, install the dependencies with `pnpm`:
|
||||
|
||||
```sh
|
||||
$ cd nhost
|
||||
$ pnpm install
|
||||
```
|
||||
|
||||
### Development
|
||||
|
||||
Although package references are updated on the fly for TypeScript, example projects and the dashboard won't see your changes because they depend on the build output. To fix this, run the packages in development mode.
|
||||
|
||||
Running packages in development mode from the root folder is as simple as:
|
||||
|
||||
```sh
|
||||
$ pnpm dev
|
||||
```
|
||||
|
||||
Our packages are linked together using [PNPM's workspace](https://pnpm.io/workspaces) feature. Next.js and Vite automatically detect changes in the dependencies and rebuild everything, so the changes will be reflected in the examples and the dashboard.
|
||||
|
||||
**Note:** It's possible that Next.js or Vite throw an error when you run `pnpm dev`. Restarting the process should fix it.
|
||||
|
||||
### Use Examples
|
||||
|
||||
Examples are a great way to test your changes in practice. Make sure `pnpm dev` is running in your terminal, then run an example.
|
||||
|
||||
Let's follow the instructions to run [react-apollo example](https://github.com/nhost/nhost/blob/main/examples/react-apollo/README.md).
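
In practice this usually looks something like the sketch below. The exact scripts vary per example, so treat the commands for the example itself as placeholders and check its README.

```sh
# Terminal 1: keep the packages building in watch mode from the repository root
pnpm dev

# Terminal 2: start an example (react-apollo shown here as an illustration;
# the install/start commands come from that example's README)
cd examples/react-apollo
pnpm install
pnpm dev
```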
|
||||
|
||||
## Edit Documentation
|
||||
|
||||
The easiest way to contribute to our documentation is to go to the `docs` folder and follow the [instructions to start local development](https://github.com/nhost/nhost/blob/main/docs/README.md):
|
||||
|
||||
```sh
|
||||
$ cd docs
|
||||
# not necessary if you've already done this step somewhere in the repository
|
||||
$ pnpm install
|
||||
$ pnpm start
|
||||
```
|
||||
|
||||
## Run Test Suites
|
||||
|
||||
### Unit Tests
|
||||
|
||||
You can run the unit tests with the following command from the repository root:
|
||||
|
||||
```sh
|
||||
$ pnpm test
|
||||
```
|
||||
|
||||
### E2E Tests
|
||||
|
||||
Each package that defines end-to-end tests embeds its own Nhost configuration, which is used automatically when the tests run. As a result, make sure the Nhost CLI is not already running before you start the tests.
|
||||
|
||||
You can run the e2e tests with the following command from the repository root:
|
||||
|
||||
```sh
|
||||
$ pnpm e2e
|
||||
```
|
||||
|
||||
## Changesets
|
||||
|
||||
If you've made changes to the packages, you must describe those changes so that they can be reflected in the next release.
|
||||
We use [changesets](https://github.com/changesets/changesets) to support our versioning and release workflows. When you submit a pull request, a bot checks if changesets are present, and if not, it asks you to add them.
|
||||
|
||||
To create a changeset, run the following command from the repository root:
|
||||
|
||||
```sh
|
||||
$ pnpm changeset
|
||||
```
|
||||
|
||||
This command will guide you through the process of creating a changeset. It will create a file in the `.changeset` directory.
|
||||
|
||||
You can take a look at the changeset documentation: [How to add a changeset](https://github.com/changesets/changesets/blob/main/docs/adding-a-changeset.md).
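
As an illustration, the generated changeset is a small Markdown file with a randomly generated name. The file name, package, and summary below are hypothetical:

```sh
# Hypothetical example of a generated changeset file
cat .changeset/brave-dogs-dance.md
# ---
# '@nhost/nhost-js': patch
# ---
#
# fix(deployments): use correct timestamp for deployment details
```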
|
||||
|
||||
### Selecting the Version
|
||||
|
||||
When you create a changeset, you will be asked to select the version of the package that you are bumping. The versioning scheme is as follows:
|
||||
|
||||
- **major**
|
||||
- For breaking changes (e.g: changing the function signature, etc.)
|
||||
- Should be avoided as much as possible as it will require users to update their code. Instead, consider supporting both the old and the new API simultaneously for a while.
|
||||
- For example: `v1.5.8` -> `v2.0.0`
|
||||
- **minor**
|
||||
- For new features (e.g: adding a new page to the dashboard, etc.)
|
||||
- For example: `v1.5.8` -> `v1.6.0`
|
||||
- **patch**
|
||||
- For bug fixes (e.g: fixing a typo, etc.)
|
||||
- For example: `v1.5.8` -> `v1.5.9`
|
||||
|
||||
### Writing Good Changesets
|
||||
|
||||
A concise summary that describes the changes should be added to each PR. This summary will be used as the changeset description.
|
||||
|
||||
The following structure is used for describing changes:
|
||||
|
||||
- **The type of the change**:
|
||||
|
||||
- fix
|
||||
- feat
|
||||
- chore
|
||||
- docs
|
||||
|
||||
- **The scope of the change** (_broader scopes (e.g: dashboard, hasura-storage-js, etc.) are not recommended as GitHub Releases already contain which project is being bumped_):
|
||||
|
||||
- projects
|
||||
- deployments
|
||||
- deps
|
||||
- etc.
|
||||
|
||||
- **A short summary of the changes that were made**
|
||||
|
||||
**Examples:**
|
||||
|
||||
- `fix(deployments): use correct timestamp for deployment details`
|
||||
- `chore(deps): bump @types/react to v18.2.8`
|
||||
- `feat(secrets): enable secrets`
|
||||
- etc.
|
||||
|
||||
You can always take a look at examples of changesets in the [GitHub Releases section](https://github.com/nhost/nhost/releases).
|
||||
16
Makefile
@@ -1,16 +0,0 @@
|
||||
.PHONY: envrc-install
|
||||
envrc-install: ## Copy envrc.sample to all project folders
|
||||
@for f in $$(find . -name "project.nix"); do \
|
||||
echo "Copying envrc.sample to $$(dirname $$f)/.envrc"; \
|
||||
cp ./envrc.sample $$(dirname $$f)/.envrc; \
|
||||
done
|
||||
|
||||
.PHONY: nixops-container-env
|
||||
nixops-container-env: ## Enter a NixOS container environment
|
||||
docker run \
|
||||
-it \
|
||||
-v /var/run/docker.sock:/var/run/docker.sock \
|
||||
-v ./:/build \
|
||||
-w /build \
|
||||
nixops:0.0.0-dev \
|
||||
bash
|
||||
77
README.md
@@ -4,7 +4,7 @@
|
||||
|
||||
# Nhost
|
||||
|
||||
<a href="https://docs.nhost.io/getting-started/overview">Quickstart</a>
|
||||
<a href="https://docs.nhost.io/introduction#quick-start-guides">Quickstart</a>
|
||||
<span> • </span>
|
||||
<a href="http://nhost.io/">Website</a>
|
||||
<span> • </span>
|
||||
@@ -12,7 +12,7 @@
|
||||
<span> • </span>
|
||||
<a href="https://nhost.io/blog">Blog</a>
|
||||
<span> • </span>
|
||||
<a href="https://x.com/nhost">X</a>
|
||||
<a href="https://twitter.com/nhost">Twitter</a>
|
||||
<span> • </span>
|
||||
<a href="https://nhost.io/discord">Discord</a>
|
||||
<span> • </span>
|
||||
@@ -33,10 +33,10 @@ Nhost consists of open source software:
|
||||
|
||||
- Database: [PostgreSQL](https://www.postgresql.org/)
|
||||
- Instant GraphQL API: [Hasura](https://hasura.io/)
|
||||
- Authentication: [Auth](https://github.com/nhost/nhost/tree/main/services/auth)
|
||||
- Storage: [Storage](https://github.com/nhost/nhost/tree/main/services/storage)
|
||||
- Authentication: [Hasura Auth](https://github.com/nhost/hasura-auth/)
|
||||
- Storage: [Hasura Storage](https://github.com/nhost/hasura-storage)
|
||||
- Serverless Functions: Node.js (JavaScript and TypeScript)
|
||||
- [Nhost CLI](https://github.com/nhost/nhost/tree/main/cli) for local development
|
||||
- [Nhost CLI](https://docs.nhost.io/development/cli/overview) for local development
|
||||
|
||||
## Architecture of Nhost
|
||||
|
||||
@@ -61,34 +61,27 @@ Visit [https://docs.nhost.io](http://docs.nhost.io) for the complete documentati
|
||||
|
||||
Since Nhost is 100% open source, you can self-host the whole Nhost stack. Check out the example [docker-compose file](https://github.com/nhost/nhost/tree/main/examples/docker-compose) to self-host Nhost.
|
||||
|
||||
## Sign In and Make a GraphQL Request
|
||||
## Sign In and Make a Graphql Request
|
||||
|
||||
Install the `@nhost/nhost-js` package and start building your app:
|
||||
Install the `@nhost/nhost-js` package and start build your app:
|
||||
|
||||
```ts
|
||||
import { createClient } from '@nhost/nhost-js'
|
||||
```jsx
|
||||
import { NhostClient } from '@nhost/nhost-js'
|
||||
|
||||
const nhost = createClient({
|
||||
subdomain: 'your-project',
|
||||
region: 'eu-central-1'
|
||||
const nhost = new NhostClient({
|
||||
subdomain: '<your-subdomain>',
|
||||
region: '<your-region>'
|
||||
})
|
||||
|
||||
await nhost.auth.signInEmailPassword({
|
||||
email: 'user@example.com',
|
||||
password: 'password123'
|
||||
})
|
||||
await nhost.auth.signIn({ email: 'user@domain.com', password: 'userPassword' })
|
||||
|
||||
await nhost.graphql.request({
|
||||
query: `
|
||||
query GetUsers {
|
||||
users {
|
||||
id
|
||||
displayName
|
||||
email
|
||||
}
|
||||
}
|
||||
`
|
||||
})
|
||||
await nhost.graphql.request(`{
|
||||
users {
|
||||
id
|
||||
displayName
|
||||
email
|
||||
}
|
||||
}`)
|
||||
```
|
||||
|
||||
## Frontend Agnostic
|
||||
@@ -96,21 +89,33 @@ await nhost.graphql.request({
|
||||
Nhost is frontend agnostic, which means Nhost works with all frontend frameworks.
|
||||
|
||||
<div align="center">
|
||||
<a href="https://docs.nhost.io/getting-started/quickstart/nextjs"><img src="assets/nextjs.svg"/></a>
|
||||
<a href="https://docs.nhost.io/reference/javascript/nhost-js/nhost-client"><img src="assets/nuxtjs.svg"/></a>
|
||||
<a href="https://docs.nhost.io/getting-started/quickstart/react"><img src="assets/react.svg"/></a>
|
||||
<a href="https://docs.nhost.io/getting-started/quickstart/reactnative"><img src="assets/react-native.svg"/></a>
|
||||
<a href="https://docs.nhost.io/reference/javascript/nhost-js/nhost-client"><img src="assets/svelte.svg"/></a>
|
||||
<a href="https://docs.nhost.io/getting-started/quickstart/vue"><img src="assets/vuejs.svg"/></a>
|
||||
<a href="https://docs.nhost.io/guides/quickstarts/nextjs"><img src="assets/nextjs.svg"/></a>
|
||||
<a href="https://docs.nhost.io/reference/javascript"><img src="assets/nuxtjs.svg"/></a>
|
||||
<a href="https://docs.nhost.io/guides/quickstarts/react"><img src="assets/react.svg"/></a>
|
||||
<a href="https://docs.nhost.io/reference/javascript"><img src="assets/react-native.svg"/></a>
|
||||
<a href="https://docs.nhost.io/reference/javascript"><img src="assets/svelte.svg"/></a>
|
||||
<a href="https://docs.nhost.io/guides/quickstarts/vue"><img src="assets/vuejs.svg"/></a>
|
||||
</div>
|
||||
|
||||
# Resources
|
||||
|
||||
- Start developing locally with the [Nhost CLI](https://docs.nhost.io/platform/cli/local-development)
|
||||
- Start developing locally with the [Nhost CLI](https://docs.nhost.io/cli)
|
||||
|
||||
## Nhost Clients
|
||||
|
||||
- [JavaScript/TypeScript](https://docs.nhost.io/reference/javascript/nhost-js/main)
|
||||
- [JavaScript/TypeScript](https://docs.nhost.io/reference/javascript)
|
||||
- [Dart and Flutter](https://github.com/nhost/nhost-dart)
|
||||
- [React](https://docs.nhost.io/reference/react)
|
||||
- [Next.js](https://docs.nhost.io/reference/nextjs)
|
||||
- [Vue](https://docs.nhost.io/reference/vue)
|
||||
|
||||
## Integrations
|
||||
|
||||
- [Apollo](./integrations/apollo#nhostapollo)
|
||||
- [React Apollo](./integrations/react-apollo#nhostreact-apollo)
|
||||
- [React URQL](./integrations/react-urql#nhostreact-urql)
|
||||
- [Stripe GraphQL API](./integrations/stripe-graphql-js#nhoststripe-graphql-js)
|
||||
- [Google Translation GraphQL API](./integrations/google-translation#nhostgoogle-translation)
|
||||
|
||||
## Applications
|
||||
|
||||
@@ -135,7 +140,7 @@ This repository, and most of our other open source projects, are licensed under
|
||||
|
||||
Here are some ways of contributing to making Nhost better:
|
||||
|
||||
- **[Try out Nhost](https://docs.nhost.io)**, and think of ways to make the service better. Let us know here on GitHub.
|
||||
- **[Try out Nhost](https://docs.nhost.io/introduction)**, and think of ways to make the service better. Let us know here on GitHub.
|
||||
- Join our [Discord](https://discord.com/invite/9V7Qb2U) and connect with other members to share and learn from.
|
||||
- Send a pull request to any of our [open source repositories](https://github.com/nhost) on Github. Check our [contribution guide](https://github.com/nhost/nhost/blob/main/CONTRIBUTING.md) and our [developers guide](https://github.com/nhost/nhost/blob/main/DEVELOPERS.md) for more details about how to contribute. We're looking forward to your contribution!
|
||||
|
||||
|
||||
@@ -2,8 +2,5 @@
|
||||
// $schema provides code completion hints to IDEs.
|
||||
"$schema": "https://github.com/IBM/audit-ci/raw/main/docs/schema.json",
|
||||
"moderate": true,
|
||||
"allowlist": [
|
||||
"GHSA-9965-vmph-33xx", // https://github.com/advisories/GHSA-9965-vmph-33xx Update package once have a fix
|
||||
"GHSA-7mvr-c777-76hp" // https://github.com/advisories/GHSA-7mvr-c777-76hp Update package once Nix side is also updated
|
||||
]
|
||||
"allowlist": ["vue-template-compiler"]
|
||||
}
|
||||
|
||||
43
biome.json
@@ -1,43 +0,0 @@
|
||||
{
|
||||
"$schema": "https://biomejs.dev/schemas/2.2.2/schema.json",
|
||||
"vcs": { "enabled": true, "clientKind": "git", "useIgnoreFile": true },
|
||||
"files": { "ignoreUnknown": false },
|
||||
"formatter": {
|
||||
"enabled": true,
|
||||
"indentStyle": "space",
|
||||
"indentWidth": 2,
|
||||
"lineWidth": 80
|
||||
},
|
||||
"linter": {
|
||||
"enabled": true,
|
||||
"rules": {
|
||||
"recommended": true,
|
||||
"complexity": {
|
||||
"useLiteralKeys": "off"
|
||||
}
|
||||
},
|
||||
"includes": ["**", "!.next", "!node_modules"]
|
||||
},
|
||||
"javascript": { "formatter": { "quoteStyle": "double" }, "globals": [] },
|
||||
"assist": {
|
||||
"enabled": true,
|
||||
"actions": { "source": { "organizeImports": "on" } }
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"includes": ["**/*.svelte", "**/*.astro", "**/*.vue"],
|
||||
"linter": {
|
||||
"rules": {
|
||||
"style": {
|
||||
"useConst": "off",
|
||||
"useImportType": "off"
|
||||
},
|
||||
"correctness": {
|
||||
"noUnusedVariables": "off",
|
||||
"noUnusedImports": "off"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
# Configuration Files
|
||||
|
||||
This directory contains standardized configurations for various tools and frameworks used across the repository.
|
||||
|
||||
## Available Configurations
|
||||
|
||||
- [**TypeScript (`/tsconfig`)**](./tsconfig/README.md): Centralized TypeScript configurations for different project types
|
||||
|
||||
- Standard base configuration with strict type checking
|
||||
- Specialized configurations for libraries, frontend apps, and Node.js
|
||||
- Documented usage patterns and extension points
|
||||
|
||||
## Using the Configurations
|
||||
|
||||
Each configuration directory contains a README with specific instructions on how to use the configurations in your projects.
|
||||
|
||||
## Benefits
|
||||
|
||||
- **Consistency**: All projects follow the same standards and best practices
|
||||
- **Maintainability**: Configuration changes can be made in one place and propagated to all projects
|
||||
- **Onboarding**: New projects can quickly adopt the standard configurations
|
||||
|
||||
## Adding New Configurations
|
||||
|
||||
When adding new centralized configurations:
|
||||
|
||||
1. Create a new subdirectory with an appropriate name
|
||||
2. Include a README.md explaining the configurations
|
||||
3. Document both the usage and the reasoning behind configuration choices
|
||||
@@ -1,58 +0,0 @@
|
||||
# TypeScript Configurations
|
||||
|
||||
This directory contains centralized TypeScript configurations that can be extended by projects in the monorepo. Using centralized configurations ensures consistency across projects and makes it easier to maintain and update TypeScript settings.
|
||||
|
||||
## Base Configurations
|
||||
|
||||
- `base.json`: Core TypeScript settings used by all projects
|
||||
- `library.json`: Settings for libraries and SDK packages
|
||||
- `frontend.json`: Settings for frontend applications (React, Next.js)
|
||||
- `node.json`: Settings for Node.js applications and scripts
|
||||
- `vite.json`: Settings for Vite configuration files
|
||||
|
||||
## Usage
|
||||
|
||||
In your project's `tsconfig.json` file, extend the appropriate base configuration:
|
||||
|
||||
```json
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"extends": "../../configs/tsconfig/frontend.json",
|
||||
"compilerOptions": {
|
||||
// Project-specific overrides here
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Configuration Features
|
||||
|
||||
### Common Features
|
||||
|
||||
- Strict type checking
|
||||
- Modern ES features
|
||||
- Comprehensive linting rules
|
||||
- Proper module resolution
|
||||
|
||||
### Library Configuration
|
||||
|
||||
- Declaration file generation
|
||||
- Source maps
|
||||
- Composite project support
|
||||
|
||||
### Frontend Configuration
|
||||
|
||||
- JSX support
|
||||
- DOM typings
|
||||
- Bundler module resolution
|
||||
- Compatible with both React and Next.js
|
||||
- Configurable for specific framework needs
|
||||
|
||||
## Creating New Projects
|
||||
|
||||
When creating a new project:
|
||||
|
||||
1. Identify the appropriate base configuration for your project type
|
||||
2. Create a minimal `tsconfig.json` that extends the base configuration from the `configs/tsconfig` directory
|
||||
3. Add only project-specific customizations to your `tsconfig.json`
|
||||
|
||||
This approach ensures all projects follow the same standards while allowing for project-specific needs.
|
||||
@@ -1,34 +0,0 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Base Configuration",
|
||||
"compilerOptions": {
|
||||
/* Environment and Features */
|
||||
"lib": ["ESNext"],
|
||||
"target": "ES2022",
|
||||
"module": "ESNext",
|
||||
"moduleDetection": "force",
|
||||
"skipLibCheck": true,
|
||||
|
||||
/* Type Checking */
|
||||
"strict": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noImplicitOverride": true,
|
||||
"noImplicitReturns": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noUncheckedIndexedAccess": true,
|
||||
"noPropertyAccessFromIndexSignature": true,
|
||||
"allowUnusedLabels": false,
|
||||
"allowUnreachableCode": false,
|
||||
|
||||
/* Module Resolution */
|
||||
"esModuleInterop": true,
|
||||
"resolveJsonModule": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
|
||||
/* Advanced Options */
|
||||
"verbatimModuleSyntax": true,
|
||||
"isolatedModules": true
|
||||
},
|
||||
"exclude": ["node_modules", "**/dist", "**/build"]
|
||||
}
|
||||
@@ -1,25 +0,0 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Frontend Configuration",
|
||||
"extends": "./base.json",
|
||||
"compilerOptions": {
|
||||
/* Frontend Specific */
|
||||
"lib": ["ESNext", "DOM", "DOM.Iterable"],
|
||||
"jsx": "react-jsx",
|
||||
|
||||
/* Module Resolution */
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"noEmit": true,
|
||||
|
||||
/* Additional Options */
|
||||
"allowJs": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"incremental": true,
|
||||
|
||||
/* Next.js Compatibility (ignored by non-Next.js projects) */
|
||||
"plugins": []
|
||||
},
|
||||
"include": ["src/**/*", "**/*.ts", "**/*.tsx"],
|
||||
"exclude": ["node_modules", "**/node_modules/*"]
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Library/SDK Configuration",
|
||||
"extends": "./base.json",
|
||||
"compilerOptions": {
|
||||
/* Output Configuration */
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"sourceMap": true,
|
||||
"outDir": "./dist",
|
||||
"noEmit": false,
|
||||
"composite": true,
|
||||
"importHelpers": true,
|
||||
|
||||
/* Library-specific */
|
||||
"moduleResolution": "node",
|
||||
|
||||
/* Types */
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": [
|
||||
"node_modules",
|
||||
"**/*.test.ts",
|
||||
"**/*.spec.ts",
|
||||
"**/__tests__/**",
|
||||
"dist",
|
||||
"**/dist/*"
|
||||
]
|
||||
}
|
||||
@@ -1,23 +0,0 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Node.js Configuration",
|
||||
"extends": "./base.json",
|
||||
"compilerOptions": {
|
||||
"lib": ["ESNext"],
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"target": "ES2022",
|
||||
|
||||
"allowJs": true,
|
||||
"esModuleInterop": true,
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
|
||||
/* Node-specific options */
|
||||
"sourceMap": true,
|
||||
|
||||
/* Types */
|
||||
"types": ["node"]
|
||||
},
|
||||
"exclude": ["node_modules", "**/node_modules/*"]
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Vite Configuration",
|
||||
"extends": "./node.json",
|
||||
"compilerOptions": {
|
||||
"composite": true,
|
||||
"skipLibCheck": true,
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "bundler",
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["vite.config.ts"]
|
||||
}
|
||||
@@ -1,127 +0,0 @@
|
||||
PROJ_DIR=$(abspath .)
|
||||
PROJ=$(subst $(ROOT_DIR)/,,$(PROJ_DIR))
|
||||
NAME=$(notdir $(PROJ))
|
||||
|
||||
include $(ROOT_DIR)/build/makefiles/release.makefile
|
||||
|
||||
ifdef VER
|
||||
VERSION=$(shell echo $(VER) | sed -e 's/^v//g' -e 's/\//_/g')
|
||||
else
|
||||
VERSION=$(shell grep -oP 'version\s*=\s*"\K[^"]+' project.nix | head -n 1)
|
||||
endif
|
||||
|
||||
ifeq ($(shell uname -m),x86_64)
|
||||
ARCH?=x86_64
|
||||
else ifeq ($(shell uname -m),arm64)
|
||||
ARCH?=aarch64
|
||||
else ifeq ($(shell uname -m),aarch64)
|
||||
ARCH?=aarch64
|
||||
else
|
||||
ARCH?=FIXME-$(shell uname -m)
|
||||
endif
|
||||
|
||||
ifeq ($(shell uname -o),Darwin)
|
||||
OS?=darwin
|
||||
else
|
||||
OS?=linux
|
||||
endif
|
||||
|
||||
ifeq ($(CI),true)
|
||||
docker-build-options=--option system $(ARCH)-linux --extra-platforms ${ARCH}-linux
|
||||
endif
|
||||
|
||||
|
||||
.PHONY: help
|
||||
help: ## Show this help.
|
||||
@echo
|
||||
@awk 'BEGIN { \
|
||||
FS = "##"; \
|
||||
printf "Usage: make \033[36m<target>\033[0m\n"} \
|
||||
/^[a-zA-Z_-]+%?:.*?##/ { printf " \033[36m%-38s\033[0m %s\n", $$1, $$2 } ' \
|
||||
$(MAKEFILE_LIST)
|
||||
|
||||
.PHONY: print-vars
|
||||
print-vars: ## print all variables
|
||||
@$(foreach V,$(sort $(.VARIABLES)), \
|
||||
$(if $(filter-out environment% default automatic, \
|
||||
$(origin $V)),$(info $V=$($V) ($(value $V)))))
|
||||
|
||||
|
||||
.PHONY: get-version
|
||||
get-version: ## Return version
|
||||
@sed -i '/^\s*version = "0.0.0-dev";/s//version = "${VERSION}";/' project.nix
|
||||
@sed -i '/^\s*created = "1970-.*";/s//created = "${shell date --utc '+%Y-%m-%dT%H:%M:%SZ'}";/' project.nix
|
||||
@echo $(VERSION)
|
||||
|
||||
|
||||
.PHONY: develop
|
||||
develop: ## Start a nix develop shell
|
||||
nix develop .\#$(NAME)
|
||||
|
||||
|
||||
.PHONY: _check-pre
|
||||
_check-pre: ## Pre-checks before running nix flake check
|
||||
|
||||
|
||||
.PHONY: check
|
||||
check: _check-pre ## Run nix flake check
|
||||
nix build \
|
||||
--print-build-logs \
|
||||
.\#checks.$(ARCH)-$(OS).$(NAME)
|
||||
|
||||
|
||||
.PHONY: check-dry-run
|
||||
check-dry-run: ## Returns the derivation of the check
|
||||
@nix build \
|
||||
--dry-run \
|
||||
--json \
|
||||
.\#checks.$(ARCH)-$(OS).$(NAME) | jq -r '.[].outputs.out'
|
||||
|
||||
|
||||
.PHONY: build
|
||||
build: ## Build application and places the binary under ./result/bin
|
||||
nix build \
|
||||
--print-build-logs \
|
||||
.\#packages.$(ARCH)-$(OS).$(NAME)
|
||||
|
||||
|
||||
.PHONY: build-dry-run
|
||||
build-dry-run: ## Run nix flake check
|
||||
@nix build \
|
||||
--dry-run \
|
||||
--json \
|
||||
.\#packages.$(ARCH)-$(OS).$(NAME) | jq -r '.[].outputs.out'
|
||||
|
||||
|
||||
.PHONY: build-nixops-dry-run
|
||||
build-nixops-dry-run: ## Checks if nixops needs to be rebuilt
|
||||
@nix build \
|
||||
--dry-run \
|
||||
--json \
|
||||
.\#packages.$(ARCH)-$(OS).nixops | jq -r '.[].outputs.out'
|
||||
|
||||
|
||||
.PHONY: build-docker-image
|
||||
build-docker-image: ## Build docker container for native architecture
|
||||
nix build $(docker-build-options) --show-trace \
|
||||
.\#packages.$(ARCH)-linux.$(NAME)-docker-image \
|
||||
--print-build-logs
|
||||
nix develop \#skopeo -c \
|
||||
skopeo copy --insecure-policy dir:./result docker-daemon:$(NAME):$(VERSION)
|
||||
|
||||
|
||||
.PHONY: build-docker-image-import-bare
|
||||
build-docker-image-import-bare:
|
||||
skopeo copy --insecure-policy dir:./result docker-daemon:$(NAME):$(VERSION)
|
||||
|
||||
|
||||
.PHONY: dev-env-up
|
||||
dev-env-up: _dev-env-build _dev-env-up ## Starts development environment
|
||||
|
||||
|
||||
.PHONY: dev-env-down
|
||||
dev-env-down: _dev-env-down ## Stops development environment
|
||||
|
||||
|
||||
.PHONY: dev-env-build
|
||||
dev-env-build: _dev-env-build ## Builds development environment
|
||||
@@ -1,30 +0,0 @@
|
||||
TAG_NAME?=$(NAME)
|
||||
TAG_PATTERN="^$(TAG_NAME)@\d+\.\d+\.\d+$$"
|
||||
|
||||
|
||||
.PHONY: changelog-init
|
||||
changelog-init: ## Initialize changelog using git-cliff
|
||||
@git cliff -u --tag-pattern "$(TAG_PATTERN)" --bump --tag="$(NAME)/$(VERSION)" --output CHANGELOG.md
|
||||
|
||||
.PHONY: changelog-next-version
|
||||
changelog-next-version: ## Get next version using git-cliff
|
||||
@git cliff -u --bumped-version --tag-pattern $(TAG_PATTERN) $(CLIFF_OPTS) | sed 's/.*@//'
|
||||
|
||||
.PHONY: changelog-get-released
|
||||
changelog-get-released: ## Get changelog for the latest release using git-cliff
|
||||
@git cliff -l --bump --tag-pattern $(TAG_PATTERN) $(CLIFF_OPTS) --strip all
|
||||
|
||||
|
||||
.PHONY: changelog-get-unreleased
|
||||
changelog-get-unreleased: ## Get changelog for the following release using git-cliff
|
||||
@git cliff -u --bump --tag-pattern $(TAG_PATTERN) $(CLIFF_OPTS) --strip all
|
||||
|
||||
|
||||
.PHONY: changelog-update
|
||||
changelog-update: ## Update changelog using git-cliff
|
||||
@git cliff -u --bump --tag-pattern $(TAG_PATTERN) $(CLIFF_OPTS) --prepend CHANGELOG.md
|
||||
|
||||
|
||||
.PHONY: release-tag-name
|
||||
release-tag-name: ## Get the tag name for the current version
|
||||
@echo "$(TAG_NAME)"
|
||||
@@ -1,95 +0,0 @@
## [cli@1.34.5] - 2025-11-06

### ⚙️ Miscellaneous Tasks

- *(nixops)* Bump go to 1.25.3 and nixpkgs due to CVEs (#3652)
- *(cli)* Update certs and schema (#3675)
- *(cli)* Bump nhost/dashboard to 2.41.0 (#3669)

# Changelog

All notable changes to this project will be documented in this file.

## [cli@1.34.4] - 2025-10-28

### 🐛 Bug Fixes

- *(cli)* Update NEXT_PUBLIC_NHOST_HASURA_MIGRATIONS_API_URL correctly (#3643)

## [cli@1.34.3] - 2025-10-27

### ⚙️ Miscellaneous Tasks

- *(cli)* Update schema (#3622)
- *(cli)* Bump nhost/dashboard to 2.40.0 (#3629)

## [cli@1.34.2] - 2025-10-20

### ⚙️ Miscellaneous Tasks

- *(cli)* Minor fix to download script when specifying version (#3602)
- *(cli)* Update schema (#3613)

## [cli@1.34.1] - 2025-10-13

### 🐛 Bug Fixes

- *(cli)* Remove references to mcp-nhost (#3575)
- *(cli)* Workaround os.Rename issues when src and dst are on different partitions (#3599)

### ⚙️ Miscellaneous Tasks

- *(auth)* Change some references to deprecated hasura-auth (#3584)
- *(docs)* Updated README.md and CONTRIBUTING.md (#3587)

## [cli@1.34.0] - 2025-10-09

### 🚀 Features

- *(cli)* Added mcp server functionality from mcp-nhost (#3550)
- *(cli)* Mcp: move configuration to .nhost folder and integrate cloud credentials (#3555)
- *(cli)* Mcp: added support for environment variables in the configuration (#3556)
- *(cli)* MCP refactor and documentation prior to official release (#3571)

### 🐛 Bug Fixes

- *(dashboard)* Remove NODE_ENV from restricted env vars (#3573)

### ⚙️ Miscellaneous Tasks

- *(nixops)* Update nhost-cli (#3554)
- *(cli)* Bump nhost/dashboard to 2.38.4 (#3539)

## [cli@1.33.0] - 2025-10-02

### 🚀 Features

- *(cli)* Migrate from urfave/v2 to urfave/v3 (#3545)

### 🐛 Bug Fixes

- *(cli)* Disable tls on AUTH_SERVER_URL when auth uses custom port (#3549)
- *(cli)* Fix breaking change in go-getter dependency (#3551)

### ⚙️ Miscellaneous Tasks

- *(cli)* Update certs (#3552)

## [cli@1.32.2] - 2025-10-01

### ⚙️ Miscellaneous Tasks

- *(cli)* Remove hasura- prefix from auth/storage images (#3538)

## [cli@1.32.1] - 2025-09-29

### ⚙️ Miscellaneous Tasks

- *(ci)* Minor improvements to the ci (#3527)
- *(cli)* Update schema (#3529)

@@ -1,84 +0,0 @@
# Developer Guide

## Requirements

We use nix to manage the development environment, the build process and for running tests.

### With Nix (Recommended)

Run `nix develop \#cli` to get a complete development environment.

### Without Nix

Check `project.nix` (checkDeps, buildInputs, buildNativeInputs) for manual dependency installation. Alternatively, you can run `make nixops-container-env` in the root of the repository to enter a Docker container with nix and all dependencies pre-installed (note that it is a large image).

## Development Workflow

### Running Tests

**With Nix:**

```bash
make dev-env-up
make check
```

**Without Nix:**

```bash
# Start development environment
make dev-env-up

# Lint Go code
golangci-lint run ./...

# Run tests
go test -v ./...
```

### Formatting

Format code before committing:

```bash
golines -w --base-formatter=gofumpt .
```

## Building

### Local Build

Build the project (output in `./result`):

```bash
make build
```

### Docker Image

Build and import the Docker image with skopeo:

```bash
make build-docker-image
```

If you run the command above inside the dockerized nixops-container-env and get an error like:

```
FATA[0000] writing blob: io: read/write on closed pipe
```

then run the following command outside of the container (requires skopeo installed on the host):

```bash
cd cli
make build-docker-image-import-bare
```

### Multi-Platform Builds

Build for multiple platforms (Darwin/Linux, ARM64/AMD64):

```bash
make build-multiplatform
```

This produces binaries for:

- darwin/arm64
- darwin/amd64
- linux/arm64
- linux/amd64

29
cli/Makefile
29
cli/Makefile
@@ -1,29 +0,0 @@
ROOT_DIR?=$(abspath ../)
include $(ROOT_DIR)/build/makefiles/general.makefile

.PHONY: _check-pre
_check-pre:
	@sed -i 's/$$NHOST_PAT/$(NHOST_PAT)/' get_access_token.sh

.PHONY: _dev-env-up
_dev-env-up:
	@echo "Nothing to do"

.PHONY: _dev-env-down
_dev-env-down:
	@echo "Nothing to do"

.PHONY: _dev-env-build
_dev-env-build:
	@echo "Nothing to do"

.PHONY: build-multiplatform
build-multiplatform: ## Build cli for all supported platforms
	nix build \
		--print-build-logs \
		.\#packages.$(ARCH)-$(OS).cli-multiplatform
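For reference, the `ARCH` and `OS` variables above come from `general.makefile` (not shown here), so the exact flake output name depends on the host. A sketch of what the target expands to on an x86_64 Linux machine, under that assumption:

```bash
# Hypothetical expansion of `make build-multiplatform` on an x86_64 Linux host.
nix build \
    --print-build-logs \
    .\#packages.x86_64-linux.cli-multiplatform
```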
@@ -1,89 +0,0 @@
<div align="center">
<h1 style="font-size: 3em; font-weight: bold;">Nhost CLI</h1>
</div>

[Nhost](http://nhost.io) is an open-source Firebase alternative with GraphQL.

The Nhost CLI is used to set up a local development environment. This environment automatically tracks database migrations and Hasura metadata.

It's recommended to use the Nhost CLI and the [Nhost GitHub Integration](https://docs.nhost.io/platform/github-integration) to develop locally and automatically deploy changes to production with a git-based workflow (similar to Netlify & Vercel).

## Services

- [Nhost Dashboard](https://github.com/nhost/nhost/tree/main/dashboard)
- [Postgres Database](https://www.postgresql.org/)
- [GraphQL Engine](https://github.com/hasura/graphql-engine)
- [Auth](https://github.com/nhost/nhost/main/auth)
- [Storage](https://github.com/nhost/nhost/main/storage)
- [Nhost Serverless Functions](https://github.com/nhost/functions)
- [Minio S3](https://github.com/minio/minio)
- [Mailhog](https://github.com/mailhog/MailHog)

## Get Started

### Install the Nhost CLI

```bash
sudo curl -L https://raw.githubusercontent.com/nhost/nhost/main/cli/get.sh | bash
```

### Initialize a project

```bash
nhost init
```

### Initialize a project with a remote project as a starting point

```bash
nhost init --remote
```

### Start the development environment

```bash
nhost up
```

### Use the Nhost Dashboard

```bash
nhost up --ui nhost
```

## MCP Server

The Nhost CLI ships with an MCP server that lets you interact with your Nhost projects through AI assistants using the Model Context Protocol. It provides secure, controlled access to your GraphQL data, project configuration, and documentation, with granular permissions that let you specify exactly which queries and mutations an LLM can execute. For development, it streamlines your workflow by enabling AI-assisted schema management, metadata changes, and migrations, while providing direct access to your GraphQL schema for intelligent query building.

You can read more about the MCP server in the [MCP Server documentation](https://docs.nhost.io/platform/cli/mcp/overview).

## Documentation

- [Get started with Nhost CLI (longer version)](https://docs.nhost.io/platform/overview/get-started-with-nhost-cli)
- [Nhost CLI](https://docs.nhost.io/platform/cli)
- [Reference](https://docs.nhost.io/reference/cli)
- [MCP Server](https://docs.nhost.io/platform/cli/mcp/overview)

## Build from Source

Make sure you have [Go](https://golang.org/doc/install) 1.18 or later installed.

The source code includes a self-signed certificate for testing purposes. Nhost workers with configured access to AWS may use the `cert.sh` script to generate a real certificate from Let's Encrypt.

```bash
go build -o /usr/local/bin/nhost
```

This builds the binary and makes it available as the `nhost` command in your terminal.

## Dependencies

- [Docker](https://docs.docker.com/get-docker/)
- [Docker Compose](https://docs.docker.com/compose/install/)
- [curl](https://curl.se/)
- [Git](https://git-scm.com/downloads)

## Supported Platforms

- macOS
- Linux
- Windows WSL2

49
cli/cert.sh
49
cli/cert.sh
@@ -1,49 +0,0 @@
#!/bin/sh

set -euo pipefail

certbot certonly \
    -v \
    --dns-route53 \
    -d local.auth.nhost.run \
    -d local.dashboard.nhost.run \
    -d local.db.nhost.run \
    -d local.functions.nhost.run \
    -d local.graphql.nhost.run \
    -d local.hasura.nhost.run \
    -d local.mailhog.nhost.run \
    -d local.storage.nhost.run \
    -m 'admin@nhost.io' \
    --non-interactive \
    --agree-tos \
    --server https://acme-v02.api.letsencrypt.org/directory \
    --logs-dir letsencrypt \
    --config-dir letsencrypt \
    --work-dir letsencrypt

cp letsencrypt/live/local.auth.nhost.run/fullchain.pem ssl/.ssl/local-fullchain.pem
cp letsencrypt/live/local.auth.nhost.run/privkey.pem ssl/.ssl/local-privkey.pem

certbot certonly \
    -v \
    --manual \
    --preferred-challenges dns \
    -d *.auth.local.nhost.run \
    -d *.dashboard.local.nhost.run \
    -d *.db.local.nhost.run \
    -d *.functions.local.nhost.run \
    -d *.graphql.local.nhost.run \
    -d *.hasura.local.nhost.run \
    -d *.mailhog.local.nhost.run \
    -d *.storage.local.nhost.run \
    -m 'admin@nhost.io' \
    --agree-tos \
    --server https://acme-v02.api.letsencrypt.org/directory \
    --logs-dir letsencrypt \
    --config-dir letsencrypt \
    --work-dir letsencrypt

cp letsencrypt/live/auth.local.nhost.run/fullchain.pem ssl/.ssl/sub-fullchain.pem
cp letsencrypt/live/auth.local.nhost.run/privkey.pem ssl/.ssl/sub-privkey.pem

rm -rf letsencrypt

@@ -1,126 +0,0 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/nhost/nhost/cli/nhostclient"
|
||||
"github.com/nhost/nhost/cli/nhostclient/graphql"
|
||||
"github.com/urfave/cli/v3"
|
||||
)
|
||||
|
||||
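// sanitizeName lowercases the given name and strips every character outside [a-zA-Z0-9_-].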
func sanitizeName(name string) string {
|
||||
re := regexp.MustCompile(`[^a-zA-Z0-9_-]`)
|
||||
return strings.ToLower(re.ReplaceAllString(name, ""))
|
||||
}
|
||||
|
||||
type CliEnv struct {
|
||||
stdout io.Writer
|
||||
stderr io.Writer
|
||||
Path *PathStructure
|
||||
authURL string
|
||||
graphqlURL string
|
||||
branch string
|
||||
nhclient *nhostclient.Client
|
||||
nhpublicclient *nhostclient.Client
|
||||
projectName string
|
||||
localSubdomain string
|
||||
}
|
||||
|
||||
func New(
|
||||
stdout io.Writer,
|
||||
stderr io.Writer,
|
||||
path *PathStructure,
|
||||
authURL string,
|
||||
graphqlURL string,
|
||||
branch string,
|
||||
projectName string,
|
||||
localSubdomain string,
|
||||
) *CliEnv {
|
||||
return &CliEnv{
|
||||
stdout: stdout,
|
||||
stderr: stderr,
|
||||
Path: path,
|
||||
authURL: authURL,
|
||||
graphqlURL: graphqlURL,
|
||||
branch: branch,
|
||||
nhclient: nil,
|
||||
nhpublicclient: nil,
|
||||
projectName: projectName,
|
||||
localSubdomain: localSubdomain,
|
||||
}
|
||||
}
|
||||
|
||||
func FromCLI(cmd *cli.Command) *CliEnv {
|
||||
cwd, err := os.Getwd()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return &CliEnv{
|
||||
stdout: cmd.Writer,
|
||||
stderr: cmd.ErrWriter,
|
||||
Path: NewPathStructure(
|
||||
cwd,
|
||||
cmd.String(flagRootFolder),
|
||||
cmd.String(flagDotNhostFolder),
|
||||
cmd.String(flagNhostFolder),
|
||||
),
|
||||
authURL: cmd.String(flagAuthURL),
|
||||
graphqlURL: cmd.String(flagGraphqlURL),
|
||||
branch: cmd.String(flagBranch),
|
||||
projectName: sanitizeName(cmd.String(flagProjectName)),
|
||||
nhclient: nil,
|
||||
nhpublicclient: nil,
|
||||
localSubdomain: cmd.String(flagLocalSubdomain),
|
||||
}
|
||||
}
|
||||
|
||||
func (ce *CliEnv) ProjectName() string {
|
||||
return ce.projectName
|
||||
}
|
||||
|
||||
func (ce *CliEnv) LocalSubdomain() string {
|
||||
return ce.localSubdomain
|
||||
}
|
||||
|
||||
func (ce *CliEnv) AuthURL() string {
|
||||
return ce.authURL
|
||||
}
|
||||
|
||||
func (ce *CliEnv) GraphqlURL() string {
|
||||
return ce.graphqlURL
|
||||
}
|
||||
|
||||
func (ce *CliEnv) Branch() string {
|
||||
return ce.branch
|
||||
}
|
||||
|
||||
func (ce *CliEnv) GetNhostClient(ctx context.Context) (*nhostclient.Client, error) {
|
||||
if ce.nhclient == nil {
|
||||
session, err := ce.LoadSession(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load session: %w", err)
|
||||
}
|
||||
|
||||
ce.nhclient = nhostclient.New(
|
||||
ce.authURL,
|
||||
ce.graphqlURL,
|
||||
graphql.WithAccessToken(session.Session.AccessToken),
|
||||
)
|
||||
}
|
||||
|
||||
return ce.nhclient, nil
|
||||
}
|
||||
|
||||
func (ce *CliEnv) GetNhostPublicClient() (*nhostclient.Client, error) {
|
||||
if ce.nhpublicclient == nil {
|
||||
ce.nhpublicclient = nhostclient.New(ce.authURL, ce.graphqlURL)
|
||||
}
|
||||
|
||||
return ce.nhpublicclient, nil
|
||||
}
|
||||
@@ -1,101 +0,0 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
type PathStructure struct {
|
||||
workingDir string
|
||||
root string
|
||||
dotNhostFolder string
|
||||
nhostFolder string
|
||||
}
|
||||
|
||||
func NewPathStructure(
|
||||
workingDir, root, dotNhostFolder, nhostFolder string,
|
||||
) *PathStructure {
|
||||
return &PathStructure{
|
||||
workingDir: workingDir,
|
||||
root: root,
|
||||
dotNhostFolder: dotNhostFolder,
|
||||
nhostFolder: nhostFolder,
|
||||
}
|
||||
}
|
||||
|
||||
func (p PathStructure) WorkingDir() string {
|
||||
return p.workingDir
|
||||
}
|
||||
|
||||
func (p PathStructure) Root() string {
|
||||
return p.root
|
||||
}
|
||||
|
||||
func (p PathStructure) DotNhostFolder() string {
|
||||
return p.dotNhostFolder
|
||||
}
|
||||
|
||||
func (p PathStructure) NhostFolder() string {
|
||||
return p.nhostFolder
|
||||
}
|
||||
|
||||
func (p PathStructure) AuthFile() string {
|
||||
return filepath.Join(PathStateHome(), "auth.json")
|
||||
}
|
||||
|
||||
func (p PathStructure) NhostToml() string {
|
||||
return filepath.Join(p.nhostFolder, "nhost.toml")
|
||||
}
|
||||
|
||||
func (p PathStructure) OverlaysFolder() string {
|
||||
return filepath.Join(p.nhostFolder, "overlays")
|
||||
}
|
||||
|
||||
func (p PathStructure) Overlay(subdomain string) string {
|
||||
return filepath.Join(p.OverlaysFolder(), subdomain+".json")
|
||||
}
|
||||
|
||||
func (p PathStructure) Secrets() string {
|
||||
return filepath.Join(p.root, ".secrets")
|
||||
}
|
||||
|
||||
func (p PathStructure) HasuraConfig() string {
|
||||
return filepath.Join(p.nhostFolder, "config.yaml")
|
||||
}
|
||||
|
||||
func (p PathStructure) ProjectFile() string {
|
||||
return filepath.Join(p.dotNhostFolder, "project.json")
|
||||
}
|
||||
|
||||
func (p PathStructure) DockerCompose() string {
|
||||
return filepath.Join(p.dotNhostFolder, "docker-compose.yaml")
|
||||
}
|
||||
|
||||
func (p PathStructure) Functions() string {
|
||||
return filepath.Join(p.root, "functions")
|
||||
}
|
||||
|
||||
func PathExists(path string) bool {
|
||||
_, err := os.Stat(path)
|
||||
return !os.IsNotExist(err)
|
||||
}
|
||||
|
||||
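// PathStateHome returns the directory used for CLI state such as auth.json:
// $XDG_STATE_HOME/nhost when XDG_STATE_HOME is set, otherwise $HOME/.nhost/state.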
func PathStateHome() string {
|
||||
var path string
|
||||
if os.Getenv("XDG_STATE_HOME") != "" {
|
||||
path = filepath.Join(os.Getenv("XDG_STATE_HOME"), "nhost")
|
||||
} else {
|
||||
path = filepath.Join(os.Getenv("HOME"), ".nhost", "state")
|
||||
}
|
||||
|
||||
return path
|
||||
}
|
||||
|
||||
func (p PathStructure) RunServiceOverlaysFolder(configPath string) string {
|
||||
base := filepath.Dir(configPath)
|
||||
return filepath.Join(base, "nhost", "overlays")
|
||||
}
|
||||
|
||||
func (p PathStructure) RunServiceOverlay(configPath, subdomain string) string {
|
||||
return filepath.Join(p.RunServiceOverlaysFolder(configPath), "run-"+subdomain+".json")
|
||||
}
|
||||
@@ -1,108 +0,0 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/go-git/go-git/v5"
|
||||
"github.com/urfave/cli/v3"
|
||||
)
|
||||
|
||||
const (
|
||||
flagAuthURL = "auth-url"
|
||||
flagGraphqlURL = "graphql-url"
|
||||
flagBranch = "branch"
|
||||
flagProjectName = "project-name"
|
||||
flagRootFolder = "root-folder"
|
||||
flagNhostFolder = "nhost-folder"
|
||||
flagDotNhostFolder = "dot-nhost-folder"
|
||||
flagLocalSubdomain = "local-subdomain"
|
||||
)
|
||||
|
||||
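// getGitBranchName returns the short name of the current HEAD branch, falling back to
// "nogit" when the working directory is not inside a git repository or HEAD cannot be resolved.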
func getGitBranchName() string {
|
||||
repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{
|
||||
DetectDotGit: true,
|
||||
EnableDotGitCommonDir: false,
|
||||
})
|
||||
if err != nil {
|
||||
return "nogit"
|
||||
}
|
||||
|
||||
head, err := repo.Head()
|
||||
if err != nil {
|
||||
return "nogit"
|
||||
}
|
||||
|
||||
return head.Name().Short()
|
||||
}
|
||||
|
||||
func Flags() ([]cli.Flag, error) {
|
||||
fullWorkingDir, err := os.Getwd()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get working directory: %w", err)
|
||||
}
|
||||
|
||||
branch := getGitBranchName()
|
||||
|
||||
workingDir := "."
|
||||
dotNhostFolder := filepath.Join(workingDir, ".nhost")
|
||||
nhostFolder := filepath.Join(workingDir, "nhost")
|
||||
|
||||
return []cli.Flag{
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagAuthURL,
|
||||
Usage: "Nhost auth URL",
|
||||
Sources: cli.EnvVars("NHOST_CLI_AUTH_URL"),
|
||||
Value: "https://otsispdzcwxyqzbfntmj.auth.eu-central-1.nhost.run/v1",
|
||||
Hidden: true,
|
||||
},
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagGraphqlURL,
|
||||
Usage: "Nhost GraphQL URL",
|
||||
Sources: cli.EnvVars("NHOST_CLI_GRAPHQL_URL"),
|
||||
Value: "https://otsispdzcwxyqzbfntmj.graphql.eu-central-1.nhost.run/v1",
|
||||
Hidden: true,
|
||||
},
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagBranch,
|
||||
Usage: "Git branch name. If not set, it will be detected from the current git repository. This flag is used to dynamically create docker volumes for each branch. If you want to have a static volume name or if you are not using git, set this flag to a static value.", //nolint:lll
|
||||
Sources: cli.EnvVars("BRANCH"),
|
||||
Value: branch,
|
||||
Hidden: false,
|
||||
},
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagRootFolder,
|
||||
Usage: "Root folder of project\n\t",
|
||||
Sources: cli.EnvVars("NHOST_ROOT_FOLDER"),
|
||||
Value: workingDir,
|
||||
Category: "Project structure",
|
||||
},
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagDotNhostFolder,
|
||||
Usage: "Path to .nhost folder\n\t",
|
||||
Sources: cli.EnvVars("NHOST_DOT_NHOST_FOLDER"),
|
||||
Value: dotNhostFolder,
|
||||
Category: "Project structure",
|
||||
},
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagNhostFolder,
|
||||
Usage: "Path to nhost folder\n\t",
|
||||
Sources: cli.EnvVars("NHOST_NHOST_FOLDER"),
|
||||
Value: nhostFolder,
|
||||
Category: "Project structure",
|
||||
},
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagProjectName,
|
||||
Usage: "Project name",
|
||||
Value: filepath.Base(fullWorkingDir),
|
||||
Sources: cli.EnvVars("NHOST_PROJECT_NAME"),
|
||||
},
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagLocalSubdomain,
|
||||
Usage: "Local subdomain to reach the development environment",
|
||||
Value: "local",
|
||||
Sources: cli.EnvVars("NHOST_LOCAL_SUBDOMAIN"),
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
@@ -1,91 +0,0 @@
|
||||
//nolint:gochecknoglobals
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"syscall"
|
||||
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
"golang.org/x/term"
|
||||
)
|
||||
|
||||
const (
|
||||
ANSIColorWhite = lipgloss.Color("15")
|
||||
ANSIColorCyan = lipgloss.Color("14")
|
||||
ANSIColorPurple = lipgloss.Color("13")
|
||||
ANSIColorBlue = lipgloss.Color("12")
|
||||
ANSIColorYellow = lipgloss.Color("11")
|
||||
ANSIColorGreen = lipgloss.Color("10")
|
||||
ANSIColorRed = lipgloss.Color("9")
|
||||
ANSIColorGray = lipgloss.Color("8")
|
||||
)
|
||||
|
||||
const (
|
||||
IconInfo = "ℹ️"
|
||||
IconWarn = "⚠"
|
||||
)
|
||||
|
||||
var info = lipgloss.NewStyle().
|
||||
Foreground(ANSIColorCyan).
|
||||
Render
|
||||
|
||||
var warn = lipgloss.NewStyle().
|
||||
Foreground(ANSIColorYellow).
|
||||
Render
|
||||
|
||||
var promptMessage = lipgloss.NewStyle().
|
||||
Foreground(ANSIColorCyan).
|
||||
Bold(true).
|
||||
Render
|
||||
|
||||
func (ce *CliEnv) Println(msg string, a ...any) {
|
||||
if _, err := fmt.Fprintln(ce.stdout, fmt.Sprintf(msg, a...)); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (ce *CliEnv) Infoln(msg string, a ...any) {
|
||||
if _, err := fmt.Fprintln(ce.stdout, info(fmt.Sprintf(msg, a...))); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (ce *CliEnv) Warnln(msg string, a ...any) {
|
||||
if _, err := fmt.Fprintln(ce.stdout, warn(fmt.Sprintf(msg, a...))); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (ce *CliEnv) PromptMessage(msg string, a ...any) {
|
||||
if _, err := fmt.Fprint(ce.stdout, promptMessage("- "+fmt.Sprintf(msg, a...))); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (ce *CliEnv) PromptInput(hide bool) (string, error) {
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
|
||||
var (
|
||||
response string
|
||||
err error
|
||||
)
|
||||
|
||||
if !hide {
|
||||
response, err = reader.ReadString('\n')
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to read input: %w", err)
|
||||
}
|
||||
} else {
|
||||
output, err := term.ReadPassword(syscall.Stdin)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to read input: %w", err)
|
||||
}
|
||||
|
||||
response = string(output)
|
||||
}
|
||||
|
||||
return strings.TrimSpace(response), err
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
package clienv
|
||||
|
||||
import "github.com/charmbracelet/lipgloss"
|
||||
|
||||
type Column struct {
|
||||
Header string
|
||||
Rows []string
|
||||
}
|
||||
|
||||
func Table(columns ...Column) string {
|
||||
list := lipgloss.NewStyle().
|
||||
Border(lipgloss.NormalBorder(), false, true, false, false).
|
||||
BorderForeground(ANSIColorGray).
|
||||
Padding(1)
|
||||
// Width(30 + 1) //nolint:mnd
|
||||
|
||||
listHeader := lipgloss.NewStyle().
|
||||
Foreground(ANSIColorPurple).
|
||||
Render
|
||||
|
||||
listItem := lipgloss.NewStyle().Render
|
||||
|
||||
strs := make([]string, len(columns))
|
||||
for i, col := range columns {
|
||||
c := make([]string, len(col.Rows)+1)
|
||||
|
||||
c[0] = listHeader(col.Header)
|
||||
for i, row := range col.Rows {
|
||||
c[i+1] = listItem(row)
|
||||
}
|
||||
|
||||
strs[i] = list.Render(
|
||||
lipgloss.JoinVertical(
|
||||
lipgloss.Left,
|
||||
c...,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
return lipgloss.JoinHorizontal(
|
||||
lipgloss.Top,
|
||||
strs...,
|
||||
)
|
||||
}
|
||||
@@ -1,73 +0,0 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/nhost/nhost/cli/nhostclient/graphql"
|
||||
)
|
||||
|
||||
func getRemoteAppInfo(
|
||||
ctx context.Context,
|
||||
ce *CliEnv,
|
||||
subdomain string,
|
||||
) (*graphql.AppSummaryFragment, error) {
|
||||
cl, err := ce.GetNhostClient(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get nhost client: %w", err)
|
||||
}
|
||||
|
||||
resp, err := cl.GetOrganizationsAndWorkspacesApps(
|
||||
ctx,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get workspaces: %w", err)
|
||||
}
|
||||
|
||||
for _, workspace := range resp.Workspaces {
|
||||
for _, app := range workspace.Apps {
|
||||
if app.Subdomain == subdomain {
|
||||
return app, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, organization := range resp.Organizations {
|
||||
for _, app := range organization.Apps {
|
||||
if app.Subdomain == subdomain {
|
||||
return app, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("failed to find app with subdomain: %s", subdomain) //nolint:err113
|
||||
}
|
||||
|
||||
func (ce *CliEnv) GetAppInfo(
|
||||
ctx context.Context,
|
||||
subdomain string,
|
||||
) (*graphql.AppSummaryFragment, error) {
|
||||
if subdomain != "" {
|
||||
return getRemoteAppInfo(ctx, ce, subdomain)
|
||||
}
|
||||
|
||||
var project *graphql.AppSummaryFragment
|
||||
if err := UnmarshalFile(ce.Path.ProjectFile(), &project, json.Unmarshal); err != nil {
|
||||
if errors.Is(err, os.ErrNotExist) {
|
||||
project, err = ce.Link(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
ce.Warnln("Failed to find linked project: %v", err)
|
||||
ce.Infoln("Please run `nhost link` to link a project first")
|
||||
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return project, nil
|
||||
}
|
||||
@@ -1,173 +0,0 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
"github.com/nhost/nhost/cli/nhostclient/graphql"
|
||||
)
|
||||
|
||||
func Printlist(ce *CliEnv, orgs *graphql.GetOrganizationsAndWorkspacesApps) error {
|
||||
if len(orgs.GetWorkspaces())+len(orgs.GetOrganizations()) == 0 {
|
||||
return errors.New("no apps found") //nolint:err113
|
||||
}
|
||||
|
||||
num := Column{
|
||||
Header: "#",
|
||||
Rows: make([]string, 0),
|
||||
}
|
||||
subdomain := Column{
|
||||
Header: "Subdomain",
|
||||
Rows: make([]string, 0),
|
||||
}
|
||||
project := Column{
|
||||
Header: "Project",
|
||||
Rows: make([]string, 0),
|
||||
}
|
||||
organization := Column{
|
||||
Header: "Organization/Workspace",
|
||||
Rows: make([]string, 0),
|
||||
}
|
||||
region := Column{
|
||||
Header: "Region",
|
||||
Rows: make([]string, 0),
|
||||
}
|
||||
|
||||
for _, org := range orgs.GetOrganizations() {
|
||||
for _, app := range org.Apps {
|
||||
num.Rows = append(num.Rows, strconv.Itoa(len(num.Rows)+1))
|
||||
subdomain.Rows = append(subdomain.Rows, app.Subdomain)
|
||||
project.Rows = append(project.Rows, app.Name)
|
||||
organization.Rows = append(organization.Rows, org.Name)
|
||||
region.Rows = append(region.Rows, app.Region.Name)
|
||||
}
|
||||
}
|
||||
|
||||
for _, ws := range orgs.GetWorkspaces() {
|
||||
for _, app := range ws.Apps {
|
||||
num.Rows = append(num.Rows, strconv.Itoa(len(num.Rows)+1))
|
||||
subdomain.Rows = append(subdomain.Rows, app.Subdomain)
|
||||
project.Rows = append(project.Rows, app.Name)
|
||||
organization.Rows = append(organization.Rows, ws.Name+"*")
|
||||
region.Rows = append(region.Rows, app.Region.Name)
|
||||
}
|
||||
}
|
||||
|
||||
ce.Println("%s", Table(num, subdomain, project, organization, region))
|
||||
ce.Println("* Legacy Workspace")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func confirmApp(ce *CliEnv, app *graphql.AppSummaryFragment) error {
|
||||
ce.PromptMessage("Enter project subdomain to confirm: ")
|
||||
|
||||
confirm, err := ce.PromptInput(false)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read input: %w", err)
|
||||
}
|
||||
|
||||
if confirm != app.Subdomain {
|
||||
return errors.New("input doesn't match the subdomain") //nolint:err113
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
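// getApp resolves the 1-based index entered by the user against the same ordering that
// Printlist displays: apps in organizations first, then apps in legacy workspaces.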
func getApp(
|
||||
orgs *graphql.GetOrganizationsAndWorkspacesApps,
|
||||
idx string,
|
||||
) (*graphql.AppSummaryFragment, error) {
|
||||
x := 1
|
||||
|
||||
var app *graphql.AppSummaryFragment
|
||||
|
||||
OUTER:
|
||||
|
||||
for _, orgs := range orgs.GetOrganizations() {
|
||||
for _, a := range orgs.GetApps() {
|
||||
if strconv.Itoa(x) == idx {
|
||||
a := a
|
||||
app = a
|
||||
|
||||
break OUTER
|
||||
}
|
||||
|
||||
x++
|
||||
}
|
||||
}
|
||||
|
||||
if app != nil {
|
||||
return app, nil
|
||||
}
|
||||
|
||||
OUTER2:
|
||||
for _, ws := range orgs.GetWorkspaces() {
|
||||
for _, a := range ws.GetApps() {
|
||||
if strconv.Itoa(x) == idx {
|
||||
a := a
|
||||
app = a
|
||||
|
||||
break OUTER2
|
||||
}
|
||||
|
||||
x++
|
||||
}
|
||||
}
|
||||
|
||||
if app == nil {
|
||||
return nil, errors.New("invalid input") //nolint:err113
|
||||
}
|
||||
|
||||
return app, nil
|
||||
}
|
||||
|
||||
func (ce *CliEnv) Link(ctx context.Context) (*graphql.AppSummaryFragment, error) {
|
||||
cl, err := ce.GetNhostClient(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get nhost client: %w", err)
|
||||
}
|
||||
|
||||
orgs, err := cl.GetOrganizationsAndWorkspacesApps(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get workspaces: %w", err)
|
||||
}
|
||||
|
||||
if len(orgs.GetWorkspaces())+len(orgs.GetOrganizations()) == 0 {
|
||||
return nil, errors.New("no apps found") //nolint:err113
|
||||
}
|
||||
|
||||
if err := Printlist(ce, orgs); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ce.PromptMessage("Select the workspace # to link: ")
|
||||
|
||||
idx, err := ce.PromptInput(false)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read workspace: %w", err)
|
||||
}
|
||||
|
||||
app, err := getApp(orgs, idx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := confirmApp(ce, app); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := os.MkdirAll(ce.Path.DotNhostFolder(), 0o755); err != nil { //nolint:mnd
|
||||
return nil, fmt.Errorf("failed to create .nhost folder: %w", err)
|
||||
}
|
||||
|
||||
if err := MarshalFile(app, ce.Path.ProjectFile(), json.Marshal); err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal project information: %w", err)
|
||||
}
|
||||
|
||||
return app, nil
|
||||
}
|
||||
@@ -1,296 +0,0 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto"
|
||||
"crypto/tls"
|
||||
"crypto/x509"
|
||||
"encoding/json"
|
||||
"encoding/pem"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"time"
|
||||
|
||||
"github.com/nhost/nhost/cli/nhostclient"
|
||||
"github.com/nhost/nhost/cli/nhostclient/credentials"
|
||||
"github.com/nhost/nhost/cli/ssl"
|
||||
)
|
||||
|
||||
func savePAT(
|
||||
ce *CliEnv,
|
||||
session credentials.Credentials,
|
||||
) error {
|
||||
dir := filepath.Dir(ce.Path.AuthFile())
|
||||
if !PathExists(dir) {
|
||||
if err := os.MkdirAll(dir, 0o755); err != nil { //nolint:mnd
|
||||
return fmt.Errorf("failed to create dir: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
if err := MarshalFile(session, ce.Path.AuthFile(), json.Marshal); err != nil {
|
||||
return fmt.Errorf("failed to write PAT to file: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
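// signinHandler forwards the refreshToken query parameter from the browser redirect to the
// channel and tells the user the window can be closed.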
func signinHandler(ch chan<- string) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
ch <- r.URL.Query().Get("refreshToken")
|
||||
|
||||
fmt.Fprintf(w, "You may now close this window.")
|
||||
}
|
||||
}
|
||||
|
||||
func openBrowser(ctx context.Context, url string) error {
|
||||
var (
|
||||
cmd string
|
||||
args []string
|
||||
)
|
||||
|
||||
switch runtime.GOOS {
|
||||
case "darwin":
|
||||
cmd = "open"
|
||||
default: // "linux", "freebsd", "openbsd", "netbsd"
|
||||
cmd = "xdg-open"
|
||||
}
|
||||
|
||||
args = append(args, url)
|
||||
if err := exec.CommandContext(ctx, cmd, args...).Start(); err != nil {
|
||||
return fmt.Errorf("failed to open browser: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
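// getTLSServer builds an HTTPS server on :8099 using the certificate and key bundled in the
// ssl package; it is used to receive the browser redirect during GitHub sign-in.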
func getTLSServer() (*http.Server, error) {
|
||||
block, _ := pem.Decode(ssl.LocalKeyFile)
|
||||
// Parse the PEM data to obtain the private key
|
||||
privateKey, err := x509.ParsePKCS8PrivateKey(block.Bytes)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse private key: %w", err)
|
||||
}
|
||||
|
||||
// Type assert the private key to crypto.PrivateKey
|
||||
pk, ok := privateKey.(crypto.PrivateKey)
|
||||
if !ok {
|
||||
return nil, errors.New( //nolint:err113
|
||||
"failed to type assert private key to crypto.PrivateKey",
|
||||
)
|
||||
}
|
||||
|
||||
block, _ = pem.Decode(ssl.LocalCertFile)
|
||||
|
||||
certificate, err := x509.ParseCertificate(block.Bytes)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse certificate: %w", err)
|
||||
}
|
||||
|
||||
tlsConfig := &tls.Config{ //nolint:exhaustruct
|
||||
MinVersion: tls.VersionTLS12,
|
||||
CipherSuites: nil,
|
||||
Certificates: []tls.Certificate{
|
||||
{ //nolint:exhaustruct
|
||||
Certificate: [][]byte{certificate.Raw},
|
||||
PrivateKey: pk,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
return &http.Server{ //nolint:exhaustruct
|
||||
Addr: ":8099",
|
||||
TLSConfig: tlsConfig,
|
||||
ReadHeaderTimeout: time.Second * 10, //nolint:mnd
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (ce *CliEnv) loginPAT(pat string) credentials.Credentials {
|
||||
session := credentials.Credentials{
|
||||
ID: "",
|
||||
PersonalAccessToken: pat,
|
||||
}
|
||||
|
||||
return session
|
||||
}
|
||||
|
||||
func (ce *CliEnv) loginEmailPassword(
|
||||
ctx context.Context,
|
||||
email string,
|
||||
password string,
|
||||
) (credentials.Credentials, error) {
|
||||
cl := nhostclient.New(ce.AuthURL(), ce.GraphqlURL())
|
||||
|
||||
var err error
|
||||
|
||||
if email == "" {
|
||||
ce.PromptMessage("email: ")
|
||||
|
||||
email, err = ce.PromptInput(false)
|
||||
if err != nil {
|
||||
return credentials.Credentials{}, fmt.Errorf("failed to read email: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
if password == "" {
|
||||
ce.PromptMessage("password: ")
|
||||
password, err = ce.PromptInput(true)
|
||||
ce.Println("")
|
||||
|
||||
if err != nil {
|
||||
return credentials.Credentials{}, fmt.Errorf("failed to read password: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
ce.Infoln("Authenticating")
|
||||
|
||||
loginResp, err := cl.Login(ctx, email, password)
|
||||
if err != nil {
|
||||
return credentials.Credentials{}, fmt.Errorf("failed to login: %w", err)
|
||||
}
|
||||
|
||||
session, err := cl.CreatePAT(ctx, loginResp.Session.AccessToken)
|
||||
if err != nil {
|
||||
return credentials.Credentials{}, fmt.Errorf("failed to create PAT: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("Successfully logged in")
|
||||
|
||||
return session, nil
|
||||
}
|
||||
|
||||
func (ce *CliEnv) loginGithub(ctx context.Context) (credentials.Credentials, error) {
|
||||
refreshToken := make(chan string)
|
||||
http.HandleFunc("/signin", signinHandler(refreshToken))
|
||||
|
||||
go func() {
|
||||
server, err := getTLSServer()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
if err := server.ListenAndServeTLS("", ""); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}()
|
||||
|
||||
signinPage := ce.AuthURL() + "/signin/provider/github/?redirectTo=https://local.dashboard.local.nhost.run:8099/signin"
|
||||
ce.Infoln("Opening browser to sign-in")
|
||||
|
||||
if err := openBrowser(ctx, signinPage); err != nil {
|
||||
return credentials.Credentials{}, err
|
||||
}
|
||||
|
||||
ce.Infoln("Waiting for sign-in to complete")
|
||||
|
||||
refreshTokenValue := <-refreshToken
|
||||
|
||||
cl := nhostclient.New(ce.AuthURL(), ce.GraphqlURL())
|
||||
|
||||
refreshTokenResp, err := cl.RefreshToken(ctx, refreshTokenValue)
|
||||
if err != nil {
|
||||
return credentials.Credentials{}, fmt.Errorf("failed to get access token: %w", err)
|
||||
}
|
||||
|
||||
session, err := cl.CreatePAT(ctx, refreshTokenResp.AccessToken)
|
||||
if err != nil {
|
||||
return credentials.Credentials{}, fmt.Errorf("failed to create PAT: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("Successfully logged in")
|
||||
|
||||
return session, nil
|
||||
}
|
||||
|
||||
func (ce *CliEnv) loginMethod(ctx context.Context) (credentials.Credentials, error) {
|
||||
ce.Infoln("Select authentication method:\n1. PAT\n2. Email/Password\n3. Github")
|
||||
ce.PromptMessage("method: ")
|
||||
|
||||
method, err := ce.PromptInput(false)
|
||||
if err != nil {
|
||||
return credentials.Credentials{}, fmt.Errorf(
|
||||
"failed to read authentication method: %w",
|
||||
err,
|
||||
)
|
||||
}
|
||||
|
||||
var session credentials.Credentials
|
||||
|
||||
switch method {
|
||||
case "1":
|
||||
ce.PromptMessage("PAT: ")
|
||||
|
||||
pat, err := ce.PromptInput(true)
|
||||
if err != nil {
|
||||
return credentials.Credentials{}, fmt.Errorf("failed to read PAT: %w", err)
|
||||
}
|
||||
|
||||
session = ce.loginPAT(pat)
|
||||
case "2":
|
||||
session, err = ce.loginEmailPassword(ctx, "", "")
|
||||
case "3":
|
||||
session, err = ce.loginGithub(ctx)
|
||||
default:
|
||||
return ce.loginMethod(ctx)
|
||||
}
|
||||
|
||||
return session, err
|
||||
}
|
||||
|
||||
func (ce *CliEnv) verifyEmail(
|
||||
ctx context.Context,
|
||||
email string,
|
||||
) error {
|
||||
ce.Infoln("Your email address is not verified")
|
||||
|
||||
cl := nhostclient.New(ce.AuthURL(), ce.GraphqlURL())
|
||||
if err := cl.VerifyEmail(ctx, email); err != nil {
|
||||
return fmt.Errorf("failed to send verification email: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("A verification email has been sent to %s", email)
|
||||
ce.Infoln("Please verify your email address and try again")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ce *CliEnv) Login(
|
||||
ctx context.Context,
|
||||
pat string,
|
||||
email string,
|
||||
password string,
|
||||
) (credentials.Credentials, error) {
|
||||
var (
|
||||
session credentials.Credentials
|
||||
err error
|
||||
)
|
||||
|
||||
switch {
|
||||
case pat != "":
|
||||
session = ce.loginPAT(pat)
|
||||
case email != "" || password != "":
|
||||
session, err = ce.loginEmailPassword(ctx, email, password)
|
||||
default:
|
||||
session, err = ce.loginMethod(ctx)
|
||||
}
|
||||
|
||||
var reqErr *nhostclient.RequestError
|
||||
if errors.As(err, &reqErr) && reqErr.ErrorCode == "unverified-user" {
|
||||
return credentials.Credentials{}, ce.verifyEmail(ctx, email)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return session, err
|
||||
}
|
||||
|
||||
if err := savePAT(ce, session); err != nil {
|
||||
return credentials.Credentials{}, err
|
||||
}
|
||||
|
||||
return session, nil
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
)
|
||||
|
||||
var ErrNoContent = errors.New("no content")
|
||||
|
||||
func UnmarshalFile(filepath string, v any, f func([]byte, any) error) error {
|
||||
r, err := os.OpenFile(filepath, os.O_RDONLY, 0o600) //nolint:mnd
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open file: %w", err)
|
||||
}
|
||||
defer r.Close()
|
||||
|
||||
b, err := io.ReadAll(r)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read contents of reader: %w", err)
|
||||
}
|
||||
|
||||
if len(b) == 0 {
|
||||
return ErrNoContent
|
||||
}
|
||||
|
||||
if err := f(b, v); err != nil {
|
||||
return fmt.Errorf("failed to unmarshal object: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func MarshalFile(v any, filepath string, fn func(any) ([]byte, error)) error {
|
||||
f, err := os.OpenFile(filepath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o600) //nolint:mnd
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open file: %w", err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
b, err := fn(v)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error marshalling object: %w", err)
|
||||
}
|
||||
|
||||
if _, err := f.Write(b); err != nil {
|
||||
return fmt.Errorf("error writing marshalled object: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,40 +0,0 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/nhost/nhost/cli/nhostclient"
|
||||
"github.com/nhost/nhost/cli/nhostclient/credentials"
|
||||
)
|
||||
|
||||
func (ce *CliEnv) LoadSession(
|
||||
ctx context.Context,
|
||||
) (credentials.Session, error) {
|
||||
var creds credentials.Credentials
|
||||
if err := UnmarshalFile(ce.Path.AuthFile(), &creds, json.Unmarshal); err != nil {
|
||||
creds, err = ce.Login(ctx, "", "", "")
|
||||
if err != nil {
|
||||
return credentials.Session{}, fmt.Errorf("failed to login: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
cl := nhostclient.New(ce.AuthURL(), ce.GraphqlURL())
|
||||
|
||||
session, err := cl.LoginPAT(ctx, creds.PersonalAccessToken)
|
||||
if err != nil {
|
||||
return credentials.Session{}, fmt.Errorf("failed to login: %w", err)
|
||||
}
|
||||
|
||||
return session, nil
|
||||
}
|
||||
|
||||
func (ce *CliEnv) Credentials() (credentials.Credentials, error) {
|
||||
var creds credentials.Credentials
|
||||
if err := UnmarshalFile(ce.Path.AuthFile(), &creds, json.Unmarshal); err != nil {
|
||||
return credentials.Credentials{}, err
|
||||
}
|
||||
|
||||
return creds, nil
|
||||
}
|
||||
@@ -1,106 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/nhost/be/services/mimir/model"
|
||||
"github.com/nhost/nhost/cli/clienv"
|
||||
"github.com/urfave/cli/v3"
|
||||
)
|
||||
|
||||
func CommandApply() *cli.Command {
|
||||
return &cli.Command{ //nolint:exhaustruct
|
||||
Name: "apply",
|
||||
Aliases: []string{},
|
||||
Usage: "Apply configuration to cloud project",
|
||||
Action: commandApply,
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagSubdomain,
|
||||
Usage: "Subdomain of the Nhost project to apply configuration to. Defaults to linked project",
|
||||
Required: true,
|
||||
Sources: cli.EnvVars("NHOST_SUBDOMAIN"),
|
||||
},
|
||||
&cli.BoolFlag{ //nolint:exhaustruct
|
||||
Name: flagYes,
|
||||
Usage: "Skip confirmation",
|
||||
Sources: cli.EnvVars("NHOST_YES"),
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func commandApply(ctx context.Context, cmd *cli.Command) error {
|
||||
ce := clienv.FromCLI(cmd)
|
||||
|
||||
proj, err := ce.GetAppInfo(ctx, cmd.String(flagSubdomain))
|
||||
if err != nil {
|
||||
return cli.Exit(fmt.Sprintf("Failed to get app info: %v", err), 1)
|
||||
}
|
||||
|
||||
ce.Infoln("Validating configuration...")
|
||||
|
||||
cfg, _, err := ValidateRemote(
|
||||
ctx,
|
||||
ce,
|
||||
proj.GetSubdomain(),
|
||||
proj.GetID(),
|
||||
)
|
||||
if err != nil {
|
||||
return cli.Exit(err.Error(), 1)
|
||||
}
|
||||
|
||||
if err := Apply(ctx, ce, proj.ID, cfg, cmd.Bool(flagYes)); err != nil {
|
||||
return cli.Exit(err.Error(), 1)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func Apply(
|
||||
ctx context.Context,
|
||||
ce *clienv.CliEnv,
|
||||
appID string,
|
||||
cfg *model.ConfigConfig,
|
||||
skipConfirmation bool,
|
||||
) error {
|
||||
if !skipConfirmation {
|
||||
ce.PromptMessage(
|
||||
"We are going to overwrite the project's configuration. Do you want to proceed? [y/N] ",
|
||||
)
|
||||
|
||||
resp, err := ce.PromptInput(false)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read input: %w", err)
|
||||
}
|
||||
|
||||
if resp != "y" && resp != "Y" {
|
||||
return errors.New("aborting") //nolint:err113
|
||||
}
|
||||
}
|
||||
|
||||
cl, err := ce.GetNhostClient(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get nhost client: %w", err)
|
||||
}
|
||||
|
||||
b, err := json.Marshal(cfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal config: %w", err)
|
||||
}
|
||||
|
||||
if _, err := cl.ReplaceConfigRawJSON(
|
||||
ctx,
|
||||
appID,
|
||||
string(b),
|
||||
); err != nil {
|
||||
return fmt.Errorf("failed to apply config: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("Configuration applied successfully!")
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
package config
|
||||
|
||||
import "github.com/urfave/cli/v3"
|
||||
|
||||
const flagSubdomain = "subdomain"
|
||||
|
||||
func Command() *cli.Command {
|
||||
return &cli.Command{ //nolint:exhaustruct
|
||||
Name: "config",
|
||||
Aliases: []string{},
|
||||
Usage: "Perform config operations",
|
||||
Commands: []*cli.Command{
|
||||
CommandDefault(),
|
||||
CommandExample(),
|
||||
CommandApply(),
|
||||
CommandPull(),
|
||||
CommandShow(),
|
||||
CommandValidate(),
|
||||
CommandEdit(),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -1,59 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/nhost/nhost/cli/clienv"
|
||||
"github.com/nhost/nhost/cli/project"
|
||||
"github.com/nhost/nhost/cli/project/env"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/urfave/cli/v3"
|
||||
)
|
||||
|
||||
func CommandDefault() *cli.Command {
|
||||
return &cli.Command{ //nolint:exhaustruct
|
||||
Name: "default",
|
||||
Aliases: []string{},
|
||||
Usage: "Create default configuration and secrets",
|
||||
Action: commandDefault,
|
||||
Flags: []cli.Flag{},
|
||||
}
|
||||
}
|
||||
|
||||
func commandDefault(_ context.Context, cmd *cli.Command) error {
|
||||
ce := clienv.FromCLI(cmd)
|
||||
|
||||
if err := os.MkdirAll(ce.Path.NhostFolder(), 0o755); err != nil { //nolint:mnd
|
||||
return fmt.Errorf("failed to create nhost folder: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("Initializing Nhost project")
|
||||
|
||||
if err := InitConfigAndSecrets(ce); err != nil {
|
||||
return fmt.Errorf("failed to initialize project: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("Successfully generated default configuration and secrets")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func InitConfigAndSecrets(ce *clienv.CliEnv) error {
|
||||
config, err := project.DefaultConfig()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create default config: %w", err)
|
||||
}
|
||||
|
||||
if err := clienv.MarshalFile(config, ce.Path.NhostToml(), toml.Marshal); err != nil {
|
||||
return fmt.Errorf("failed to save config: %w", err)
|
||||
}
|
||||
|
||||
secrets := project.DefaultSecrets()
|
||||
if err := clienv.MarshalFile(secrets, ce.Path.Secrets(), env.Marshal); err != nil {
|
||||
return fmt.Errorf("failed to save secrets: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,182 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
|
||||
"github.com/nhost/be/services/mimir/model"
|
||||
"github.com/nhost/nhost/cli/clienv"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/urfave/cli/v3"
|
||||
"github.com/wI2L/jsondiff"
|
||||
)
|
||||
|
||||
const (
|
||||
flagEditor = "editor"
|
||||
)
|
||||
|
||||
func CommandEdit() *cli.Command {
|
||||
return &cli.Command{ //nolint:exhaustruct
|
||||
Name: "edit",
|
||||
Aliases: []string{},
|
||||
Usage: "Edit base configuration or an overlay",
|
||||
Action: edit,
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagSubdomain,
|
||||
Usage: "If specified, edit this subdomain's overlay, otherwise edit base configuration",
|
||||
Sources: cli.EnvVars("NHOST_SUBDOMAIN"),
|
||||
},
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagEditor,
|
||||
Usage: "Editor to use",
|
||||
Value: "vim",
|
||||
Sources: cli.EnvVars("EDITOR"),
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func EditFile(ctx context.Context, editor, filepath string) error {
|
||||
cmd := exec.CommandContext(
|
||||
ctx,
|
||||
editor,
|
||||
filepath,
|
||||
)
|
||||
cmd.Stdin = os.Stdin
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
|
||||
if err := cmd.Run(); err != nil {
|
||||
return fmt.Errorf("failed to open editor: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
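// CopyConfig reads the base TOML configuration from src, applies the overlay's JSON patches
// when overlayPath exists, and writes the result to dst.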
func CopyConfig[T any](src, dst, overlayPath string) error {
|
||||
var cfg *T
|
||||
if err := clienv.UnmarshalFile(src, &cfg, toml.Unmarshal); err != nil {
|
||||
return fmt.Errorf("failed to parse config: %w", err)
|
||||
}
|
||||
|
||||
var err error
|
||||
if clienv.PathExists(overlayPath) {
|
||||
cfg, err = ApplyJSONPatches(*cfg, overlayPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to apply json patches: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
if err := clienv.MarshalFile(cfg, dst, toml.Marshal); err != nil {
|
||||
return fmt.Errorf("failed to save temporary file: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func readFile(filepath string) (any, error) {
|
||||
f, err := os.Open(filepath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open file: %w", err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
b, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read file: %w", err)
|
||||
}
|
||||
|
||||
var v any
|
||||
if err := toml.Unmarshal(b, &v); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal toml: %w", err)
|
||||
}
|
||||
|
||||
return v, nil
|
||||
}
|
||||
|
||||
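// GenerateJSONPatch decodes both TOML files, computes the JSON patch that turns the original
// into the edited version, sorts the operations by path, and writes the prettified result to dst.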
func GenerateJSONPatch(origfilepath, newfilepath, dst string) error {
|
||||
origo, err := readFile(origfilepath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to convert original toml to json: %w", err)
|
||||
}
|
||||
|
||||
newo, err := readFile(newfilepath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to convert new toml to json: %w", err)
|
||||
}
|
||||
|
||||
patches, err := jsondiff.Compare(origo, newo)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate json patch: %w", err)
|
||||
}
|
||||
|
||||
dstf, err := os.OpenFile(dst, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0o644) //nolint:mnd
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open destination file: %w", err)
|
||||
}
|
||||
defer dstf.Close()
|
||||
|
||||
sort.Slice(patches, func(i, j int) bool {
|
||||
return patches[i].Path < patches[j].Path
|
||||
})
|
||||
|
||||
dstb, err := json.MarshalIndent(patches, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to prettify json: %w", err)
|
||||
}
|
||||
|
||||
if _, err := dstf.Write(dstb); err != nil {
|
||||
return fmt.Errorf("failed to write to destination file: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func edit(ctx context.Context, cmd *cli.Command) error {
|
||||
ce := clienv.FromCLI(cmd)
|
||||
|
||||
if cmd.String(flagSubdomain) == "" {
|
||||
if err := EditFile(ctx, cmd.String(flagEditor), ce.Path.NhostToml()); err != nil {
|
||||
return fmt.Errorf("failed to edit config: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := os.MkdirAll(ce.Path.OverlaysFolder(), 0o755); err != nil { //nolint:mnd
|
||||
return fmt.Errorf("failed to create json patches directory: %w", err)
|
||||
}
|
||||
|
||||
tmpdir, err := os.MkdirTemp(os.TempDir(), "nhost-jsonpatch")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create temporary directory: %w", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpdir)
|
||||
|
||||
tmpfileName := filepath.Join(tmpdir, "nhost.toml")
|
||||
|
||||
if err := CopyConfig[model.ConfigConfig](
|
||||
ce.Path.NhostToml(), tmpfileName, ce.Path.Overlay(cmd.String(flagSubdomain)),
|
||||
); err != nil {
|
||||
return fmt.Errorf("failed to copy config: %w", err)
|
||||
}
|
||||
|
||||
if err := EditFile(ctx, cmd.String(flagEditor), tmpfileName); err != nil {
|
||||
return fmt.Errorf("failed to edit config: %w", err)
|
||||
}
|
||||
|
||||
if err := GenerateJSONPatch(
|
||||
ce.Path.NhostToml(), tmpfileName, ce.Path.Overlay(cmd.String(flagSubdomain)),
|
||||
); err != nil {
|
||||
return fmt.Errorf("failed to generate json patch: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -1,555 +0,0 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/nhost/be/services/mimir/model"
|
||||
"github.com/nhost/be/services/mimir/schema"
|
||||
"github.com/nhost/nhost/cli/clienv"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/urfave/cli/v3"
|
||||
)
|
||||
|
||||
func CommandExample() *cli.Command {
|
||||
return &cli.Command{ //nolint:exhaustruct
|
||||
Name: "example",
|
||||
Aliases: []string{},
|
||||
Usage: "Shows an example config file",
|
||||
Action: commandExample,
|
||||
Flags: []cli.Flag{},
|
||||
}
|
||||
}
|
||||
|
||||
func ptr[T any](v T) *T { return &v }
|
||||
|
||||
func commandExample(_ context.Context, cmd *cli.Command) error { //nolint:funlen,maintidx
|
||||
ce := clienv.FromCLI(cmd)
|
||||
|
||||
//nolint:mnd
|
||||
cfg := model.ConfigConfig{
|
||||
Global: &model.ConfigGlobal{
|
||||
Environment: []*model.ConfigGlobalEnvironmentVariable{
|
||||
{
|
||||
Name: "NAME",
|
||||
Value: "value",
|
||||
},
|
||||
},
|
||||
},
|
||||
Ai: &model.ConfigAI{
|
||||
Version: ptr("0.3.0"),
|
||||
Resources: &model.ConfigAIResources{
|
||||
Compute: &model.ConfigComputeResources{
|
||||
Cpu: 256,
|
||||
Memory: 512,
|
||||
},
|
||||
},
|
||||
Openai: &model.ConfigAIOpenai{
|
||||
Organization: ptr("org-id"),
|
||||
ApiKey: "openai-api-key",
|
||||
},
|
||||
AutoEmbeddings: &model.ConfigAIAutoEmbeddings{
|
||||
SynchPeriodMinutes: ptr(uint32(10)),
|
||||
},
|
||||
WebhookSecret: "this-is-a-webhook-secret",
|
||||
},
|
||||
Graphql: &model.ConfigGraphql{
|
||||
Security: &model.ConfigGraphqlSecurity{
|
||||
ForbidAminSecret: ptr(true),
|
||||
MaxDepthQueries: ptr(uint(4)),
|
||||
},
|
||||
},
|
||||
Hasura: &model.ConfigHasura{
|
||||
Version: new(string),
|
||||
JwtSecrets: []*model.ConfigJWTSecret{
|
||||
{
|
||||
Type: ptr("HS256"),
|
||||
Key: ptr("secret"),
|
||||
},
|
||||
},
|
||||
AdminSecret: "adminsecret",
|
||||
WebhookSecret: "webhooksecret",
|
||||
Settings: &model.ConfigHasuraSettings{
|
||||
CorsDomain: []string{"*"},
|
||||
DevMode: ptr(false),
|
||||
EnableAllowList: ptr(true),
|
||||
EnableConsole: ptr(true),
|
||||
EnableRemoteSchemaPermissions: ptr(true),
|
||||
EnabledAPIs: []string{
|
||||
"metadata",
|
||||
},
|
||||
InferFunctionPermissions: ptr(true),
|
||||
LiveQueriesMultiplexedRefetchInterval: ptr(uint32(1000)),
|
||||
StringifyNumericTypes: ptr(false),
|
||||
},
|
||||
AuthHook: &model.ConfigHasuraAuthHook{
|
||||
Url: "https://customauth.example.com/hook",
|
||||
Mode: ptr("POST"),
|
||||
SendRequestBody: ptr(true),
|
||||
},
|
||||
Logs: &model.ConfigHasuraLogs{
|
||||
Level: ptr("warn"),
|
||||
},
|
||||
Events: &model.ConfigHasuraEvents{
|
||||
HttpPoolSize: ptr(uint32(10)),
|
||||
},
|
||||
Resources: &model.ConfigResources{
|
||||
Compute: &model.ConfigResourcesCompute{
|
||||
Cpu: 500,
|
||||
Memory: 1024,
|
||||
},
|
||||
Replicas: ptr(uint8(1)),
|
||||
Networking: &model.ConfigNetworking{
|
||||
					Ingresses: []*model.ConfigIngress{
						{
							Fqdn: []string{"hasura.example.com"},
							Tls: &model.ConfigIngressTls{
								ClientCA: ptr(
									"---BEGIN CERTIFICATE---\n...\n---END CERTIFICATE---",
								),
							},
						},
					},
				},
				Autoscaler: nil,
			},
			RateLimit: &model.ConfigRateLimit{
				Limit: 100,
				Interval: "15m",
			},
		},
		Functions: &model.ConfigFunctions{
			Node: &model.ConfigFunctionsNode{
				Version: ptr(int(22)),
			},
			Resources: &model.ConfigFunctionsResources{
				Networking: &model.ConfigNetworking{
					Ingresses: []*model.ConfigIngress{
						{
							Fqdn: []string{"hasura.example.com"},
							Tls: &model.ConfigIngressTls{
								ClientCA: ptr(
									"---BEGIN CERTIFICATE---\n...\n---END CERTIFICATE---",
								),
							},
						},
					},
				},
			},
			RateLimit: &model.ConfigRateLimit{
				Limit: 100,
				Interval: "15m",
			},
		},
		Auth: &model.ConfigAuth{
			Version: ptr("0.25.0"),
			Misc: &model.ConfigAuthMisc{
				ConcealErrors: ptr(false),
			},
			ElevatedPrivileges: &model.ConfigAuthElevatedPrivileges{
				Mode: ptr("required"),
			},
			Resources: &model.ConfigResources{
				Compute: &model.ConfigResourcesCompute{
					Cpu: 250,
					Memory: 512,
				},
				Replicas: ptr(uint8(1)),
				Networking: &model.ConfigNetworking{
					Ingresses: []*model.ConfigIngress{
						{
							Fqdn: []string{"auth.example.com"},
							Tls: &model.ConfigIngressTls{
								ClientCA: ptr(
									"---BEGIN CERTIFICATE---\n...\n---END CERTIFICATE---",
								),
							},
						},
					},
				},
				Autoscaler: nil,
			},
			Redirections: &model.ConfigAuthRedirections{
				ClientUrl: ptr("https://example.com"),
				AllowedUrls: []string{
					"https://example.com",
				},
			},
			SignUp: &model.ConfigAuthSignUp{
				Enabled: ptr(true),
				DisableNewUsers: ptr(false),
				Turnstile: &model.ConfigAuthSignUpTurnstile{
					SecretKey: "turnstileSecretKey",
				},
			},
			User: &model.ConfigAuthUser{
				Roles: &model.ConfigAuthUserRoles{
					Default: ptr("user"),
					Allowed: []string{"user", "me"},
				},
				Locale: &model.ConfigAuthUserLocale{
					Default: ptr("en"),
					Allowed: []string{"en"},
				},
				Gravatar: &model.ConfigAuthUserGravatar{
					Enabled: ptr(true),
					Default: ptr("identicon"),
					Rating: ptr("g"),
				},
				Email: &model.ConfigAuthUserEmail{
					Allowed: []string{"asd@example.org"},
					Blocked: []string{"asd@example.com"},
				},
				EmailDomains: &model.ConfigAuthUserEmailDomains{
					Allowed: []string{"example.com"},
					Blocked: []string{"example.org"},
				},
			},
			Session: &model.ConfigAuthSession{
				AccessToken: &model.ConfigAuthSessionAccessToken{
					ExpiresIn: ptr(uint32(3600)),
					CustomClaims: []*model.ConfigAuthsessionaccessTokenCustomClaims{
						{
							Key: "key",
							Value: "value",
							Default: ptr("default-value"),
						},
					},
				},
				RefreshToken: &model.ConfigAuthSessionRefreshToken{
					ExpiresIn: ptr(uint32(3600)),
				},
			},
			Method: &model.ConfigAuthMethod{
				Anonymous: &model.ConfigAuthMethodAnonymous{
					Enabled: ptr(false),
				},
				Otp: &model.ConfigAuthMethodOtp{
					Email: &model.ConfigAuthMethodOtpEmail{
						Enabled: ptr(true),
					},
				},
				EmailPasswordless: &model.ConfigAuthMethodEmailPasswordless{
					Enabled: ptr(true),
				},
				EmailPassword: &model.ConfigAuthMethodEmailPassword{
					HibpEnabled: ptr(true),
					EmailVerificationRequired: ptr(true),
					PasswordMinLength: ptr(uint8(12)),
				},
				SmsPasswordless: &model.ConfigAuthMethodSmsPasswordless{
					Enabled: ptr(true),
				},
				Oauth: &model.ConfigAuthMethodOauth{
					Apple: &model.ConfigAuthMethodOauthApple{
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						KeyId: ptr("keyid"),
						TeamId: ptr("teamid"),
						Scope: []string{"scope"},
						PrivateKey: ptr("privatekey"),
						Audience: ptr("audience"),
					},
					Azuread: &model.ConfigAuthMethodOauthAzuread{
						Tenant: ptr("tenant"),
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						ClientSecret: ptr("clientsecret"),
					},
					Bitbucket: &model.ConfigStandardOauthProvider{
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						ClientSecret: ptr("clientsecret"),
					},
					Discord: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						Scope: []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience: ptr("audience"),
					},
					Entraid: &model.ConfigAuthMethodOauthEntraid{
						ClientId: ptr("entraidClientId"),
						ClientSecret: ptr("entraidClientSecret"),
						Enabled: ptr(true),
						Tenant: ptr("entraidTenant"),
					},
					Facebook: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						Scope: []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience: ptr("audience"),
					},
					Github: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						Scope: []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience: ptr("audience"),
					},
					Gitlab: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						Scope: []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience: ptr("audience"),
					},
					Google: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						Scope: []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience: ptr("audience"),
					},
					Linkedin: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						Scope: []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience: ptr("audience"),
					},
					Spotify: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						Scope: []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience: ptr("audience"),
					},
					Strava: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						Scope: []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience: ptr("audience"),
					},
					Twitch: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						Scope: []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience: ptr("audience"),
					},
					Twitter: &model.ConfigAuthMethodOauthTwitter{
						Enabled: ptr(true),
						ConsumerKey: ptr("consumerkey"),
						ConsumerSecret: ptr("consumersecret"),
					},
					Windowslive: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						Scope: []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience: ptr("audience"),
					},
					Workos: &model.ConfigAuthMethodOauthWorkos{
						Connection: ptr("connection"),
						Enabled: ptr(true),
						ClientId: ptr("clientid"),
						Organization: ptr("organization"),
						ClientSecret: ptr("clientsecret"),
					},
				},
				Webauthn: &model.ConfigAuthMethodWebauthn{
					Enabled: ptr(true),
					RelyingParty: &model.ConfigAuthMethodWebauthnRelyingParty{
						Id: ptr("example.com"),
						Name: ptr("name"),
						Origins: []string{
							"https://example.com",
						},
					},
					Attestation: &model.ConfigAuthMethodWebauthnAttestation{
						Timeout: ptr(uint32(60000)),
					},
				},
			},
			Totp: &model.ConfigAuthTotp{
				Enabled: ptr(true),
				Issuer: ptr("issuer"),
			},
			RateLimit: &model.ConfigAuthRateLimit{
				Emails: &model.ConfigRateLimit{
					Limit: 10,
					Interval: "5m",
				},
				Sms: &model.ConfigRateLimit{
					Limit: 10,
					Interval: "5m",
				},
				BruteForce: &model.ConfigRateLimit{
					Limit: 10,
					Interval: "5m",
				},
				Signups: &model.ConfigRateLimit{
					Limit: 10,
					Interval: "5m",
				},
				Global: &model.ConfigRateLimit{
					Limit: 100,
					Interval: "15m",
				},
			},
		},
		Postgres: &model.ConfigPostgres{
			Version: ptr("14-20230312-1"),
			Resources: &model.ConfigPostgresResources{
				Compute: &model.ConfigResourcesCompute{
					Cpu: 2000,
					Memory: 4096,
				},
				EnablePublicAccess: ptr(true),
				Storage: &model.ConfigPostgresResourcesStorage{
					Capacity: 20,
				},
				Replicas: nil,
			},
			Settings: &model.ConfigPostgresSettings{
				Jit: ptr("off"),
				MaxConnections: ptr(int32(100)),
				SharedBuffers: ptr("128MB"),
				EffectiveCacheSize: ptr("4GB"),
				MaintenanceWorkMem: ptr("64MB"),
				CheckpointCompletionTarget: ptr(float64(0.9)),
				WalBuffers: ptr("-1"),
				DefaultStatisticsTarget: ptr(int32(100)),
				RandomPageCost: ptr(float64(4)),
				EffectiveIOConcurrency: ptr(int32(1)),
				WorkMem: ptr("4MB"),
				HugePages: ptr("try"),
				MinWalSize: ptr("80MB"),
				MaxWalSize: ptr("1GB"),
				MaxWorkerProcesses: ptr(int32(8)),
				MaxParallelWorkersPerGather: ptr(int32(2)),
				MaxParallelWorkers: ptr(int32(8)),
				MaxParallelMaintenanceWorkers: ptr(int32(2)),
				WalLevel: ptr("replica"),
				MaxWalSenders: ptr(int32(10)),
				MaxReplicationSlots: ptr(int32(10)),
				ArchiveTimeout: ptr(int32(300)),
				TrackIoTiming: ptr("off"),
			},
			Pitr: &model.ConfigPostgresPitr{
				Retention: ptr(uint8(7)),
			},
		},
		Provider: &model.ConfigProvider{
			Smtp: &model.ConfigSmtp{
				User: "smtpUser",
				Password: "smtpPassword",
				Sender: "smtpSender",
				Host: "smtpHost",
				Port: 587, //nolint:mnd
				Secure: true,
				Method: "LOGIN",
			},
			Sms: &model.ConfigSms{
				Provider: ptr("twilio"),
				AccountSid: "twilioAccountSid",
				AuthToken: "twilioAuthToken",
				MessagingServiceId: "twilioMessagingServiceId",
			},
		},
		Storage: &model.ConfigStorage{
			Version: ptr("0.3.5"),
			Antivirus: &model.ConfigStorageAntivirus{
				Server: ptr("tcp://run-clamav:3310"),
			},
			Resources: &model.ConfigResources{
				Compute: &model.ConfigResourcesCompute{
					Cpu: 500,
					Memory: 1024,
				},
				Networking: nil,
				Replicas: ptr(uint8(1)),
				Autoscaler: nil,
			},
			RateLimit: &model.ConfigRateLimit{
				Limit: 100,
				Interval: "15m",
			},
		},
		Observability: &model.ConfigObservability{
			Grafana: &model.ConfigGrafana{
				AdminPassword: "grafanaAdminPassword",
				Smtp: &model.ConfigGrafanaSmtp{
					Host: "localhost",
					Port: 25,
					Sender: "admin@localhost",
					User: "smtpUser",
					Password: "smtpPassword",
				},
				Alerting: &model.ConfigGrafanaAlerting{
					Enabled: ptr(true),
				},
				Contacts: &model.ConfigGrafanaContacts{
					Emails: []string{
						"engineering@acme.com",
					},
					Pagerduty: []*model.ConfigGrafanacontactsPagerduty{
						{
							IntegrationKey: "integration-key",
							Severity: "critical",
							Class: "infra",
							Component: "backend",
							Group: "group",
						},
					},
					Discord: []*model.ConfigGrafanacontactsDiscord{
						{
							Url: "https://discord.com/api/webhooks/...",
							AvatarUrl: "https://discord.com/api/avatar/...",
						},
					},
					Slack: []*model.ConfigGrafanacontactsSlack{
						{
							Recipient: "recipient",
							Token: "token",
							Username: "username",
							IconEmoji: "danger",
							IconURL: "https://...",
							MentionUsers: []string{
								"user1", "user2",
							},
							MentionGroups: []string{
								"group1", "group2",
							},
							MentionChannel: "channel",
							Url: "https://slack.com/api/webhooks/...",
							EndpointURL: "https://slack.com/api/endpoint/...",
						},
					},
					Webhook: []*model.ConfigGrafanacontactsWebhook{
						{
							Url: "https://webhook.example.com",
							HttpMethod: "POST",
							Username: "user",
							Password: "password",
							AuthorizationScheme: "Bearer",
							AuthorizationCredentials: "token",
							MaxAlerts: 10,
						},
					},
				},
			},
		},
	}

	b, err := toml.Marshal(cfg)
	if err != nil {
		return fmt.Errorf("failed to marshal config: %w", err)
	}

	sch, err := schema.New()
	if err != nil {
		return fmt.Errorf("failed to create schema: %w", err)
	}

	if err := sch.ValidateConfig(cfg); err != nil {
		return fmt.Errorf("failed to validate config: %w", err)
	}

	ce.Println("%s", b)

	return nil
}
@@ -1,205 +0,0 @@
package config

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"os"

	"github.com/nhost/be/services/mimir/model"
	"github.com/nhost/nhost/cli/clienv"
	"github.com/nhost/nhost/cli/nhostclient/graphql"
	"github.com/nhost/nhost/cli/project/env"
	"github.com/nhost/nhost/cli/system"
	"github.com/pelletier/go-toml/v2"
	"github.com/urfave/cli/v3"
)

const (
	DefaultHasuraGraphqlAdminSecret = "nhost-admin-secret" //nolint:gosec
	DefaultGraphqlJWTSecret = "0f987876650b4a085e64594fae9219e7781b17506bec02489ad061fba8cb22db"
	DefaultNhostWebhookSecret = "nhost-webhook-secret" //nolint:gosec
)

const (
	flagYes = "yes"
)

func CommandPull() *cli.Command {
	return &cli.Command{ //nolint:exhaustruct
		Name: "pull",
		Aliases: []string{},
		Usage: "Get cloud configuration",
		Action: commandPull,
		Flags: []cli.Flag{
			&cli.StringFlag{ //nolint:exhaustruct
				Name: flagSubdomain,
				Usage: "Pull this subdomain's configuration. Defaults to linked project",
				Sources: cli.EnvVars("NHOST_SUBDOMAIN"),
			},
			&cli.BoolFlag{ //nolint:exhaustruct
				Name: flagYes,
				Usage: "Skip confirmation",
				Sources: cli.EnvVars("NHOST_YES"),
			},
		},
	}
}

func commandPull(ctx context.Context, cmd *cli.Command) error {
	ce := clienv.FromCLI(cmd)

	skipConfirmation := cmd.Bool(flagYes)

	if !skipConfirmation {
		if err := verifyFile(ce, ce.Path.NhostToml()); err != nil {
			return err
		}
	}

	writeSecrets := true

	if !skipConfirmation {
		if err := verifyFile(ce, ce.Path.Secrets()); err != nil {
			writeSecrets = false
		}
	}

	proj, err := ce.GetAppInfo(ctx, cmd.String(flagSubdomain))
	if err != nil {
		return fmt.Errorf("failed to get app info: %w", err)
	}

	_, err = Pull(ctx, ce, proj, writeSecrets)

	return err
}

func verifyFile(ce *clienv.CliEnv, name string) error {
	if clienv.PathExists(name) {
		ce.PromptMessage("%s",
			name+" already exists. Do you want to overwrite it? [y/N] ",
		)

		resp, err := ce.PromptInput(false)
		if err != nil {
			return fmt.Errorf("failed to read input: %w", err)
		}

		if resp != "y" && resp != "Y" {
			return errors.New("aborting") //nolint:err113
		}
	}

	return nil
}

func respToSecrets(env []*graphql.GetSecrets_AppSecrets, anonymize bool) model.Secrets {
	secrets := make(model.Secrets, len(env))
	for i, s := range env {
		if anonymize {
			switch s.Name {
			case "HASURA_GRAPHQL_ADMIN_SECRET":
				s.Value = DefaultHasuraGraphqlAdminSecret
			case "HASURA_GRAPHQL_JWT_SECRET":
				s.Value = DefaultGraphqlJWTSecret
			case "NHOST_WEBHOOK_SECRET":
				s.Value = DefaultNhostWebhookSecret
			default:
				s.Value = "FIXME"
			}
		}

		secrets[i] = &model.ConfigEnvironmentVariable{
			Name: s.Name,
			Value: s.Value,
		}
	}

	return secrets
}

func pullSecrets(
	ctx context.Context,
	ce *clienv.CliEnv,
	proj *graphql.AppSummaryFragment,
) error {
	ce.Infoln("Getting secrets list from Nhost...")

	cl, err := ce.GetNhostClient(ctx)
	if err != nil {
		return fmt.Errorf("failed to get nhost client: %w", err)
	}

	resp, err := cl.GetSecrets(
		ctx,
		proj.ID,
	)
	if err != nil {
		return fmt.Errorf("failed to get secrets: %w", err)
	}

	secrets := respToSecrets(resp.GetAppSecrets(), true)
	if err := clienv.MarshalFile(&secrets, ce.Path.Secrets(), env.Marshal); err != nil {
		return fmt.Errorf("failed to save secrets file: %w", err)
	}

	ce.Infoln("Adding .secrets to .gitignore...")

	if err := system.AddToGitignore("\n.secrets\n"); err != nil {
		return fmt.Errorf("failed to add .secrets to .gitignore: %w", err)
	}

	return nil
}

func Pull(
	ctx context.Context,
	ce *clienv.CliEnv,
	proj *graphql.AppSummaryFragment,
	writeSecrets bool,
) (*model.ConfigConfig, error) {
	ce.Infoln("Pulling config from Nhost...")

	cl, err := ce.GetNhostClient(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to get nhost client: %w", err)
	}

	cfg, err := cl.GetConfigRawJSON(
		ctx,
		proj.ID,
	)
	if err != nil {
		return nil, fmt.Errorf("failed to get config: %w", err)
	}

	var v model.ConfigConfig
	if err := json.Unmarshal([]byte(cfg.ConfigRawJSON), &v); err != nil {
		return nil, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	if err := os.MkdirAll(ce.Path.NhostFolder(), 0o755); err != nil { //nolint:mnd
		return nil, fmt.Errorf("failed to create nhost directory: %w", err)
	}

	if err := clienv.MarshalFile(v, ce.Path.NhostToml(), toml.Marshal); err != nil {
		return nil, fmt.Errorf("failed to save nhost.toml: %w", err)
	}

	if writeSecrets {
		if err := pullSecrets(ctx, ce, proj); err != nil {
			return nil, err
		}
	}

	ce.Infoln("Success!")
	ce.Warnln(
		"- Review `nhost/nhost.toml` and make sure there are no secrets before you commit it to git.",
	)
	ce.Warnln("- Review `.secrets` file and set your development secrets")
	ce.Warnln("- Review `.secrets` was added to .gitignore")

	return &v, nil
}
@@ -1,55 +0,0 @@
package config

import (
	"context"
	"fmt"

	"github.com/nhost/be/services/mimir/model"
	"github.com/nhost/nhost/cli/clienv"
	"github.com/nhost/nhost/cli/project/env"
	"github.com/pelletier/go-toml/v2"
	"github.com/urfave/cli/v3"
)

func CommandShow() *cli.Command {
	return &cli.Command{ //nolint:exhaustruct
		Name: "show",
		Aliases: []string{},
		Usage: "Shows configuration after resolving secrets",
		Description: "Note that this command will always use the local secrets, even if you specify subdomain",
		Action: commandShow,
		Flags: []cli.Flag{
			&cli.StringFlag{ //nolint:exhaustruct
				Name: flagSubdomain,
				Usage: "Show this subdomain's rendered configuration. Defaults to base configuration",
				Sources: cli.EnvVars("NHOST_SUBDOMAIN"),
			},
		},
	}
}

func commandShow(_ context.Context, cmd *cli.Command) error {
	ce := clienv.FromCLI(cmd)

	var secrets model.Secrets
	if err := clienv.UnmarshalFile(ce.Path.Secrets(), &secrets, env.Unmarshal); err != nil {
		return fmt.Errorf(
			"failed to parse secrets, make sure secret values are between quotes: %w",
			err,
		)
	}

	cfg, err := Validate(ce, cmd.String(flagSubdomain), secrets)
	if err != nil {
		return err
	}

	b, err := toml.Marshal(cfg)
	if err != nil {
		return fmt.Errorf("error marshalling config: %w", err)
	}

	ce.Println("%s", b)

	return nil
}
@@ -1,8 +0,0 @@
HASURA_GRAPHQL_ADMIN_SECRET='nhost-admin-secret'
HASURA_GRAPHQL_JWT_SECRET='0f987876650b4a085e64594fae9219e7781b17506bec02489ad061fba8cb22db'
NHOST_WEBHOOK_SECRET='nhost-webhook-secret'
GRAFANA_ADMIN_PASSWORD='grafana-admin-password'
APPLE_CLIENT_ID='clientID'
APPLE_KEY_ID='keyID'
APPLE_TEAM_ID='teamID'
APPLE_PRIVATE_KEY='privateKey'
@@ -1,155 +0,0 @@
[global]
[[global.environment]]
name = 'ENVIRONMENT'
value = 'production'

[hasura]
version = 'v2.24.1-ce'
adminSecret = '{{ secrets.HASURA_GRAPHQL_ADMIN_SECRET }}'
webhookSecret = '{{ secrets.NHOST_WEBHOOK_SECRET }}'

[[hasura.jwtSecrets]]
type = 'HS256'
key = '{{ secrets.HASURA_GRAPHQL_JWT_SECRET }}'

[hasura.settings]
corsDomain = ['*']
devMode = true
enableAllowList = false
enableConsole = true
enableRemoteSchemaPermissions = false
enabledAPIs = ['metadata', 'graphql', 'pgdump', 'config']

[hasura.logs]
level = 'warn'

[hasura.events]
httpPoolSize = 100

[functions]
[functions.node]
version = 22

[auth]
version = '0.20.0'

[auth.redirections]
clientUrl = 'https://my.app.com'

[auth.signUp]
enabled = true

[auth.user]
[auth.user.roles]
default = 'user'
allowed = ['user', 'me']

[auth.user.locale]
default = 'en'
allowed = ['en']

[auth.user.gravatar]
enabled = true
default = 'blank'
rating = 'g'

[auth.user.email]

[auth.user.emailDomains]

[auth.session]
[auth.session.accessToken]
expiresIn = 900

[auth.session.refreshToken]
expiresIn = 2592000

[auth.method]
[auth.method.anonymous]
enabled = false

[auth.method.emailPasswordless]
enabled = false

[auth.method.emailPassword]
hibpEnabled = false
emailVerificationRequired = true
passwordMinLength = 9

[auth.method.smsPasswordless]
enabled = false

[auth.method.oauth]
[auth.method.oauth.apple]
enabled = true
clientId = '{{ secrets.APPLE_CLIENT_ID }}'
keyId = '{{ secrets.APPLE_KEY_ID }}'
teamId = '{{ secrets.APPLE_TEAM_ID }}'
privateKey = '{{ secrets.APPLE_PRIVATE_KEY }}'

[auth.method.oauth.azuread]
tenant = 'common'
enabled = false

[auth.method.oauth.bitbucket]
enabled = false

[auth.method.oauth.discord]
enabled = false

[auth.method.oauth.facebook]
enabled = false

[auth.method.oauth.github]
enabled = false

[auth.method.oauth.gitlab]
enabled = false

[auth.method.oauth.google]
enabled = false

[auth.method.oauth.linkedin]
enabled = false

[auth.method.oauth.spotify]
enabled = false

[auth.method.oauth.strava]
enabled = false

[auth.method.oauth.twitch]
enabled = false

[auth.method.oauth.twitter]
enabled = false

[auth.method.oauth.windowslive]
enabled = false

[auth.method.oauth.workos]
enabled = false

[auth.method.webauthn]
enabled = false

[auth.method.webauthn.attestation]
timeout = 60000

[auth.totp]
enabled = false

[postgres]
version = '14.6-20230406-2'

[postgres.resources.storage]
capacity = 1

[provider]

[storage]
version = '0.3.4'

[observability]
[observability.grafana]
adminPassword = '{{ secrets.GRAFANA_ADMIN_PASSWORD }}'
@@ -1,32 +0,0 @@
[
  {
    "op": "replace",
    "path": "/hasura/version",
    "value": "v2.25.0-ce"
  },
  {
    "op": "replace",
    "path": "/global/environment/0",
    "value": {
      "name": "ENVIRONMENT",
      "value": "development"
    }
  },
  {
    "op": "add",
    "path": "/global/environment/-",
    "value": {
      "name": "FUNCTION_LOG_LEVEL",
      "value": "debug"
    }
  },
  {
    "op": "replace",
    "path": "/auth/redirections/clientUrl",
    "value": "http://localhost:3000"
  },
  {
    "op": "remove",
    "path": "/auth/method/oauth/apple"
  }
]
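The overlay above is a standard RFC 6902 (JSON Patch) document. As a minimal, self-contained sketch (not part of this diff), applying such an overlay to a marshalled config comes down to the same `jsonpatch.DecodePatch` / `patch.Apply` calls used by `ApplyJSONPatches` in the file below; the helper name and package name here are illustrative only.

```go
package overlayexample

import (
	"encoding/json"
	"fmt"
	"os"

	jsonpatch "gopkg.in/evanphx/json-patch.v5"
)

// applyOverlay marshals any config value to JSON, applies the RFC 6902 patch
// stored at overlayPath (e.g. the overlay shown above), and returns the
// patched JSON bytes.
func applyOverlay(cfg any, overlayPath string) ([]byte, error) {
	patchBytes, err := os.ReadFile(overlayPath)
	if err != nil {
		return nil, fmt.Errorf("failed to read overlay: %w", err)
	}

	patch, err := jsonpatch.DecodePatch(patchBytes)
	if err != nil {
		return nil, fmt.Errorf("failed to decode overlay: %w", err)
	}

	cfgBytes, err := json.Marshal(cfg)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal config: %w", err)
	}

	return patch.Apply(cfgBytes)
}
```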
@@ -1,200 +0,0 @@
package config

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"os"

	"github.com/nhost/be/services/mimir/model"
	"github.com/nhost/be/services/mimir/schema"
	"github.com/nhost/be/services/mimir/schema/appconfig"
	"github.com/nhost/nhost/cli/clienv"
	"github.com/nhost/nhost/cli/project/env"
	"github.com/pelletier/go-toml/v2"
	"github.com/urfave/cli/v3"
	jsonpatch "gopkg.in/evanphx/json-patch.v5"
)

func CommandValidate() *cli.Command {
	return &cli.Command{ //nolint:exhaustruct
		Name: "validate",
		Aliases: []string{},
		Usage: "Validate configuration",
		Action: commandValidate,
		Flags: []cli.Flag{
			&cli.StringFlag{ //nolint:exhaustruct
				Name: flagSubdomain,
				Usage: "Validate this subdomain's configuration. Defaults to linked project",
				Sources: cli.EnvVars("NHOST_SUBDOMAIN"),
			},
		},
	}
}

func commandValidate(ctx context.Context, cmd *cli.Command) error {
	ce := clienv.FromCLI(cmd)

	subdomain := cmd.String(flagSubdomain)
	if subdomain != "" && subdomain != "local" {
		proj, err := ce.GetAppInfo(ctx, cmd.String(flagSubdomain))
		if err != nil {
			return fmt.Errorf("failed to get app info: %w", err)
		}

		_, _, err = ValidateRemote(
			ctx,
			ce,
			proj.GetSubdomain(),
			proj.GetID(),
		)

		return err
	}

	var secrets model.Secrets
	if err := clienv.UnmarshalFile(ce.Path.Secrets(), &secrets, env.Unmarshal); err != nil {
		return fmt.Errorf(
			"failed to parse secrets, make sure secret values are between quotes: %w",
			err,
		)
	}

	ce.Infoln("Verifying configuration...")

	if _, err := Validate(ce, "local", secrets); err != nil {
		return err
	}

	ce.Infoln("Configuration is valid!")

	return nil
}

func ApplyJSONPatches[T any](
	cfg T,
	overlayPath string,
) (*T, error) {
	f, err := os.Open(overlayPath)
	if err != nil {
		return nil, fmt.Errorf("failed to open json patches file: %w", err)
	}
	defer f.Close()

	patchesb, err := io.ReadAll(f)
	if err != nil {
		return nil, fmt.Errorf("failed to read json patches file: %w", err)
	}

	cfgb, err := json.Marshal(cfg)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal config: %w", err)
	}

	patch, err := jsonpatch.DecodePatch(patchesb)
	if err != nil {
		return nil, fmt.Errorf("failed to apply json patches: %w", err)
	}

	cfgb, err = patch.Apply(cfgb)
	if err != nil {
		return nil, fmt.Errorf("failed to apply json patches: %w", err)
	}

	var r T
	if err := json.Unmarshal(cfgb, &r); err != nil {
		return nil, fmt.Errorf("failed to unmarshal config: %w", err)
	}

	return &r, nil
}

func Validate(
	ce *clienv.CliEnv,
	subdomain string,
	secrets model.Secrets,
) (*model.ConfigConfig, error) {
	cfg := &model.ConfigConfig{} //nolint:exhaustruct
	if err := clienv.UnmarshalFile(ce.Path.NhostToml(), cfg, toml.Unmarshal); err != nil {
		return nil, fmt.Errorf("failed to parse config: %w", err)
	}

	if clienv.PathExists(ce.Path.Overlay(subdomain)) {
		var err error

		cfg, err = ApplyJSONPatches(*cfg, ce.Path.Overlay(subdomain))
		if err != nil {
			return nil, fmt.Errorf("failed to apply json patches: %w", err)
		}
	}

	schema, err := schema.New()
	if err != nil {
		return nil, fmt.Errorf("failed to create schema: %w", err)
	}

	cfg, err = appconfig.SecretsResolver(cfg, secrets, schema.Fill)
	if err != nil {
		return nil, fmt.Errorf("failed to validate config: %w", err)
	}

	return cfg, nil
}

// ValidateRemote validates the configuration of a remote project by fetching
// the secrets and applying them to the configuration. It also applies any
// JSON patches from the overlay directory if it exists.
// It returns the original configuration with the applied patches (without being filled
// and without secrets resolved) and another configuration filled and with secrets resolved.
func ValidateRemote(
	ctx context.Context,
	ce *clienv.CliEnv,
	subdomain string,
	appID string,
) (*model.ConfigConfig, *model.ConfigConfig, error) {
	cfg := &model.ConfigConfig{} //nolint:exhaustruct
	if err := clienv.UnmarshalFile(ce.Path.NhostToml(), cfg, toml.Unmarshal); err != nil {
		return nil, nil, fmt.Errorf("failed to parse config: %w", err)
	}

	if clienv.PathExists(ce.Path.Overlay(subdomain)) {
		var err error

		cfg, err = ApplyJSONPatches(*cfg, ce.Path.Overlay(subdomain))
		if err != nil {
			return nil, nil, fmt.Errorf("failed to apply json patches: %w", err)
		}
	}

	schema, err := schema.New()
	if err != nil {
		return nil, nil, fmt.Errorf("failed to create schema: %w", err)
	}

	ce.Infoln("Getting secrets...")

	cl, err := ce.GetNhostClient(ctx)
	if err != nil {
		return nil, nil, fmt.Errorf("failed to get nhost client: %w", err)
	}

	secretsResp, err := cl.GetSecrets(
		ctx,
		appID,
	)
	if err != nil {
		return nil, nil, fmt.Errorf("failed to get secrets: %w", err)
	}

	secrets := respToSecrets(secretsResp.GetAppSecrets(), false)

	cfgSecrets, err := appconfig.SecretsResolver(cfg, secrets, schema.Fill)
	if err != nil {
		return nil, nil, fmt.Errorf("failed to validate config: %w", err)
	}

	ce.Infoln("Config is valid!")

	return cfg, cfgSecrets, nil
}
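To illustrate the two return values described in the `ValidateRemote` doc comment above, here is a hedged sketch of a hypothetical caller (not part of this diff): it assumes a `clienv.CliEnv` and an `AppSummaryFragment` are already in hand, obtained the same way `commandValidate` above obtains them, and the function name `showRemote` is made up for the example.

```go
package example

import (
	"context"
	"fmt"

	"github.com/nhost/nhost/cli/clienv"
	"github.com/nhost/nhost/cli/cmd/config"
	"github.com/nhost/nhost/cli/nhostclient/graphql"
	"github.com/pelletier/go-toml/v2"
)

// showRemote is a hypothetical caller: `local` keeps the overlay-patched config
// with its '{{ secrets.* }}' placeholders, while `resolved` comes back filled by
// the schema and with the project's real secrets substituted.
func showRemote(ctx context.Context, ce *clienv.CliEnv, proj *graphql.AppSummaryFragment) error {
	local, resolved, err := config.ValidateRemote(ctx, ce, proj.GetSubdomain(), proj.GetID())
	if err != nil {
		return err
	}

	_ = local // e.g. keep for diffing against the resolved version

	b, err := toml.Marshal(resolved)
	if err != nil {
		return fmt.Errorf("failed to marshal resolved config: %w", err)
	}

	ce.Println("%s", b)

	return nil
}
```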
@@ -1,288 +0,0 @@
package config_test

import (
	"os"
	"path/filepath"
	"testing"

	"github.com/google/go-cmp/cmp"
	"github.com/nhost/be/services/mimir/model"
	"github.com/nhost/nhost/cli/clienv"
	"github.com/nhost/nhost/cli/cmd/config"
	"github.com/nhost/nhost/cli/project/env"
)

func ptr[T any](t T) *T {
	return &t
}

func expectedConfig() *model.ConfigConfig {
	//nolint:exhaustruct
	return &model.ConfigConfig{
		Global: &model.ConfigGlobal{
			Environment: []*model.ConfigGlobalEnvironmentVariable{
				{Name: "ENVIRONMENT", Value: "development"},
				{Name: "FUNCTION_LOG_LEVEL", Value: "debug"},
			},
		},
		Hasura: &model.ConfigHasura{
			Version: ptr("v2.25.0-ce"),
			JwtSecrets: []*model.ConfigJWTSecret{
				{
					Type: ptr("HS256"),
					Key: ptr("0f987876650b4a085e64594fae9219e7781b17506bec02489ad061fba8cb22db"),
				},
			},
			AdminSecret: "nhost-admin-secret",
			WebhookSecret: "nhost-webhook-secret",
			Settings: &model.ConfigHasuraSettings{
				CorsDomain: []string{"*"},
				DevMode: ptr(true),
				EnableAllowList: ptr(false),
				EnableConsole: ptr(true),
				EnableRemoteSchemaPermissions: new(bool),
				EnabledAPIs: []string{
					"metadata",
					"graphql",
					"pgdump",
					"config",
				},
				InferFunctionPermissions: ptr(true),
				LiveQueriesMultiplexedRefetchInterval: ptr(uint32(1000)),
				StringifyNumericTypes: ptr(false),
			},
			Logs: &model.ConfigHasuraLogs{Level: ptr("warn")},
			Events: &model.ConfigHasuraEvents{HttpPoolSize: ptr(uint32(100))},
		},
		Functions: &model.ConfigFunctions{Node: &model.ConfigFunctionsNode{Version: ptr(22)}},
		Auth: &model.ConfigAuth{
			Version: ptr("0.20.0"),
			Misc: &model.ConfigAuthMisc{
				ConcealErrors: ptr(false),
			},
			ElevatedPrivileges: &model.ConfigAuthElevatedPrivileges{
				Mode: ptr("disabled"),
			},
			Redirections: &model.ConfigAuthRedirections{
				ClientUrl: ptr("http://localhost:3000"),
				AllowedUrls: []string{},
			},
			SignUp: &model.ConfigAuthSignUp{
				Enabled: ptr(true),
				DisableNewUsers: ptr(false),
			},
			User: &model.ConfigAuthUser{
				Roles: &model.ConfigAuthUserRoles{
					Default: ptr("user"),
					Allowed: []string{"user", "me"},
				},
				Locale: &model.ConfigAuthUserLocale{
					Default: ptr("en"),
					Allowed: []string{"en"},
				},
				Gravatar: &model.ConfigAuthUserGravatar{
					Enabled: ptr(true),
					Default: ptr("blank"),
					Rating: ptr("g"),
				},
				Email: &model.ConfigAuthUserEmail{
					Allowed: []string{},
					Blocked: []string{},
				},
				EmailDomains: &model.ConfigAuthUserEmailDomains{
					Allowed: []string{},
					Blocked: []string{},
				},
			},
			Session: &model.ConfigAuthSession{
				AccessToken: &model.ConfigAuthSessionAccessToken{
					ExpiresIn: ptr(uint32(900)),
					CustomClaims: []*model.ConfigAuthsessionaccessTokenCustomClaims{},
				},
				RefreshToken: &model.ConfigAuthSessionRefreshToken{
					ExpiresIn: ptr(uint32(2592000)),
				},
			},
			Method: &model.ConfigAuthMethod{
				Anonymous: &model.ConfigAuthMethodAnonymous{
					Enabled: ptr(false),
				},
				Otp: &model.ConfigAuthMethodOtp{
					Email: &model.ConfigAuthMethodOtpEmail{
						Enabled: ptr(false),
					},
				},
				EmailPasswordless: &model.ConfigAuthMethodEmailPasswordless{
					Enabled: ptr(false),
				},
				EmailPassword: &model.ConfigAuthMethodEmailPassword{
					HibpEnabled: ptr(false),
					EmailVerificationRequired: ptr(true),
					PasswordMinLength: ptr(uint8(9)),
				},
				SmsPasswordless: &model.ConfigAuthMethodSmsPasswordless{
					Enabled: ptr(false),
				},
				Oauth: &model.ConfigAuthMethodOauth{
					Apple: &model.ConfigAuthMethodOauthApple{
						Enabled: ptr(false),
					},
					Azuread: &model.ConfigAuthMethodOauthAzuread{
						Enabled: ptr(false),
						Tenant: ptr("common"),
					},
					Bitbucket: &model.ConfigStandardOauthProvider{
						Enabled: ptr(false),
					},
					Discord: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Entraid: &model.ConfigAuthMethodOauthEntraid{
						Enabled: ptr(false),
						Tenant: ptr("common"),
					},
					Facebook: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Github: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Gitlab: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Google: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Linkedin: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Spotify: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Strava: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Twitch: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Twitter: &model.ConfigAuthMethodOauthTwitter{
						Enabled: ptr(false),
					},
					Windowslive: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Workos: &model.ConfigAuthMethodOauthWorkos{
						Enabled: ptr(false),
					},
				},
				Webauthn: &model.ConfigAuthMethodWebauthn{
					Enabled: ptr(false),
					RelyingParty: nil,
					Attestation: &model.ConfigAuthMethodWebauthnAttestation{
						Timeout: ptr(uint32(60000)),
					},
				},
			},
			Totp: &model.ConfigAuthTotp{Enabled: ptr(false)},
			RateLimit: &model.ConfigAuthRateLimit{
				Emails: &model.ConfigRateLimit{
					Limit: 10,
					Interval: "1h",
				},
				Sms: &model.ConfigRateLimit{
					Limit: 10,
					Interval: "1h",
				},
				BruteForce: &model.ConfigRateLimit{
					Limit: 10,
					Interval: "5m",
				},
				Signups: &model.ConfigRateLimit{
					Limit: 10,
					Interval: "5m",
				},
				Global: &model.ConfigRateLimit{
					Limit: 100,
					Interval: "1m",
				},
			},
		},
		Postgres: &model.ConfigPostgres{
			Version: ptr("14.6-20230406-2"),
			Resources: &model.ConfigPostgresResources{
				Storage: &model.ConfigPostgresResourcesStorage{
					Capacity: 1,
				},
			},
		},
		Provider: &model.ConfigProvider{},
		Storage: &model.ConfigStorage{Version: ptr("0.3.4")},
		Observability: &model.ConfigObservability{
			Grafana: &model.ConfigGrafana{
				AdminPassword: "grafana-admin-password",
				Smtp: nil,
				Alerting: &model.ConfigGrafanaAlerting{
					Enabled: ptr(false),
				},
				Contacts: &model.ConfigGrafanaContacts{},
			},
		},
	}
}

func TestValidate(t *testing.T) {
	t.Parallel()

	cases := []struct {
		name string
		path string
		expected func() *model.ConfigConfig
		applyPatches bool
	}{
		{
			name: "applypatches",
			path: "success",
			expected: expectedConfig,
			applyPatches: true,
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			t.Parallel()

			ce := clienv.New(
				os.Stdout,
				os.Stderr,
				clienv.NewPathStructure(
					".",
					filepath.Join("testdata", "validate", tc.path),
					filepath.Join("testdata", "validate", tc.path, ".nhost"),
					filepath.Join("testdata", "validate", tc.path, "nhost"),
				),
				"fakeauthurl",
				"fakegraphqlurl",
				"fakebranch",
				"",
				"local",
			)

			var secrets model.Secrets
			if err := clienv.UnmarshalFile(ce.Path.Secrets(), &secrets, env.Unmarshal); err != nil {
				t.Fatalf(
					"failed to parse secrets, make sure secret values are between quotes: %s",
					err,
				)
			}

			cfg, err := config.Validate(ce, "local", secrets)
			if err != nil {
				t.Fatal(err)
			}

			if diff := cmp.Diff(tc.expected(), cfg); diff != "" {
				t.Errorf("%s", diff)
			}
		})
	}
}