Compare commits
118 Commits
@nhost/rea
...
cli@1.32.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0c820d4173 | ||
|
|
72401ae1a7 | ||
|
|
73b60a14e5 | ||
|
|
f4ce851abe | ||
|
|
cc88dbc4bd | ||
|
|
a6a5ecdad9 | ||
|
|
069896c2d9 | ||
|
|
754541a24b | ||
|
|
e7978b0346 | ||
|
|
894cd29d6b | ||
|
|
d570084d24 | ||
|
|
8c71dd9db9 | ||
|
|
c6006fec30 | ||
|
|
69c2954658 | ||
|
|
be6af4f157 | ||
|
|
3a41251caf | ||
|
|
c6af08fde4 | ||
|
|
63d73e639c | ||
|
|
8112625a0a | ||
|
|
bbf1f6c11d | ||
|
|
d37f31fc41 | ||
|
|
0367dfae00 | ||
|
|
6ad1cfcb13 | ||
|
|
25c0ffa83b | ||
|
|
cc98f33440 | ||
|
|
8812d9dcaf | ||
|
|
bf17981596 | ||
|
|
2f4b3768c7 | ||
|
|
73a7ba82ae | ||
|
|
ba3c49e443 | ||
|
|
88836f3b1f | ||
|
|
81716d9d9c | ||
|
|
a30da08e9b | ||
|
|
397bfc948c | ||
|
|
0d183761ae | ||
|
|
1902a114ec | ||
|
|
92e71a61f9 | ||
|
|
9790bcfe3e | ||
|
|
811b48eccf | ||
|
|
57987ed3a9 | ||
|
|
7f0db210ba | ||
|
|
d8c5117046 | ||
|
|
7633d04121 | ||
|
|
e8a378906a | ||
|
|
34ede5cf2c | ||
|
|
2deeb39a28 | ||
|
|
d98e73e57e | ||
|
|
4c6400fc52 | ||
|
|
c4f383f695 | ||
|
|
1708578f8f | ||
|
|
96228dfe69 | ||
|
|
2f5bc04e0c | ||
|
|
06b47e0fb9 | ||
|
|
412692c2f6 | ||
|
|
89f6fe6346 | ||
|
|
2e34d7b9d0 | ||
|
|
66e0cc8261 | ||
|
|
7eb9539807 | ||
|
|
906620a755 | ||
|
|
5e9ddb41d2 | ||
|
|
00132bd961 | ||
|
|
57b26152e4 | ||
|
|
5565451f18 | ||
|
|
4b18e02ad2 | ||
|
|
181c0ab19d | ||
|
|
939a158917 | ||
|
|
9c0a118721 | ||
|
|
129ec1edfc | ||
|
|
40439b9987 | ||
|
|
cffa161da7 | ||
|
|
4ffff86752 | ||
|
|
f9e170e958 | ||
|
|
b8cb491ab1 | ||
|
|
59249e5161 | ||
|
|
df6b85e98c | ||
|
|
85316e822f | ||
|
|
f7d7080dad | ||
|
|
ec24567d83 | ||
|
|
56c87dad64 | ||
|
|
47ab341ce4 | ||
|
|
5fed49e05b | ||
|
|
aee9a80ac8 | ||
|
|
5ef3f76ea0 | ||
|
|
4ca9641304 | ||
|
|
fd3b5c77e4 | ||
|
|
9ed8ce8a5e | ||
|
|
e7762cb2b5 | ||
|
|
e353d99de8 | ||
|
|
c4d289a4d5 | ||
|
|
e2065e22df | ||
|
|
d738884d7d | ||
|
|
b50404566f | ||
|
|
8caf3daa54 | ||
|
|
8a07613cbe | ||
|
|
736862c9cc | ||
|
|
ea99fb31d7 | ||
|
|
70433187cc | ||
|
|
39b10a2e9f | ||
|
|
4b8478004e | ||
|
|
61eb6cdc2d | ||
|
|
14187d381f | ||
|
|
99b78f147e | ||
|
|
2aa81a6cb9 | ||
|
|
a1edaf18ea | ||
|
|
4d835c4b9c | ||
|
|
44a3e6bd41 | ||
|
|
6ee2d1f5bf | ||
|
|
df51c3e64e | ||
|
|
9acae7d1c4 | ||
|
|
f6947a2194 | ||
|
|
31e636a9c8 | ||
|
|
0fdff345ac | ||
|
|
97db63791b | ||
|
|
a0931e282f | ||
|
|
e87505c564 | ||
|
|
c0635ae1c7 | ||
|
|
d2a9a9ae1d | ||
|
|
c97b43f149 |
@@ -1,9 +0,0 @@
|
||||
# Changesets
|
||||
|
||||
Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool
|
||||
that works with multi-package repos, or single-package repos to help you version and publish your
|
||||
code. You can find the full documentation for it
|
||||
[in our repository](https://github.com/changesets/changesets)
|
||||
|
||||
We have a quick list of common questions to get you started engaging with this project in
|
||||
[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
|
||||
@@ -1,9 +0,0 @@
|
||||
{
|
||||
"$schema": "https://unpkg.com/@changesets/config@1.6.0/schema.json",
|
||||
"changelog": "@changesets/cli/changelog",
|
||||
"commit": false,
|
||||
"linked": [],
|
||||
"access": "restricted",
|
||||
"baseBranch": "main",
|
||||
"updateInternalDependencies": "patch"
|
||||
}
|
||||
14
.github/CODEOWNERS
vendored
14
.github/CODEOWNERS
vendored
@@ -1,14 +0,0 @@
|
||||
# Documentation
|
||||
# https://help.github.com/en/articles/about-code-owners
|
||||
|
||||
/packages @nunopato @onehassan
|
||||
/packages/docgen @nunopato @onehassan
|
||||
/integrations/stripe-graphql-js @nunopato @onehassan
|
||||
/.github @nunopato @onehassan
|
||||
/dashboard/ @nunopato @onehassan
|
||||
/docs/ @nunopato @onehassan
|
||||
/config/ @nunopato @onehassan
|
||||
/examples/ @nunopato @onehassan
|
||||
/examples/codegen-react-apollo @nunopato @onehassan
|
||||
/examples/codegen-react-query @nunopato @onehassan
|
||||
/examples/react-apollo-crm @nunopato @onehassan
|
||||
41
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
41
.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
### Checklist
|
||||
|
||||
- [ ] No breaking changes
|
||||
- [ ] Tests pass
|
||||
- [ ] New features have new tests
|
||||
- [ ] Documentation is updated (if applicable)
|
||||
- [ ] Title of the PR is in the correct format (see below)
|
||||
|
||||
--- Delete everything below this line before submitting your PR ---
|
||||
|
||||
### PR title format
|
||||
|
||||
The PR title must follow the following pattern:
|
||||
|
||||
`TYPE(PKG): SUMMARY`
|
||||
|
||||
Where `TYPE` is:
|
||||
|
||||
- feat: mark this pull request as a feature
|
||||
- fix: mark this pull request as a bug fix
|
||||
- chore: mark this pull request as a maintenance item
|
||||
|
||||
Where `PKG` is:
|
||||
|
||||
- `ci`: For general changes to the build and/or CI/CD pipeline
|
||||
- `cli`: For changes to the Nhost CLI
|
||||
- `codegen`: For changes to the code generator
|
||||
- `dashboard`: For changes to the Nhost Dashboard
|
||||
- `deps`: For changes to dependencies
|
||||
- `docs`: For changes to the documentation
|
||||
- `examples`: For changes to the examples
|
||||
- `mintlify-openapi`: For changes to the Mintlify OpenAPI tool
|
||||
- `nhost-js`: For changes to the Nhost JavaScript SDK
|
||||
- `nixops`: For changes to the NixOps
|
||||
|
||||
Where `SUMMARY` is a short description of what the PR does.
|
||||
|
||||
### Tests
|
||||
|
||||
- please make sure your changes pass the current tests (Use the `make test`
|
||||
- if you are introducing a new feature, please write as much tests as possible.
|
||||
29
.github/actions/discord-notification/action.yml
vendored
Normal file
29
.github/actions/discord-notification/action.yml
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
name: 'Discord Notification'
|
||||
description: 'Send a Discord notification with conditional check'
|
||||
|
||||
inputs:
|
||||
webhook-url:
|
||||
description: 'Discord webhook URL'
|
||||
required: true
|
||||
title:
|
||||
description: 'Embed title'
|
||||
required: true
|
||||
description:
|
||||
description: 'Embed description'
|
||||
required: true
|
||||
color:
|
||||
description: 'Embed color (decimal number)'
|
||||
required: false
|
||||
default: '5763719'
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Send Discord notification
|
||||
if: ${{ inputs.webhook-url }}
|
||||
uses: tsickert/discord-webhook@v7.0.0
|
||||
with:
|
||||
webhook-url: ${{ inputs.webhook-url }}
|
||||
embed-title: ${{ inputs.title }}
|
||||
embed-description: ${{ inputs.description }}
|
||||
embed-color: ${{ inputs.color }}
|
||||
59
.github/actions/install-dependencies/action.yaml
vendored
59
.github/actions/install-dependencies/action.yaml
vendored
@@ -1,59 +0,0 @@
|
||||
name: Install Node and package dependencies
|
||||
description: 'Install Node dependencies with pnpm'
|
||||
inputs:
|
||||
TURBO_TOKEN:
|
||||
description: 'Turborepo token'
|
||||
TURBO_TEAM:
|
||||
description: 'Turborepo team'
|
||||
BUILD:
|
||||
description: 'Build packages'
|
||||
default: 'default'
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10.1.0
|
||||
run_install: false
|
||||
- name: Get pnpm cache directory
|
||||
id: pnpm-cache-dir
|
||||
shell: bash
|
||||
run: echo "dir=$(pnpm store path)" >> $GITHUB_OUTPUT
|
||||
- uses: actions/cache@v4
|
||||
id: pnpm-cache
|
||||
with:
|
||||
path: ${{ steps.pnpm-cache-dir.outputs.dir }}
|
||||
key: ${{ runner.os }}-node-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
restore-keys: ${{ runner.os }}-node-
|
||||
- name: Use Node.js v20
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 20
|
||||
- shell: bash
|
||||
name: Use Latest Corepack
|
||||
run: |
|
||||
echo "Before: corepack version => $(corepack --version || echo 'not installed')"
|
||||
npm install -g corepack@latest
|
||||
echo "After : corepack version => $(corepack --version)"
|
||||
corepack enable
|
||||
pnpm --version
|
||||
- shell: bash
|
||||
name: Install packages
|
||||
run: pnpm install --frozen-lockfile
|
||||
# * Build all Nhost packages as they are all supposed to be tested.
|
||||
# * They are reused through the Turborepo cache
|
||||
- shell: bash
|
||||
name: Build packages
|
||||
if: ${{ inputs.BUILD == 'all' }}
|
||||
run: pnpm run build:all
|
||||
env:
|
||||
TURBO_TOKEN: ${{ inputs.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ inputs.TURBO_TEAM }}
|
||||
- shell: bash
|
||||
name: Build everything in the monorepo
|
||||
if: ${{ inputs.BUILD == 'default' }}
|
||||
run: pnpm run build
|
||||
env:
|
||||
TURBO_TOKEN: ${{ inputs.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ inputs.TURBO_TEAM }}
|
||||
108
.github/actions/nhost-cli/README.md
vendored
108
.github/actions/nhost-cli/README.md
vendored
@@ -1,108 +0,0 @@
|
||||
# Nhost CLI GitHub Action
|
||||
|
||||
## Usage
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install the Nhost CLI
|
||||
uses: ./.github/actions/nhost-cli
|
||||
```
|
||||
|
||||
### Install the CLI and start the app
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install Nhost CLI and start the application
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
start: true
|
||||
```
|
||||
|
||||
### Set another working directory
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install Nhost CLI
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
path: examples/react-apollo
|
||||
start: true
|
||||
```
|
||||
|
||||
### Don't wait for the app to be ready
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install Nhost CLI and start app
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
start: true
|
||||
wait: false
|
||||
```
|
||||
|
||||
### Stop the app
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Start app
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
start: true
|
||||
- name: Do something
|
||||
cmd: echo "do something"
|
||||
- name: Stop
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
stop: true
|
||||
```
|
||||
|
||||
### Install a given value of the CLI
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install Nhost CLI
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
version: v0.8.10
|
||||
```
|
||||
|
||||
### Inject values into nhost/config.yaml
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install Nhost CLI
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
config: |
|
||||
services:
|
||||
auth:
|
||||
image: nhost/hasura-auth:0.16.1
|
||||
```
|
||||
84
.github/actions/nhost-cli/action.yaml
vendored
84
.github/actions/nhost-cli/action.yaml
vendored
@@ -1,84 +0,0 @@
|
||||
name: Nhost CLI
|
||||
description: 'Action to install the Nhost CLI and to run an application'
|
||||
inputs:
|
||||
init:
|
||||
description: 'Initialize the application'
|
||||
default: 'false'
|
||||
start:
|
||||
description: "Start the application. If false, the application won't be started"
|
||||
default: 'false'
|
||||
wait:
|
||||
description: 'If starting the application, wait until it is ready'
|
||||
default: 'true'
|
||||
stop:
|
||||
description: 'Stop the application'
|
||||
default: 'false'
|
||||
path:
|
||||
description: 'Path to the application'
|
||||
default: '.'
|
||||
version:
|
||||
description: 'Version of the Nhost CLI'
|
||||
default: 'latest'
|
||||
dashboard-image:
|
||||
description: 'Image of the dashboard'
|
||||
default: 'nhost/dashboard:latest'
|
||||
config:
|
||||
description: 'Values to be injected into nhost/config.yaml'
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Check if Nhost CLI is already installed
|
||||
id: check-nhost-cli
|
||||
shell: bash
|
||||
# TODO check if the version is the same
|
||||
run: |
|
||||
if [ -z "$(which nhost)" ]
|
||||
then
|
||||
echo "installed=false" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "installed=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Install Nhost CLI
|
||||
if: ${{ steps.check-nhost-cli.outputs.installed == 'false' }}
|
||||
uses: nick-fields/retry@v2
|
||||
with:
|
||||
timeout_minutes: 3
|
||||
max_attempts: 10
|
||||
command: bash <(curl --silent -L https://raw.githubusercontent.com/nhost/cli/main/get.sh) ${{ inputs.version }}
|
||||
- name: Initialize a new project from scratch
|
||||
if: ${{ inputs.init == 'true' }}
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.path }}
|
||||
run: |
|
||||
rm -rf ./*
|
||||
nhost init
|
||||
- name: Set custom configuration
|
||||
if: ${{ inputs.config }}
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.path }}
|
||||
run: config="${{ inputs.config }}" yq -i '. *= env(config)' nhost/config.yaml
|
||||
- name: Start the application
|
||||
if: ${{ inputs.start == 'true' }}
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.path }}
|
||||
run: |
|
||||
if [ -n "${{ inputs.dashboard-image }}" ]; then
|
||||
export NHOST_DASHBOARD_VERSION=${{ inputs.dashboard-image }}
|
||||
fi
|
||||
if [ -f .secrets.example ]; then
|
||||
cp .secrets.example .secrets
|
||||
fi
|
||||
nhost up
|
||||
- name: Log on failure
|
||||
if: steps.wait.outcome == 'failure'
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.path }}
|
||||
run: |
|
||||
nhost logs
|
||||
exit 1
|
||||
- name: Stop the application
|
||||
if: ${{ inputs.stop == 'true' }}
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.path }}
|
||||
run: nhost down
|
||||
24
.github/labeler.yml
vendored
24
.github/labeler.yml
vendored
@@ -1,24 +0,0 @@
|
||||
dashboard:
|
||||
- dashboard/**/*
|
||||
|
||||
documentation:
|
||||
- any:
|
||||
- docs/**/*
|
||||
|
||||
examples:
|
||||
- examples/**/*
|
||||
|
||||
sdk:
|
||||
- packages/**/*
|
||||
|
||||
integrations:
|
||||
- integrations/**/*
|
||||
|
||||
react:
|
||||
- '{packages,examples,integrations}/*react*/**/*'
|
||||
|
||||
nextjs:
|
||||
- '{packages,examples}/*next*/**/*'
|
||||
|
||||
vue:
|
||||
- '{packages,examples,integrations}/*vue*/**/*'
|
||||
157
.github/workflows/changesets.yaml
vendored
157
.github/workflows/changesets.yaml
vendored
@@ -1,157 +0,0 @@
|
||||
name: Release
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
- 'examples/**'
|
||||
- 'assets/**'
|
||||
- '**.md'
|
||||
- '!.changeset/**'
|
||||
- 'LICENSE'
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: nhost
|
||||
DASHBOARD_PACKAGE: '@nhost/dashboard'
|
||||
|
||||
jobs:
|
||||
version:
|
||||
name: Version
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
hasChangesets: ${{ steps.changesets.outputs.hasChangesets }}
|
||||
dashboardVersion: ${{ steps.dashboard.outputs.dashboardVersion }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Install Node and dependencies
|
||||
uses: ./.github/actions/install-dependencies
|
||||
with:
|
||||
TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ env.TURBO_TEAM }}
|
||||
- name: Create PR or Publish release
|
||||
id: changesets
|
||||
uses: changesets/action@v1
|
||||
with:
|
||||
version: pnpm run ci:version
|
||||
commit: 'chore: update versions'
|
||||
title: 'chore: update versions'
|
||||
publish: pnpm run release
|
||||
createGithubReleases: false
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
- name: Check Dashboard tag
|
||||
id: dashboard
|
||||
if: steps.changesets.outputs.hasChangesets == 'false'
|
||||
run: |
|
||||
DASHBOARD_VERSION=$(jq -r .version dashboard/package.json)
|
||||
GIT_TAG="${{ env.DASHBOARD_PACKAGE}}@$DASHBOARD_VERSION"
|
||||
if [ -z "$(git tag -l | grep $GIT_TAG)" ]; then
|
||||
echo "dashboardVersion=$DASHBOARD_VERSION" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
test:
|
||||
needs: version
|
||||
name: Dashboard
|
||||
if: needs.version.outputs.dashboardVersion != ''
|
||||
uses: ./.github/workflows/dashboard.yaml
|
||||
secrets: inherit
|
||||
|
||||
publish-vercel:
|
||||
name: Publish to Vercel
|
||||
needs:
|
||||
- test
|
||||
uses: ./.github/workflows/deploy-dashboard.yaml
|
||||
with:
|
||||
git_ref: ${{ github.ref_name }}
|
||||
environment: production
|
||||
secrets: inherit
|
||||
|
||||
publish-docker:
|
||||
name: Publish to Docker Hub
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- test
|
||||
- version
|
||||
- publish-vercel
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Add git tag
|
||||
run: |
|
||||
git tag "${{ env.DASHBOARD_PACKAGE }}@${{ needs.version.outputs.dashboardVersion }}"
|
||||
git push origin --tags
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: |
|
||||
nhost/dashboard
|
||||
tags: |
|
||||
type=raw,value=latest,enable=true
|
||||
type=semver,pattern={{version}},value=v${{ needs.version.outputs.dashboardVersion }}
|
||||
type=semver,pattern={{major}}.{{minor}},value=v${{ needs.version.outputs.dashboardVersion }}
|
||||
type=semver,pattern={{major}},value=v${{ needs.version.outputs.dashboardVersion }}
|
||||
type=sha
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Build and push to Docker Hub
|
||||
uses: docker/build-push-action@v4
|
||||
timeout-minutes: 90
|
||||
with:
|
||||
context: .
|
||||
file: ./dashboard/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
build-args: |
|
||||
TURBO_TOKEN=${{ env.TURBO_TOKEN }}
|
||||
TURBO_TEAM=${{ env.TURBO_TEAM }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
push: true
|
||||
|
||||
bump-cli:
|
||||
name: Bump Dashboard version in the Nhost CLI
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- version
|
||||
- publish-docker
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
repository: nhost/cli
|
||||
token: ${{ secrets.GH_PAT }}
|
||||
fetch-depth: 0
|
||||
- name: Bump version in source code
|
||||
run: |
|
||||
IMAGE=$(echo ${{ env.DASHBOARD_PACKAGE }} | sed 's/@\(.\+\)\/\(.\+\)/\1\\\/\2/g')
|
||||
VERSION="${{ needs.version.outputs.dashboardVersion }}"
|
||||
EXPRESSION='s/"'$IMAGE':[0-9]\+\.[0-9]\+\.[0-9]\+"/"'$IMAGE':'$VERSION'"/g'
|
||||
find ./ -type f -exec sed -i -e $EXPRESSION {} \;
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v5
|
||||
with:
|
||||
token: ${{ secrets.GH_PAT }}
|
||||
commit-message: 'chore: bump nhost/dashboard to ${{ needs.version.outputs.dashboardVersion }}'
|
||||
branch: bump-dashboard-version
|
||||
delete-branch: true
|
||||
title: 'chore: bump nhost/dashboard to ${{ needs.version.outputs.dashboardVersion }}'
|
||||
body: |
|
||||
This PR bumps the Nhost Dashboard Docker image to version ${{ needs.version.outputs.dashboardVersion }}.
|
||||
209
.github/workflows/ci.yaml
vendored
209
.github/workflows/ci.yaml
vendored
@@ -1,209 +0,0 @@
|
||||
name: Continuous Integration
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths-ignore:
|
||||
- 'assets/**'
|
||||
- '**.md'
|
||||
- 'LICENSE'
|
||||
- 'docs/**'
|
||||
pull_request:
|
||||
types: [opened, synchronize]
|
||||
paths-ignore:
|
||||
- 'assets/**'
|
||||
- '**.md'
|
||||
- 'LICENSE'
|
||||
- 'docs/**'
|
||||
env:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: nhost
|
||||
NEXT_PUBLIC_ENV: dev
|
||||
NEXT_TELEMETRY_DISABLED: 1
|
||||
NHOST_TEST_DASHBOARD_URL: ${{ vars.NHOST_TEST_DASHBOARD_URL }}
|
||||
NHOST_TEST_PROJECT_NAME: ${{ vars.NHOST_TEST_PROJECT_NAME }}
|
||||
NHOST_TEST_ORGANIZATION_NAME: ${{ vars.NHOST_TEST_ORGANIZATION_NAME }}
|
||||
NHOST_TEST_ORGANIZATION_SLUG: ${{ vars.NHOST_TEST_ORGANIZATION_SLUG }}
|
||||
NHOST_TEST_PERSONAL_ORG_SLUG: ${{ vars.NHOST_TEST_PERSONAL_ORG_SLUG }}
|
||||
NHOST_TEST_PROJECT_SUBDOMAIN: ${{ vars.NHOST_TEST_PROJECT_SUBDOMAIN }}
|
||||
NHOST_PRO_TEST_PROJECT_NAME: ${{ vars.NHOST_PRO_TEST_PROJECT_NAME }}
|
||||
NHOST_TEST_USER_EMAIL: ${{ secrets.NHOST_TEST_USER_EMAIL }}
|
||||
NHOST_TEST_USER_PASSWORD: ${{ secrets.NHOST_TEST_USER_PASSWORD }}
|
||||
NHOST_TEST_PROJECT_ADMIN_SECRET: ${{ secrets.NHOST_TEST_PROJECT_ADMIN_SECRET }}
|
||||
NHOST_TEST_FREE_USER_EMAILS: ${{ secrets.NHOST_TEST_FREE_USER_EMAILS }}
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build @nhost packages
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
# * Install Node and dependencies. Package downloads will be cached for the next jobs.
|
||||
- name: Install Node and dependencies
|
||||
uses: ./.github/actions/install-dependencies
|
||||
with:
|
||||
TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ env.TURBO_TEAM }}
|
||||
BUILD: 'all'
|
||||
- name: Check if the pnpm lockfile changed
|
||||
id: changed-lockfile
|
||||
uses: tj-actions/changed-files@v37
|
||||
with:
|
||||
files: pnpm-lock.yaml
|
||||
# * Determine a pnpm filter argument for packages that have been modified.
|
||||
# * If the lockfile has changed, we don't filter anything in order to run all the e2e tests.
|
||||
- name: filter packages
|
||||
id: filter-packages
|
||||
if: steps.changed-lockfile.outputs.any_changed != 'true' && github.event_name == 'pull_request'
|
||||
run: echo "filter=${{ format('--filter=...[origin/{0}]', github.base_ref) }}" >> $GITHUB_OUTPUT
|
||||
# * List packagesthat has an `e2e` script, except the root, and return an array of their name and path
|
||||
# * In a PR, only include packages that have been modified, and their dependencies
|
||||
- name: List examples with an e2e script
|
||||
id: set-matrix
|
||||
run: |
|
||||
PACKAGES=$(pnpm recursive list --depth -1 --parseable --filter='!nhost-root' ${{ steps.filter-packages.outputs.filter }} \
|
||||
| xargs -I@ realpath --relative-to=$PWD @ \
|
||||
| xargs -I@ jq "if (.scripts.e2e | length) != 0 then {name: .name, path: \"@\"} else null end" @/package.json \
|
||||
| awk "!/null/" \
|
||||
| jq -c --slurp 'map(select(length > 0))')
|
||||
echo "matrix=$PACKAGES" >> $GITHUB_OUTPUT
|
||||
outputs:
|
||||
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||
|
||||
unit:
|
||||
name: Unit tests
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
# * Install Node and dependencies. Package dependencies won't be downloaded again as they have been cached by the `build` job.
|
||||
- name: Install Node and dependencies
|
||||
uses: ./.github/actions/install-dependencies
|
||||
with:
|
||||
TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ env.TURBO_TEAM }}
|
||||
# * Run every `test` script in the workspace . Dependencies build is cached by Turborepo
|
||||
- name: Run unit tests
|
||||
run: pnpm run test:all
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v3
|
||||
with:
|
||||
files: '**/coverage/coverage-final.json'
|
||||
name: codecov-umbrella
|
||||
- name: Create summary
|
||||
run: |
|
||||
echo '### Code coverage' >> $GITHUB_STEP_SUMMARY
|
||||
echo 'Visit [codecov](https://app.codecov.io/gh/nhost/nhost/) to see the code coverage reports' >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
lint:
|
||||
name: Lint
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
# * Install Node and dependencies. Package dependencies won't be downloaded again as they have been cached by the `build` job.
|
||||
- name: Install Node and dependencies
|
||||
uses: ./.github/actions/install-dependencies
|
||||
with:
|
||||
TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ env.TURBO_TEAM }}
|
||||
- name: Enforce Prettier formatting in dashboard
|
||||
working-directory: ./dashboard
|
||||
run: pnpm prettier --check "./**/*.tsx" --config prettier.config.js
|
||||
# * Run every `lint` script in the workspace . Dependencies build is cached by Turborepo
|
||||
- name: Lint
|
||||
run: pnpm run lint:all
|
||||
- name: Audit for vulnerabilities
|
||||
run: pnpx audit-ci --config ./audit-ci.jsonc
|
||||
|
||||
e2e:
|
||||
name: 'E2E (Package: ${{ matrix.package.path }})'
|
||||
needs: build
|
||||
if: ${{ needs.build.outputs.matrix != '[]' && needs.build.outputs.matrix != '' }}
|
||||
strategy:
|
||||
# * Don't cancel other matrices when one fails
|
||||
fail-fast: false
|
||||
matrix:
|
||||
package: ${{ fromJson(needs.build.outputs.matrix) }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
# * Install Node and dependencies. Package dependencies won't be downloaded again as they have been cached by the `build` job.
|
||||
- name: Install Node and dependencies
|
||||
uses: ./.github/actions/install-dependencies
|
||||
with:
|
||||
TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ env.TURBO_TEAM }}
|
||||
# * Build Dashboard image to test it locally
|
||||
- name: Build Dashboard local image
|
||||
if: matrix.package.path == 'dashboard'
|
||||
run: |
|
||||
docker build -t nhost/dashboard:0.0.0-dev -f ${{ matrix.package.path }}/Dockerfile .
|
||||
mkdir -p nhost-test-project
|
||||
# * Install Nhost CLI if a `nhost/config.yaml` file is found
|
||||
- name: Install Nhost CLI
|
||||
if: hashFiles(format('{0}/nhost/config.yaml', matrix.package.path)) != '' && matrix.package.path != 'dashboard'
|
||||
uses: ./.github/actions/nhost-cli
|
||||
# * Install Nhost CLI to test Dashboard locally
|
||||
- name: Install Nhost CLI (Local Dashboard tests)
|
||||
timeout-minutes: 5
|
||||
if: matrix.package.path == 'dashboard'
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
init: 'true' # Initialize the application
|
||||
start: 'true' # Start the application
|
||||
path: ./nhost-test-project
|
||||
wait: 'true' # Wait until the application is ready
|
||||
dashboard-image: 'nhost/dashboard:0.0.0-dev'
|
||||
- name: Fetch Dashboard Preview URL
|
||||
id: fetch-dashboard-preview-url
|
||||
uses: zentered/vercel-preview-url@v1.1.9
|
||||
if: github.ref_name != 'main'
|
||||
env:
|
||||
VERCEL_TOKEN: ${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
|
||||
GITHUB_REF: ${{ github.ref_name }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
with:
|
||||
vercel_team_id: ${{ secrets.DASHBOARD_VERCEL_TEAM_ID }}
|
||||
vercel_project_id: ${{ secrets.DASHBOARD_STAGING_VERCEL_PROJECT_ID }}
|
||||
vercel_state: BUILDING,READY,INITIALIZING
|
||||
- name: Set Dashboard Preview URL
|
||||
if: steps.fetch-dashboard-preview-url.outputs.preview_url != ''
|
||||
run: echo "NHOST_TEST_DASHBOARD_URL=https://${{ steps.fetch-dashboard-preview-url.outputs.preview_url }}" >> $GITHUB_ENV
|
||||
- name: Run Upgrade project Dashboard e2e tests
|
||||
if: matrix.package.path == 'dashboard'
|
||||
timeout-minutes: 10
|
||||
run: pnpm --filter="${{ matrix.package.name }}" run e2e:upgrade-project
|
||||
# * Run the `ci` script of the current package of the matrix. Dependencies build is cached by Turborepo
|
||||
- name: Run e2e tests
|
||||
timeout-minutes: 20
|
||||
run: pnpm --filter="${{ matrix.package.name }}" run e2e
|
||||
# * Run the `e2e-local` script of the dashboard
|
||||
- name: Run Local Dashboard e2e tests
|
||||
if: matrix.package.path == 'dashboard'
|
||||
timeout-minutes: 5
|
||||
run: pnpm --filter="${{ matrix.package.name }}" run e2e:local
|
||||
|
||||
- name: Stop Nhost CLI
|
||||
if: matrix.package.path == 'dashboard'
|
||||
working-directory: ./nhost-test-project
|
||||
run: nhost down
|
||||
- name: Stop Nhost CLI for packages
|
||||
if: always() && (matrix.package.path == 'packages/hasura-auth-js' || matrix.package.path == 'packages/hasura-storage-js')
|
||||
working-directory: ./${{ matrix.package.path }}
|
||||
run: nhost down
|
||||
- id: file-name
|
||||
if: ${{ failure() }}
|
||||
name: Transform package name into a valid file name
|
||||
run: |
|
||||
PACKAGE_FILE_NAME=$(echo "${{ matrix.package.name }}" | sed 's/@//g; s/\//-/g')
|
||||
echo "fileName=$PACKAGE_FILE_NAME" >> $GITHUB_OUTPUT
|
||||
# * Run this step only if the previous step failed, and Playwright generated a report
|
||||
- name: Upload Playwright Report
|
||||
if: ${{ failure() && hashFiles(format('{0}/playwright-report/**', matrix.package.path)) != ''}}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: playwright-${{ steps.file-name.outputs.fileName }}
|
||||
path: ${{format('{0}/playwright-report/**', matrix.package.path)}}
|
||||
95
.github/workflows/ci_create_release.yaml
vendored
Normal file
95
.github/workflows/ci_create_release.yaml
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
---
|
||||
name: "ci: create release"
|
||||
on:
|
||||
pull_request:
|
||||
types: [closed]
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
create-release:
|
||||
if: github.event.pull_request.merged == true && startsWith(github.event.pull_request.title, 'release(')
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 30
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
pull-requests: read
|
||||
actions: write
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: cachix/install-nix-action@v31
|
||||
with:
|
||||
install_url: "https://releases.nixos.org/nix/nix-2.28.4/install"
|
||||
install_options: "--no-daemon"
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
sandbox = false
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
substituters = https://cache.nixos.org/?priority=40 s3://nhost-nix-cache?region=eu-central-1&priority=50
|
||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
keep-env-derivations = true
|
||||
keep-outputs = true
|
||||
|
||||
- name: Restore and save Nix store
|
||||
uses: nix-community/cache-nix-action@v6
|
||||
with:
|
||||
primary-key: nix-cliff-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/*.nix', '**/flake.lock') }}
|
||||
restore-prefixes-first-match: nix-cliff-${{ runner.os }}-${{ runner.arch }}}-
|
||||
gc-max-store-size-linux: 2G
|
||||
purge: true
|
||||
purge-prefixes: nix-cliff-
|
||||
purge-created: 0
|
||||
purge-last-accessed: 0
|
||||
purge-primary-key: never
|
||||
|
||||
- name: "Extract project and version from PR title"
|
||||
id: extract
|
||||
run: |
|
||||
TITLE="${{ github.event.pull_request.title }}"
|
||||
|
||||
PROJECT=$(echo "${TITLE}" | sed 's/release(\([^)]*\)).*/\1/')
|
||||
if [ -z "$PROJECT" ]; then
|
||||
echo "Error: Could not extract project name from PR title"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
VERSION=$(echo "${TITLE}" | sed 's/.*release([^)]*):\W*\(.*\).*/\1/')
|
||||
if [ -z "$VERSION" ]; then
|
||||
echo "Error: Could not extract version from PR title"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd $PROJECT
|
||||
|
||||
PROJECT_NAME=$(make release-tag-name)
|
||||
|
||||
echo "project=$PROJECT" >> $GITHUB_OUTPUT
|
||||
echo "project_name=$PROJECT_NAME" >> $GITHUB_OUTPUT
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "tag=$PROJECT_NAME@$VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: "Get unreleased changelog content"
|
||||
id: changelog
|
||||
run: |
|
||||
cd ${{ steps.extract.outputs.project }}
|
||||
CHANGELOG_CONTENT=$(nix develop .#cliff -c make changelog-get-unreleased)
|
||||
echo "content<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "$CHANGELOG_CONTENT" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: "Create GitHub Release"
|
||||
run: |
|
||||
gh release create "${{ steps.extract.outputs.tag }}" \
|
||||
--title "${{ steps.extract.outputs.tag }}" \
|
||||
--notes "${{ steps.changelog.outputs.content }}" \
|
||||
--target main
|
||||
env:
|
||||
# We need to use a PAT because GITHUB_TOKEN does not trigger workflows on releases
|
||||
GH_TOKEN: ${{ secrets.GH_PAT }}
|
||||
85
.github/workflows/ci_release.yaml
vendored
Normal file
85
.github/workflows/ci_release.yaml
vendored
Normal file
@@ -0,0 +1,85 @@
|
||||
---
|
||||
name: "ci: release"
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
extract-project:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 5
|
||||
outputs:
|
||||
project: ${{ steps.extract.outputs.project }}
|
||||
version: ${{ steps.extract.outputs.version }}
|
||||
steps:
|
||||
- name: "Extract project and version from tag"
|
||||
id: extract
|
||||
run: |
|
||||
TAG="${{ github.event.release.tag_name }}"
|
||||
|
||||
PROJECT=$(echo "${TAG}" | sed 's/@[^@]*$//')
|
||||
if [ -z "$PROJECT" ]; then
|
||||
echo "Error: Could not extract project name from tag"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
VERSION=$(echo "${TAG}" | sed 's/.*@//')
|
||||
if [ -z "$VERSION" ]; then
|
||||
echo "Error: Could not extract version from tag"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "project=$PROJECT" >> $GITHUB_OUTPUT
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "Extracted project: $PROJECT, version: $VERSION"
|
||||
|
||||
cli:
|
||||
needs: extract-project
|
||||
if: needs.extract-project.outputs.project == 'cli'
|
||||
uses: ./.github/workflows/cli_release.yaml
|
||||
with:
|
||||
NAME: cli
|
||||
GIT_REF: ${{ github.sha }}
|
||||
VERSION: ${{ needs.extract-project.outputs.version }}
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
GH_PAT: ${{ secrets.GH_PAT }}
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
dashboard:
|
||||
needs: extract-project
|
||||
if: needs.extract-project.outputs.project == '@nhost/dashboard'
|
||||
uses: ./.github/workflows/dashboard_release.yaml
|
||||
with:
|
||||
NAME: dashboard
|
||||
GIT_REF: ${{ github.sha }}
|
||||
VERSION: ${{ needs.extract-project.outputs.version }}
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
VERCEL_TEAM_ID: ${{ secrets.DASHBOARD_VERCEL_TEAM_ID }}
|
||||
VERCEL_PROJECT_ID: ${{ secrets.DASHBOARD_VERCEL_PROJECT_ID }}
|
||||
VERCEL_DEPLOY_TOKEN: ${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
|
||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_PRODUCTION }}
|
||||
GH_PAT: ${{ secrets.GH_PAT }}
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
nhost-js:
|
||||
needs: extract-project
|
||||
if: needs.extract-project.outputs.project == '@nhost/nhost-js'
|
||||
uses: ./.github/workflows/wf_release_npm.yaml
|
||||
with:
|
||||
NAME: nhost-js
|
||||
PATH: packages/nhost-js
|
||||
VERSION: ${{ needs.extract-project.outputs.version }}
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_PRODUCTION }}
|
||||
91
.github/workflows/ci_update_changelog.yaml
vendored
Normal file
91
.github/workflows/ci_update_changelog.yaml
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
---
|
||||
name: "ci: update changelog"
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
update-changelog:
|
||||
if: ${{ !startsWith(github.event.head_commit.message, 'release(') }}
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 30
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
project: [cli, dashboard, packages/nhost-js]
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
pull-requests: write
|
||||
actions: write
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: cachix/install-nix-action@v31
|
||||
with:
|
||||
install_url: "https://releases.nixos.org/nix/nix-2.28.4/install"
|
||||
install_options: "--no-daemon"
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
sandbox = false
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
substituters = https://cache.nixos.org/?priority=40 s3://nhost-nix-cache?region=eu-central-1&priority=50
|
||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
keep-env-derivations = true
|
||||
keep-outputs = true
|
||||
|
||||
- name: Restore and save Nix store
|
||||
uses: nix-community/cache-nix-action@v6
|
||||
with:
|
||||
primary-key: nix-cliff-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/*.nix', '**/flake.lock') }}
|
||||
restore-prefixes-first-match: nix-cliff-${{ runner.os }}-${{ runner.arch }}-
|
||||
gc-max-store-size-linux: 2G
|
||||
purge: true
|
||||
purge-prefixes: nix-cliff-${{ runner.os }}-
|
||||
purge-created: 0
|
||||
purge-last-accessed: 0
|
||||
purge-primary-key: never
|
||||
|
||||
- name: "Get next version"
|
||||
id: version
|
||||
run: |
|
||||
cd ${{ matrix.project }}
|
||||
TAG_NAME=$(make release-tag-name)
|
||||
VERSION=$(nix develop .\#cliff -c make changelog-next-version)
|
||||
if git tag | grep -q "$TAG_NAME@$VERSION"; then
|
||||
echo "Tag $TAG_NAME@$VERSION already exists, skipping release preparation"
|
||||
else
|
||||
echo "Tag $TAG_NAME@$VERSION does not exist, proceeding with release preparation"
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: "Update changelog"
|
||||
if: steps.version.outputs.version != ''
|
||||
run: |
|
||||
cd ${{ matrix.project }}
|
||||
nix develop .\#cliff -c make changelog-update
|
||||
|
||||
- name: "Create Pull Request"
|
||||
if: steps.version.outputs.version != ''
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
commit-message: "release(${{ matrix.project }}): ${{ steps.version.outputs.version }}"
|
||||
title: "release(${{ matrix.project }}): ${{ steps.version.outputs.version }}"
|
||||
committer: GitHub <noreply@github.com>
|
||||
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
|
||||
body: |
|
||||
Automated release preparation for ${{ matrix.project }} version ${{ steps.version.outputs.version }}
|
||||
|
||||
Changes:
|
||||
- Updated CHANGELOG.md
|
||||
branch: release/${{ matrix.project }}
|
||||
delete-branch: true
|
||||
labels: |
|
||||
release,${{ matrix.project }}
|
||||
97
.github/workflows/cli_checks.yaml
vendored
Normal file
97
.github/workflows/cli_checks.yaml
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
---
|
||||
name: "cli: check and build"
|
||||
on:
|
||||
# pull_request_target:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/cli_checks.yaml'
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/wf_build_artifacts.yaml'
|
||||
- '.github/workflows/cli_test_new_project.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common go
|
||||
- '.golangci.yaml'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'vendor/**'
|
||||
|
||||
# cli
|
||||
- 'cli/**'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: cli
|
||||
PATH: cli
|
||||
GIT_REF: ${{ github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
NHOST_PAT: ${{ secrets.NHOST_PAT }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: cli
|
||||
PATH: cli
|
||||
GIT_REF: ${{ github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: true
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
test_cli_build:
|
||||
uses: ./.github/workflows/cli_test_new_project.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
- build_artifacts
|
||||
with:
|
||||
NAME: cli
|
||||
PATH: cli
|
||||
GIT_REF: ${{ github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
NHOST_PAT: ${{ secrets.NHOST_PAT }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
143
.github/workflows/cli_release.yaml
vendored
Normal file
143
.github/workflows/cli_release.yaml
vendored
Normal file
@@ -0,0 +1,143 @@
|
||||
---
|
||||
name: "cli: release"
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
required: true
|
||||
type: string
|
||||
GIT_REF:
|
||||
required: true
|
||||
type: string
|
||||
VERSION:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
GH_PAT:
|
||||
required: true
|
||||
DOCKER_USERNAME:
|
||||
required: true
|
||||
DOCKER_PASSWORD:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
with:
|
||||
NAME: cli
|
||||
PATH: cli
|
||||
GIT_REF: ${{ inputs.GIT_REF }}
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
DOCKER: true
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
push-docker:
|
||||
uses: ./.github/workflows/wf_docker_push_image.yaml
|
||||
needs:
|
||||
- build_artifacts
|
||||
with:
|
||||
NAME: cli
|
||||
PATH: cli
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
build-multiplatform:
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: cli
|
||||
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 180
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- uses: cachix/install-nix-action@v31
|
||||
with:
|
||||
install_url: "https://releases.nixos.org/nix/nix-2.28.4/install"
|
||||
install_options: "--no-daemon"
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
sandbox = false
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
substituters = https://cache.nixos.org/?priority=40 s3://nhost-nix-cache?region=eu-central-1&priority=50
|
||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
keep-env-derivations = true
|
||||
keep-outputs = true
|
||||
|
||||
- name: Restore and save Nix store
|
||||
uses: nix-community/cache-nix-action@v6
|
||||
with:
|
||||
primary-key: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/*.nix', '**/flake.lock') }}
|
||||
restore-prefixes-first-match: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-
|
||||
gc-max-store-size-linux: 2G
|
||||
purge: true
|
||||
purge-prefixes: nix-${{ inputs.NAME }}-
|
||||
purge-created: 0
|
||||
purge-last-accessed: 0
|
||||
purge-primary-key: never
|
||||
|
||||
- name: Compute common env vars
|
||||
id: vars
|
||||
run: |
|
||||
echo "VERSION=$(make get-version VER=${{ inputs.VERSION }})" >> $GITHUB_OUTPUT
|
||||
ARCH=$([ "${{ runner.arch }}" == "X64" ] && echo "x86_64" || echo "aarch64")
|
||||
echo "ARCH=${ARCH}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: "Build artifact"
|
||||
run: |
|
||||
make build-multiplatform
|
||||
|
||||
- name: "Upload assets"
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GH_PAT }}
|
||||
run: |
|
||||
export VERSION=${{ steps.vars.outputs.VERSION }}
|
||||
|
||||
mkdir upload
|
||||
|
||||
tar cvzf upload/cli-$VERSION-darwin-amd64.tar.gz -C result/darwin/amd64 cli
|
||||
tar cvzf upload/cli-$VERSION-darwin-arm64.tar.gz -C result/darwin/arm64 cli
|
||||
tar cvzf upload/cli-$VERSION-linux-amd64.tar.gz -C result/linux/amd64 cli
|
||||
tar cvzf upload/cli-$VERSION-linux-arm64.tar.gz -C result/linux/arm64 cli
|
||||
|
||||
cd upload
|
||||
find . -type f -exec sha256sum {} + > ../checksums.txt
|
||||
cd ..
|
||||
|
||||
cat checksums.txt
|
||||
|
||||
gh release upload \
|
||||
--clobber "${{ github.ref_name }}" \
|
||||
./upload/* checksums.txt
|
||||
|
||||
- name: "Cache build"
|
||||
run: |
|
||||
nix store sign --key-file <(echo "${{ secrets.NIX_CACHE_PRIV_KEY }}") --all
|
||||
find /nix/store -maxdepth 1 -name "*-*" -type d | xargs -n 25 nix copy --to s3://nhost-nix-cache\?region=eu-central-1
|
||||
if: always()
|
||||
116
.github/workflows/cli_test_new_project.yaml
vendored
Normal file
116
.github/workflows/cli_test_new_project.yaml
vendored
Normal file
@@ -0,0 +1,116 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
type: string
|
||||
required: true
|
||||
PATH:
|
||||
type: string
|
||||
required: true
|
||||
GIT_REF:
|
||||
type: string
|
||||
required: false
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
NHOST_PAT:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 30
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.PATH }}
|
||||
|
||||
env:
|
||||
NHOST_PAT: ${{ secrets.NHOST_PAT }}
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
actions: read
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
|
||||
- name: Collect Workflow Telemetry
|
||||
uses: catchpoint/workflow-telemetry-action@v2
|
||||
with:
|
||||
comment_on_pr: false
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- uses: cachix/install-nix-action@v31
|
||||
with:
|
||||
install_url: "https://releases.nixos.org/nix/nix-2.28.4/install"
|
||||
install_options: "--no-daemon"
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
sandbox = false
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
substituters = https://cache.nixos.org/?priority=40 s3://nhost-nix-cache?region=eu-central-1&priority=50
|
||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
keep-env-derivations = true
|
||||
keep-outputs = true
|
||||
|
||||
- name: Restore and save Nix store
|
||||
uses: nix-community/cache-nix-action@v6
|
||||
with:
|
||||
primary-key: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/*.nix', '**/flake.lock') }}
|
||||
restore-prefixes-first-match: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-
|
||||
gc-max-store-size-linux: 2G
|
||||
purge: true
|
||||
purge-prefixes: nix-${{ inputs.NAME }}-
|
||||
purge-created: 0
|
||||
purge-last-accessed: 0
|
||||
purge-primary-key: never
|
||||
|
||||
- name: "Get artifacts"
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
path: ~/artifacts
|
||||
|
||||
- name: "Inspect artifacts"
|
||||
run: find ~/artifacts
|
||||
|
||||
- name: Load docker image
|
||||
run: |
|
||||
skopeo copy --insecure-policy \
|
||||
dir:/home/runner/artifacts/cli-docker-image-x86_64-0.0.0-dev \
|
||||
docker-daemon:cli:0.0.0-dev
|
||||
|
||||
- name: "Create a new project"
|
||||
run: |
|
||||
export NHOST_DOMAIN=staging.nhost.run
|
||||
export NHOST_CONFIGSERVER_IMAGE=cli:0.0.0-dev
|
||||
|
||||
unzip /home/runner/artifacts/cli-artifact-x86_64-0.0.0-dev/result.zip
|
||||
|
||||
mkdir new-project
|
||||
cd new-project
|
||||
/home/runner/_work/nhost/nhost/cli/result/bin/cli login --pat ${{ secrets.NHOST_PAT }}
|
||||
/home/runner/_work/nhost/nhost/cli/result/bin/cli init
|
||||
/home/runner/_work/nhost/nhost/cli/result/bin/cli up --down-on-error
|
||||
/home/runner/_work/nhost/nhost/cli/result/bin/cli down --volumes
|
||||
|
||||
- name: "Cache build"
|
||||
run: |
|
||||
nix store sign --key-file <(echo "${{ secrets.NIX_CACHE_PRIV_KEY }}") --all
|
||||
find /nix/store -maxdepth 1 -name "*-*" -type d | xargs -n 25 nix copy --to s3://nhost-nix-cache\?region=eu-central-1
|
||||
if: always()
|
||||
|
||||
78
.github/workflows/codegen_checks.yaml
vendored
Normal file
78
.github/workflows/codegen_checks.yaml
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
---
|
||||
name: "codegen: check and build"
|
||||
on:
|
||||
# pull_request_target:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/codegen_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common go
|
||||
- '.golangci.yaml'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'vendor/**'
|
||||
|
||||
# codegen
|
||||
- 'tools/codegen/**'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: codegen
|
||||
PATH: tools/codegen
|
||||
GIT_REF: ${{ github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: codegen
|
||||
PATH: tools/codegen
|
||||
GIT_REF: ${{ github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: false
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
49
.github/workflows/dashboard.yaml
vendored
49
.github/workflows/dashboard.yaml
vendored
@@ -1,49 +0,0 @@
|
||||
name: 'Dashboard'
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
|
||||
env:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: nhost
|
||||
NEXT_PUBLIC_ENV: dev
|
||||
NEXT_TELEMETRY_DISABLED: 1
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install Node and dependencies
|
||||
uses: ./.github/actions/install-dependencies
|
||||
with:
|
||||
TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ env.TURBO_TEAM }}
|
||||
- name: Build the application
|
||||
run: pnpm build:dashboard
|
||||
|
||||
tests:
|
||||
name: Tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install Node and dependencies
|
||||
uses: ./.github/actions/install-dependencies
|
||||
with:
|
||||
TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ env.TURBO_TEAM }}
|
||||
- name: Run tests
|
||||
run: pnpm test:dashboard
|
||||
|
||||
lint:
|
||||
name: Lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install Node and dependencies
|
||||
uses: ./.github/actions/install-dependencies
|
||||
with:
|
||||
TURBO_TOKEN: ${{ env.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ env.TURBO_TEAM }}
|
||||
- run: pnpm lint:dashboard
|
||||
135
.github/workflows/dashboard_checks.yaml
vendored
Normal file
135
.github/workflows/dashboard_checks.yaml
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
---
|
||||
name: "dashboard: check and build"
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/wf_build_artifacts.yaml'
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/dashboard_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
# dashboard
|
||||
- "dashboard/**"
|
||||
|
||||
# nhost-js
|
||||
- packages/nhost-js/**
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
deploy-vercel:
|
||||
uses: ./.github/workflows/wf_deploy_vercel.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: dashboard
|
||||
GIT_REF: ${{ github.sha }}
|
||||
ENVIRONMENT: preview
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
VERCEL_TEAM_ID: ${{ secrets.DASHBOARD_VERCEL_TEAM_ID }}
|
||||
VERCEL_PROJECT_ID: ${{ secrets.DASHBOARD_STAGING_VERCEL_PROJECT_ID }}
|
||||
VERCEL_DEPLOY_TOKEN: ${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
|
||||
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: dashboard
|
||||
PATH: dashboard
|
||||
GIT_REF: ${{ github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: true
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
- build_artifacts
|
||||
with:
|
||||
NAME: dashboard
|
||||
PATH: dashboard
|
||||
GIT_REF: ${{ github.sha }}
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
|
||||
e2e_staging:
|
||||
uses: ./.github/workflows/wf_dashboard_e2e_staging.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
- deploy-vercel
|
||||
- build_artifacts
|
||||
with:
|
||||
NAME: dashboard
|
||||
PATH: dashboard
|
||||
GIT_REF: ${{ github.sha }}
|
||||
NHOST_TEST_DASHBOARD_URL: ${{ needs.deploy-vercel.outputs.preview-url }}
|
||||
NHOST_TEST_PROJECT_NAME: ${{ vars.NHOST_TEST_PROJECT_NAME }}
|
||||
NHOST_TEST_ORGANIZATION_NAME: ${{ vars.NHOST_TEST_ORGANIZATION_NAME }}
|
||||
NHOST_TEST_ORGANIZATION_SLUG: ${{ vars.NHOST_TEST_ORGANIZATION_SLUG }}
|
||||
NHOST_TEST_PERSONAL_ORG_SLUG: ${{ vars.NHOST_TEST_PERSONAL_ORG_SLUG }}
|
||||
NHOST_TEST_PROJECT_SUBDOMAIN: ${{ vars.NHOST_TEST_PROJECT_SUBDOMAIN }}
|
||||
NHOST_TEST_PROJECT_REMOTE_SCHEMA_NAME: ${{ vars.NHOST_TEST_PROJECT_REMOTE_SCHEMA_NAME }}
|
||||
NHOST_PRO_TEST_PROJECT_NAME: ${{ vars.NHOST_PRO_TEST_PROJECT_NAME }}
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
DASHBOARD_VERCEL_DEPLOY_TOKEN: ${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
|
||||
DASHBOARD_VERCEL_TEAM_ID: ${{ secrets.DASHBOARD_VERCEL_TEAM_ID }}
|
||||
DASHBOARD_STAGING_VERCEL_PROJECT_ID: ${{ secrets.DASHBOARD_STAGING_VERCEL_PROJECT_ID }}
|
||||
NHOST_TEST_USER_EMAIL: ${{ secrets.NHOST_TEST_USER_EMAIL }}
|
||||
NHOST_TEST_USER_PASSWORD: ${{ secrets.NHOST_TEST_USER_PASSWORD }}
|
||||
NHOST_TEST_PROJECT_ADMIN_SECRET: ${{ secrets.NHOST_TEST_PROJECT_ADMIN_SECRET }}
|
||||
NHOST_TEST_FREE_USER_EMAILS: ${{ secrets.NHOST_TEST_FREE_USER_EMAILS }}
|
||||
PLAYWRIGHT_REPORT_ENCRYPTION_KEY: ${{ secrets.PLAYWRIGHT_REPORT_ENCRYPTION_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
105
.github/workflows/dashboard_release.yaml
vendored
Normal file
105
.github/workflows/dashboard_release.yaml
vendored
Normal file
@@ -0,0 +1,105 @@
|
||||
---
|
||||
name: 'dashboard: release'
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
required: true
|
||||
type: string
|
||||
GIT_REF:
|
||||
required: true
|
||||
type: string
|
||||
VERSION:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
VERCEL_TEAM_ID:
|
||||
required: true
|
||||
VERCEL_PROJECT_ID:
|
||||
required: true
|
||||
VERCEL_DEPLOY_TOKEN:
|
||||
required: true
|
||||
DISCORD_WEBHOOK:
|
||||
required: false
|
||||
GH_PAT:
|
||||
required: true
|
||||
DOCKER_USERNAME:
|
||||
required: true
|
||||
DOCKER_PASSWORD:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
deploy-vercel:
|
||||
uses: ./.github/workflows/wf_deploy_vercel.yaml
|
||||
with:
|
||||
NAME: dashboard
|
||||
GIT_REF: ${{ inputs.GIT_REF }}
|
||||
ENVIRONMENT: production
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
VERCEL_TEAM_ID: ${{ secrets.VERCEL_TEAM_ID }}
|
||||
VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }}
|
||||
VERCEL_DEPLOY_TOKEN: ${{ secrets.VERCEL_DEPLOY_TOKEN }}
|
||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
with:
|
||||
NAME: dashboard
|
||||
PATH: dashboard
|
||||
GIT_REF: ${{ inputs.GIT_REF }}
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
DOCKER: true
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
push-docker:
|
||||
uses: ./.github/workflows/wf_docker_push_image.yaml
|
||||
needs:
|
||||
- build_artifacts
|
||||
with:
|
||||
NAME: dashboard
|
||||
PATH: dashboard
|
||||
VERSION: ${{ inputs.VERSION }}
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
bump-cli:
|
||||
name: Bump Dashboard version in the Nhost CLI
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- push-docker
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
token: ${{ secrets.GH_PAT }}
|
||||
|
||||
- name: Bump version in source code
|
||||
run: |
|
||||
find cli -type f -exec sed -i 's/"nhost\/dashboard:[^"]*"/"nhost\/dashboard:${{ inputs.VERSION }}"/g' {} +
|
||||
|
||||
- name: "Create Pull Request"
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GH_PAT }}
|
||||
title: "chore(cli): bump nhost/dashboard to ${{ inputs.VERSION }}"
|
||||
commit-message: "chore: bump nhost/dashboard to ${{ inputs.VERSION }}"
|
||||
committer: GitHub <noreply@github.com>
|
||||
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
|
||||
body: |
|
||||
This PR bumps the Nhost Dashboard Docker image to version ${{ inputs.VERSION }}.
|
||||
branch: bump-dashboard-version
|
||||
delete-branch: true
|
||||
22
.github/workflows/dashboard_release_staging.yaml
vendored
Normal file
22
.github/workflows/dashboard_release_staging.yaml
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
---
|
||||
name: "dashboard: release staging"
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
deploy-vercel:
|
||||
uses: ./.github/workflows/wf_deploy_vercel.yaml
|
||||
with:
|
||||
NAME: dashboard
|
||||
GIT_REF: ${{ github.sha }}
|
||||
ENVIRONMENT: production
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
VERCEL_TEAM_ID: ${{ secrets.DASHBOARD_VERCEL_TEAM_ID }}
|
||||
VERCEL_PROJECT_ID: ${{ secrets.DASHBOARD_STAGING_VERCEL_PROJECT_ID }}
|
||||
VERCEL_DEPLOY_TOKEN: ${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
|
||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_STAGING }}
|
||||
86
.github/workflows/deploy-dashboard.yaml
vendored
86
.github/workflows/deploy-dashboard.yaml
vendored
@@ -1,86 +0,0 @@
|
||||
name: 'dashboard: release form'
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
git_ref:
|
||||
type: string
|
||||
description: 'Branch, tag, or commit SHA'
|
||||
required: true
|
||||
|
||||
environment:
|
||||
type: choice
|
||||
description: 'Deployment environment'
|
||||
required: true
|
||||
default: staging
|
||||
options:
|
||||
- staging
|
||||
- production
|
||||
|
||||
workflow_call:
|
||||
inputs:
|
||||
git_ref:
|
||||
required: true
|
||||
type: string
|
||||
environment:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
publish-vercel:
|
||||
name: Publish to Vercel
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ inputs.git_ref }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install Node and dependencies
|
||||
uses: ./.github/actions/install-dependencies
|
||||
with:
|
||||
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
|
||||
TURBO_TEAM: ${{ secrets.TURBO_TEAM }}
|
||||
|
||||
- name: Setup Vercel CLI
|
||||
run: pnpm add -g vercel
|
||||
|
||||
- name: Trigger Vercel deployment
|
||||
env:
|
||||
VERCEL_ORG_ID: ${{ secrets.DASHBOARD_VERCEL_TEAM_ID }}
|
||||
VERCEL_PROJECT_ID: ${{ inputs.environment == 'production' && secrets.DASHBOARD_VERCEL_PROJECT_ID || secrets.DASHBOARD_STAGING_VERCEL_PROJECT_ID }}
|
||||
run: |
|
||||
echo "Deploying to: ${{ inputs.environment }}..."
|
||||
vercel pull --environment=production --token=${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
|
||||
vercel build --prod --token=${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
|
||||
vercel deploy --prebuilt --prod --token=${{ secrets.DASHBOARD_VERCEL_DEPLOY_TOKEN }}
|
||||
|
||||
- name: Send Discord notification (success)
|
||||
if: success()
|
||||
uses: tsickert/discord-webhook@v7.0.0
|
||||
with:
|
||||
webhook-url: ${{ secrets.DISCORD_WEBHOOK_PRODUCTION }}
|
||||
embed-title: "Dashboard Deployment"
|
||||
embed-description: |
|
||||
**Status**: success
|
||||
**Triggered by**: ${{ github.actor }}
|
||||
|
||||
**Inputs**:
|
||||
- Git Ref: ${{ inputs.git_ref }}
|
||||
embed-color: '5763719'
|
||||
|
||||
- name: Send Discord notification (failure)
|
||||
if: failure()
|
||||
uses: tsickert/discord-webhook@v7.0.0
|
||||
with:
|
||||
webhook-url: ${{ secrets.DISCORD_WEBHOOK_PRODUCTION }}
|
||||
embed-title: "Dashboard Deployment"
|
||||
embed-description: |
|
||||
**Status**: failure
|
||||
**Triggered by**: ${{ github.actor }}
|
||||
|
||||
**Inputs**:
|
||||
- Git Ref: ${{ inputs.git_ref }}
|
||||
embed-color: '15548997'
|
||||
70
.github/workflows/docs_checks.yaml
vendored
Normal file
70
.github/workflows/docs_checks.yaml
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
---
|
||||
name: "docs: check and build"
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/dashboard_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
# docs
|
||||
- docs/**
|
||||
|
||||
# nhost-js
|
||||
- packages/nhost-js/**
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: docs
|
||||
PATH: docs
|
||||
GIT_REF: ${{ github.sha }}
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
94
.github/workflows/examples_demos_checks.yaml
vendored
Normal file
94
.github/workflows/examples_demos_checks.yaml
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
---
|
||||
name: "examples/demos: check and build"
|
||||
on:
|
||||
# pull_request_target:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/examples_demos_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common go
|
||||
- '.golangci.yaml'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'vendor/**'
|
||||
|
||||
# codegen
|
||||
- 'tools/codegen/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
      # nhost-js
|
||||
- 'packages/nhost-js/**'
|
||||
|
||||
# demos
|
||||
- 'examples/demos/**'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: demos
|
||||
PATH: examples/demos
|
||||
GIT_REF: ${{ github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: demos
|
||||
PATH: examples/demos
|
||||
GIT_REF: ${{ github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: false
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
94
.github/workflows/examples_guides_checks.yaml
vendored
Normal file
94
.github/workflows/examples_guides_checks.yaml
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
---
|
||||
name: "examples/guides: check and build"
|
||||
on:
|
||||
# pull_request_target:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/examples_guides_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common go
|
||||
- '.golangci.yaml'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'vendor/**'
|
||||
|
||||
# codegen
|
||||
- 'tools/codegen/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
      # nhost-js
|
||||
- 'packages/nhost-js/**'
|
||||
|
||||
# guides
|
||||
- 'examples/guides/**'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: guides
|
||||
PATH: examples/guides
|
||||
GIT_REF: ${{ github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: guides
|
||||
PATH: examples/guides
|
||||
GIT_REF: ${{ github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: false
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
94
.github/workflows/examples_tutorials_checks.yaml
vendored
Normal file
94
.github/workflows/examples_tutorials_checks.yaml
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
---
|
||||
name: "examples/tutorials: check and build"
|
||||
on:
|
||||
# pull_request_target:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/examples_tutorials_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common go
|
||||
- '.golangci.yaml'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'vendor/**'
|
||||
|
||||
# codegen
|
||||
- 'tools/codegen/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
      # nhost-js
|
||||
- 'packages/nhost-js/**'
|
||||
|
||||
# tutorials
|
||||
- 'examples/tutorials/**'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: tutorials
|
||||
PATH: examples/tutorials
|
||||
GIT_REF: ${{ github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: tutorials
|
||||
PATH: examples/tutorials
|
||||
GIT_REF: ${{ github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: false
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
56
.github/workflows/gen_codeql-analysis.yml
vendored
Normal file
56
.github/workflows/gen_codeql-analysis.yml
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push: {}
|
||||
pull_request: {}
|
||||
schedule:
|
||||
- cron: '20 23 * * 3'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'javascript' ]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Learn more about CodeQL language support at https://git.io/codeql-language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
|
||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
||||
# and modify them (or add more) to build your code if your project
|
||||
# uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
@@ -15,9 +15,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
|
||||
15
.github/workflows/labeler.yaml
vendored
15
.github/workflows/labeler.yaml
vendored
@@ -1,15 +0,0 @@
|
||||
name: 'Pull Request Labeler'
|
||||
on:
|
||||
- pull_request_target
|
||||
|
||||
jobs:
|
||||
triage:
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/labeler@v4
|
||||
with:
|
||||
repo-token: '${{ secrets.GH_PAT }}'
|
||||
sync-labels: ''
|
||||
91
.github/workflows/nhost-js_checks.yaml
vendored
Normal file
91
.github/workflows/nhost-js_checks.yaml
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
---
|
||||
name: "nhost-js: check and build"
|
||||
on:
|
||||
# pull_request_target:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/nhost-js_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
# common go
|
||||
- '.golangci.yaml'
|
||||
- 'go.mod'
|
||||
- 'go.sum'
|
||||
- 'vendor/**'
|
||||
|
||||
# codegen
|
||||
- 'tools/codegen/**'
|
||||
|
||||
# common javascript
|
||||
- ".npmrc"
|
||||
- ".prettierignore"
|
||||
- ".prettierrc.js"
|
||||
- "audit-ci.jsonc"
|
||||
- "package.json"
|
||||
- "pnpm-workspace.yaml"
|
||||
- "pnpm-lock.yaml"
|
||||
- "turbo.json"
|
||||
|
||||
# nhost-js
|
||||
- 'packages/nhost-js/**'
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: nhost-js
|
||||
PATH: packages/nhost-js
|
||||
GIT_REF: ${{ github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: nhost-js
|
||||
PATH: packages/nhost-js
|
||||
GIT_REF: ${{ github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: false
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
70
.github/workflows/nixops_checks.yaml
vendored
Normal file
70
.github/workflows/nixops_checks.yaml
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
---
|
||||
name: "nixops: check and build"
|
||||
on:
|
||||
# pull_request_target:
|
||||
pull_request:
|
||||
paths:
|
||||
- '.github/workflows/wf_check.yaml'
|
||||
- '.github/workflows/nixops_checks.yaml'
|
||||
|
||||
# common build
|
||||
- 'flake.nix'
|
||||
- 'flake.lock'
|
||||
- 'nixops/**'
|
||||
- 'build/**'
|
||||
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
check-permissions:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: |
|
||||
echo "github.event_name: ${{ github.event_name }}"
|
||||
echo "github.event.pull_request.author_association: ${{ github.event.pull_request.author_association }}"
|
||||
- name: "This task will run and fail if user has no permissions and label safe_to_test isn't present"
|
||||
if: "github.event_name == 'pull_request_target' && ! ( contains(github.event.pull_request.labels.*.name, 'safe_to_test') || contains(fromJson('[\"OWNER\", \"MEMBER\", \"COLLABORATOR\"]'), github.event.pull_request.author_association) )"
|
||||
run: |
|
||||
exit 1
|
||||
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: nixops
|
||||
PATH: nixops
|
||||
GIT_REF: ${{ github.sha }}
|
||||
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
build_artifacts:
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
needs:
|
||||
- check-permissions
|
||||
with:
|
||||
NAME: nixops
|
||||
PATH: nixops
|
||||
GIT_REF: ${{ github.sha }}
|
||||
VERSION: 0.0.0-dev # we use a fixed version here to avoid unnecessary rebuilds
|
||||
DOCKER: false
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID: ${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}
|
||||
NIX_CACHE_PUB_KEY: ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
NIX_CACHE_PRIV_KEY: ${{ secrets.NIX_CACHE_PRIV_KEY }}
|
||||
|
||||
remove_label:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- check-permissions
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@v1
|
||||
with:
|
||||
labels: |
|
||||
safe_to_test
|
||||
if: contains(github.event.pull_request.labels.*.name, 'safe_to_test')
|
||||
79
.github/workflows/test-nhost-cli-action.yaml
vendored
79
.github/workflows/test-nhost-cli-action.yaml
vendored
@@ -1,79 +0,0 @@
|
||||
name: Test Nhost CLI action
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
types: [opened, synchronize]
|
||||
paths:
|
||||
- '.github/actions/nhost-cli/**'
|
||||
- '!.github/actions/nhost-cli/**/*.md'
|
||||
|
||||
jobs:
|
||||
install:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install the Nhost CLI
|
||||
uses: ./.github/actions/nhost-cli
|
||||
- name: should succeed running the nhost command
|
||||
run: nhost
|
||||
|
||||
start:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install the Nhost CLI and start the application
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
init: true
|
||||
start: true
|
||||
- name: should be running
|
||||
run: curl -sSf 'https://local.hasura.local.nhost.run/' > /dev/null
|
||||
|
||||
stop:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install the Nhost CLI, start and stop the application
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
init: true
|
||||
start: true
|
||||
stop: true
|
||||
- name: should have no live docker container
|
||||
run: |
|
||||
          if [ -n "$(docker ps -q)" ]; then
|
||||
echo "Some docker containers are still running"
|
||||
docker ps
|
||||
exit 1
|
||||
fi
|
||||
|
||||
config:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install the Nhost CLI and run the application
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
init: true
|
||||
version: v1.29.3
|
||||
start: true
|
||||
- name: should find the injected hasura-auth version
|
||||
run: |
|
||||
VERSION=$(curl -sSf 'https://local.auth.local.nhost.run/v1/version')
|
||||
EXPECTED_VERSION='{"version":"0.36.1"}'
|
||||
if [ "$VERSION" != "$EXPECTED_VERSION" ]; then
|
||||
echo "Expected version $EXPECTED_VERSION but got $VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
version:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install the Nhost CLI
|
||||
uses: ./.github/actions/nhost-cli
|
||||
with:
|
||||
version: v1.27.2
|
||||
- name: should find the correct version
|
||||
run: nhost --version | head -n 1 | grep v1.27.2 || exit 1
|
||||
131
.github/workflows/wf_build_artifacts.yaml
vendored
Normal file
131
.github/workflows/wf_build_artifacts.yaml
vendored
Normal file
@@ -0,0 +1,131 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
type: string
|
||||
required: true
|
||||
GIT_REF:
|
||||
type: string
|
||||
required: false
|
||||
VERSION:
|
||||
type: string
|
||||
required: true
|
||||
PATH:
|
||||
type: string
|
||||
required: true
|
||||
DOCKER:
|
||||
type: boolean
|
||||
required: true
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
artifacts:
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.PATH }}
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
os: [blacksmith-4vcpu-ubuntu-2404-arm, blacksmith-2vcpu-ubuntu-2404]
|
||||
fail-fast: true
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
timeout-minutes: 180
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- uses: cachix/install-nix-action@v31
|
||||
with:
|
||||
install_url: "https://releases.nixos.org/nix/nix-2.28.4/install"
|
||||
install_options: "--no-daemon"
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
sandbox = false
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
substituters = https://cache.nixos.org/?priority=40 s3://nhost-nix-cache?region=eu-central-1&priority=50
|
||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
keep-env-derivations = true
|
||||
keep-outputs = true
|
||||
|
||||
- name: Restore and save Nix store
|
||||
uses: nix-community/cache-nix-action@v6
|
||||
with:
|
||||
primary-key: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/*.nix', '**/flake.lock') }}
|
||||
          restore-prefixes-first-match: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-
|
||||
gc-max-store-size-linux: 2G
|
||||
purge: true
|
||||
purge-prefixes: nix-${{ inputs.NAME }}-
|
||||
purge-created: 0
|
||||
purge-last-accessed: 0
|
||||
purge-primary-key: never
|
||||
|
||||
# - name: "Verify if nixops is pre-built"
|
||||
# id: verify-nixops-build
|
||||
# run: |
|
||||
# export drvPath=$(make build-nixops-dry-run)
|
||||
# echo "Derivation path: $drvPath"
|
||||
# nix path-info --store s3://nhost-nix-cache\?region=eu-central-1 $drvPath \
|
||||
# || (echo "Wait until nixops is already built and cached and run again" && exit 1)
|
||||
# if: ${{ inputs.NAME != 'nixops' }}
|
||||
|
||||
- name: Compute common env vars
|
||||
id: vars
|
||||
run: |
|
||||
echo "VERSION=$(make get-version VER=${{ inputs.VERSION }})" >> $GITHUB_OUTPUT
|
||||
ARCH=$([ "${{ runner.arch }}" == "X64" ] && echo "x86_64" || echo "aarch64")
|
||||
echo "ARCH=${ARCH}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: "Build artifact"
|
||||
run: |
|
||||
make build
|
||||
zip -r result.zip result
|
||||
|
||||
- name: "Push artifact to artifact repository"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.NAME }}-artifact-${{ steps.vars.outputs.ARCH }}-${{ steps.vars.outputs.VERSION }}
|
||||
path: ${{ inputs.PATH }}/result.zip
|
||||
retention-days: 7
|
||||
|
||||
- name: "Build docker image"
|
||||
run: |
|
||||
sudo chmod 755 /run/containers
|
||||
sudo mkdir -p "/run/containers/$(id -u runner)"
|
||||
sudo chown runner: "/run/containers/$(id -u runner)"
|
||||
make build-docker-image
|
||||
if: ${{ ( inputs.DOCKER ) }}
|
||||
|
||||
- name: "Push docker image to artifact repository"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ inputs.NAME }}-docker-image-${{ steps.vars.outputs.ARCH }}-${{ steps.vars.outputs.VERSION }}
|
||||
path: ${{ inputs.PATH }}/result
|
||||
retention-days: 7
|
||||
if: ${{ ( inputs.DOCKER ) }}
|
||||
|
||||
- name: "Cache build"
|
||||
run: |
|
||||
nix store sign --key-file <(echo "${{ secrets.NIX_CACHE_PRIV_KEY }}") --all
|
||||
find /nix/store -maxdepth 1 -name "*-*" -type d | xargs -n 25 nix copy --to s3://nhost-nix-cache\?region=eu-central-1
|
||||
if: always()
|
||||
116
.github/workflows/wf_check.yaml
vendored
Normal file
116
.github/workflows/wf_check.yaml
vendored
Normal file
@@ -0,0 +1,116 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
type: string
|
||||
required: true
|
||||
PATH:
|
||||
type: string
|
||||
required: true
|
||||
GIT_REF:
|
||||
type: string
|
||||
required: false
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
NHOST_PAT:
|
||||
required: false
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 30
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.PATH }}
|
||||
|
||||
env:
|
||||
NHOST_PAT: ${{ secrets.NHOST_PAT }}
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
actions: read
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
|
||||
- name: Collect Workflow Telemetry
|
||||
uses: catchpoint/workflow-telemetry-action@v2
|
||||
with:
|
||||
comment_on_pr: false
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- uses: cachix/install-nix-action@v31
|
||||
with:
|
||||
install_url: "https://releases.nixos.org/nix/nix-2.28.4/install"
|
||||
install_options: "--no-daemon"
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
sandbox = false
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
substituters = https://cache.nixos.org/?priority=40 s3://nhost-nix-cache?region=eu-central-1&priority=50
|
||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
keep-env-derivations = true
|
||||
keep-outputs = true
|
||||
|
||||
- name: Restore and save Nix store
|
||||
uses: nix-community/cache-nix-action@v6
|
||||
with:
|
||||
primary-key: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/*.nix', '**/flake.lock') }}
|
||||
          restore-prefixes-first-match: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-
|
||||
gc-max-store-size-linux: 2G
|
||||
purge: true
|
||||
purge-prefixes: nix-${{ inputs.NAME }}-
|
||||
purge-created: 0
|
||||
purge-last-accessed: 0
|
||||
purge-primary-key: never
|
||||
|
||||
# - name: "Verify if nixops is pre-built"
|
||||
# id: verify-nixops-build
|
||||
# run: |
|
||||
# export drvPath=$(make build-nixops-dry-run)
|
||||
# echo "Derivation path: $drvPath"
|
||||
# nix path-info --store s3://nhost-nix-cache\?region=eu-central-1 $drvPath \
|
||||
# || (echo "Wait until nixops is already built and cached and run again" && exit 1)
|
||||
# if: ${{ inputs.NAME != 'nixops' }}
|
||||
|
||||
- name: "Verify if we need to build"
|
||||
id: verify-build
|
||||
run: |
|
||||
export drvPath=$(make check-dry-run)
|
||||
echo "Derivation path: $drvPath"
|
||||
nix path-info --store s3://nhost-nix-cache\?region=eu-central-1 $drvPath \
|
||||
&& export BUILD_NEEDED=no \
|
||||
|| export BUILD_NEEDED=yes
|
||||
echo BUILD_NEEDED=$BUILD_NEEDED >> $GITHUB_OUTPUT
|
||||
echo DERIVATION_PATH=$drvPath >> $GITHUB_OUTPUT
|
||||
|
||||
- name: "Start containers for integration tests"
|
||||
run: |
|
||||
nix develop .\#${{ inputs.NAME }} -c make dev-env-up
|
||||
if: ${{ steps.verify-build.outputs.BUILD_NEEDED == 'yes' }}
|
||||
|
||||
- name: "Run checks"
|
||||
run: make check
|
||||
if: ${{ steps.verify-build.outputs.BUILD_NEEDED == 'yes' }}
|
||||
|
||||
- name: "Cache build"
|
||||
run: |
|
||||
nix store sign --key-file <(echo "${{ secrets.NIX_CACHE_PRIV_KEY }}") --all
|
||||
find /nix/store -maxdepth 1 -name "*-*" -type d | xargs -n 25 nix copy --to s3://nhost-nix-cache\?region=eu-central-1
|
||||
if: always()
|
||||
169
.github/workflows/wf_dashboard_e2e_staging.yaml
vendored
Normal file
169
.github/workflows/wf_dashboard_e2e_staging.yaml
vendored
Normal file
@@ -0,0 +1,169 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
type: string
|
||||
required: true
|
||||
PATH:
|
||||
type: string
|
||||
required: true
|
||||
GIT_REF:
|
||||
type: string
|
||||
required: false
|
||||
NHOST_TEST_DASHBOARD_URL:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_TEST_PROJECT_NAME:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_TEST_ORGANIZATION_NAME:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_TEST_ORGANIZATION_SLUG:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_TEST_PERSONAL_ORG_SLUG:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_TEST_PROJECT_SUBDOMAIN:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_TEST_PROJECT_REMOTE_SCHEMA_NAME:
|
||||
type: string
|
||||
required: true
|
||||
NHOST_PRO_TEST_PROJECT_NAME:
|
||||
type: string
|
||||
required: true
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
DASHBOARD_VERCEL_DEPLOY_TOKEN:
|
||||
required: true
|
||||
DASHBOARD_VERCEL_TEAM_ID:
|
||||
required: true
|
||||
DASHBOARD_STAGING_VERCEL_PROJECT_ID:
|
||||
required: true
|
||||
NHOST_TEST_USER_EMAIL:
|
||||
required: true
|
||||
NHOST_TEST_USER_PASSWORD:
|
||||
required: true
|
||||
NHOST_TEST_PROJECT_ADMIN_SECRET:
|
||||
required: true
|
||||
NHOST_TEST_FREE_USER_EMAILS:
|
||||
required: true
|
||||
PLAYWRIGHT_REPORT_ENCRYPTION_KEY:
|
||||
required: true
|
||||
|
||||
env:
|
||||
NEXT_PUBLIC_ENV: dev
|
||||
NEXT_TELEMETRY_DISABLED: 1
|
||||
NHOST_TEST_DASHBOARD_URL: ${{ inputs.NHOST_TEST_DASHBOARD_URL }}
|
||||
NHOST_TEST_PROJECT_NAME: ${{ inputs.NHOST_TEST_PROJECT_NAME }}
|
||||
NHOST_TEST_ORGANIZATION_NAME: ${{ inputs.NHOST_TEST_ORGANIZATION_NAME }}
|
||||
NHOST_TEST_ORGANIZATION_SLUG: ${{ inputs.NHOST_TEST_ORGANIZATION_SLUG }}
|
||||
NHOST_TEST_PERSONAL_ORG_SLUG: ${{ inputs.NHOST_TEST_PERSONAL_ORG_SLUG }}
|
||||
NHOST_TEST_PROJECT_SUBDOMAIN: ${{ inputs.NHOST_TEST_PROJECT_SUBDOMAIN }}
|
||||
NHOST_TEST_PROJECT_REMOTE_SCHEMA_NAME: ${{ inputs.NHOST_TEST_PROJECT_REMOTE_SCHEMA_NAME }}
|
||||
NHOST_PRO_TEST_PROJECT_NAME: ${{ inputs.NHOST_PRO_TEST_PROJECT_NAME }}
|
||||
NHOST_TEST_USER_EMAIL: ${{ secrets.NHOST_TEST_USER_EMAIL }}
|
||||
NHOST_TEST_USER_PASSWORD: ${{ secrets.NHOST_TEST_USER_PASSWORD }}
|
||||
NHOST_TEST_PROJECT_ADMIN_SECRET: ${{ secrets.NHOST_TEST_PROJECT_ADMIN_SECRET }}
|
||||
NHOST_TEST_FREE_USER_EMAILS: ${{ secrets.NHOST_TEST_FREE_USER_EMAILS }}
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 30
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.PATH }}
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
actions: read
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
|
||||
- name: Collect Workflow Telemetry
|
||||
uses: catchpoint/workflow-telemetry-action@v2
|
||||
with:
|
||||
comment_on_pr: false
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- uses: cachix/install-nix-action@v31
|
||||
with:
|
||||
install_url: "https://releases.nixos.org/nix/nix-2.28.4/install"
|
||||
install_options: "--no-daemon"
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
sandbox = false
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
substituters = https://cache.nixos.org/?priority=40 s3://nhost-nix-cache?region=eu-central-1&priority=50
|
||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
keep-env-derivations = true
|
||||
keep-outputs = true
|
||||
|
||||
- name: Restore and save Nix store
|
||||
uses: nix-community/cache-nix-action@v6
|
||||
with:
|
||||
primary-key: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/*.nix', '**/flake.lock') }}
|
||||
restore-prefixes-first-match: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-
|
||||
gc-max-store-size-linux: 2G
|
||||
purge: true
|
||||
purge-prefixes: nix-${{ inputs.NAME }}-
|
||||
purge-created: 0
|
||||
purge-last-accessed: 0
|
||||
purge-primary-key: never
|
||||
|
||||
- name: Start CLI
|
||||
run: |
|
||||
nix develop .\#dashboard -c make dev-env-cli-up
|
||||
|
||||
- name: Run e2e tests
|
||||
run: nix develop .\#dashboard -c pnpm e2e
|
||||
|
||||
- name: Run e2e onboarding tests
|
||||
run: nix develop .\#dashboard -c pnpm e2e:onboarding
|
||||
|
||||
- name: Run e2e local tests
|
||||
run: nix develop .\#dashboard -c pnpm e2e:local
|
||||
|
||||
- name: Encrypt Playwright report
|
||||
if: failure()
|
||||
run: |
|
||||
tar -czf playwright-report.tar.gz playwright-report/
|
||||
openssl enc -aes-256-cbc -salt -pbkdf2 -iter 100000 \
|
||||
-in playwright-report.tar.gz \
|
||||
-out playwright-report.tar.gz.enc \
|
||||
-k "${{ secrets.PLAYWRIGHT_REPORT_ENCRYPTION_KEY }}"
|
||||
rm playwright-report.tar.gz
|
||||
|
||||
- name: Upload encrypted Playwright report
|
||||
uses: actions/upload-artifact@v4
|
||||
if: failure()
|
||||
with:
|
||||
name: encrypted-playwright-report-${{ github.run_id }}
|
||||
path: dashboard/playwright-report.tar.gz.enc
|
||||
retention-days: 1
|
||||
|
||||
- name: "Cache build"
|
||||
run: |
|
||||
nix store sign --key-file <(echo "${{ secrets.NIX_CACHE_PRIV_KEY }}") --all
|
||||
find /nix/store -maxdepth 1 -name "*-*" -type d | xargs -n 25 nix copy --to s3://nhost-nix-cache\?region=eu-central-1
|
||||
if: always()
|
||||
134
.github/workflows/wf_deploy_vercel.yaml
vendored
Normal file
134
.github/workflows/wf_deploy_vercel.yaml
vendored
Normal file
@@ -0,0 +1,134 @@
|
||||
name: 'deploy to vercel'
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
required: true
|
||||
type: string
|
||||
GIT_REF:
|
||||
required: true
|
||||
type: string
|
||||
ENVIRONMENT:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
VERCEL_TEAM_ID:
|
||||
required: true
|
||||
VERCEL_PROJECT_ID:
|
||||
required: true
|
||||
VERCEL_DEPLOY_TOKEN:
|
||||
required: true
|
||||
DISCORD_WEBHOOK:
|
||||
required: false
|
||||
|
||||
outputs:
|
||||
preview-url:
|
||||
description: "The preview URL from Vercel deployment"
|
||||
value: ${{ jobs.publish-vercel.outputs.preview-url }}
|
||||
|
||||
jobs:
|
||||
publish-vercel:
|
||||
name: Publish to Vercel
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
outputs:
|
||||
preview-url: ${{ steps.deploy.outputs.preview-url }}
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
actions: read
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- uses: cachix/install-nix-action@v31
|
||||
with:
|
||||
install_url: "https://releases.nixos.org/nix/nix-2.28.4/install"
|
||||
install_options: "--no-daemon"
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
sandbox = false
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
substituters = https://cache.nixos.org/?priority=40 s3://nhost-nix-cache?region=eu-central-1&priority=50
|
||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
keep-env-derivations = true
|
||||
keep-outputs = true
|
||||
|
||||
- name: Restore and save Nix store
|
||||
uses: nix-community/cache-nix-action@v6
|
||||
with:
|
||||
primary-key: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/*.nix', '**/flake.lock') }}
|
||||
restore-prefixes-first-match: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-
|
||||
gc-max-store-size-linux: 2G
|
||||
purge: true
|
||||
purge-prefixes: nix-${{ inputs.NAME }}-
|
||||
purge-created: 0
|
||||
purge-last-accessed: 0
|
||||
purge-primary-key: never
|
||||
|
||||
- name: Trigger Vercel deployment
|
||||
id: deploy
|
||||
env:
|
||||
VERCEL_ORG_ID: ${{ secrets.VERCEL_TEAM_ID }}
|
||||
VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }}
|
||||
run: |
|
||||
TARGET_OPTS="--target=${{ inputs.ENVIRONMENT }}"
|
||||
echo "Deploying to: ${{ inputs.ENVIRONMENT }}..."
|
||||
nix develop .\#vercel -c \
|
||||
vercel pull --environment=${{ inputs.ENVIRONMENT }} --token=${{ secrets.VERCEL_DEPLOY_TOKEN }}
|
||||
nix develop .\#vercel -c \
|
||||
vercel build $TARGET_OPTS --token=${{ secrets.VERCEL_DEPLOY_TOKEN }}
|
||||
nix develop .\#vercel -c \
|
||||
vercel deploy $TARGET_OPTS --prebuilt --token=${{ secrets.VERCEL_DEPLOY_TOKEN }} | tee /tmp/vercel_output
|
||||
|
||||
PREVIEW_URL=$(cat /tmp/vercel_output)
|
||||
echo -e "\n🔗🔗🔗 Preview URL: $PREVIEW_URL"
|
||||
echo "preview-url=$PREVIEW_URL" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: marocchino/sticky-pull-request-comment@v2
|
||||
with:
|
||||
header: "vercel-${{ inputs.NAME }}-${{ inputs.ENVIRONMENT }}"
|
||||
message: |
|
||||
# Vercel Deployment Info - ${{ inputs.NAME }}
|
||||
|
||||
* URL: ${{ steps.deploy.outputs.preview-url }}
|
||||
* Git Ref: `${{ inputs.GIT_REF }}`
|
||||
* Commit: `${{ github.event.pull_request.head.sha || github.sha }}`
|
||||
|
||||
if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target'
|
||||
|
||||
- name: Send Discord notification
|
||||
if: always()
|
||||
uses: ./.github/actions/discord-notification
|
||||
with:
|
||||
webhook-url: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
title: "Deployed ${{ inputs.NAME }} to Vercel"
|
||||
description: |
|
||||
**Environment**: ${{ inputs.ENVIRONMENT }}
|
||||
**URL**: ${{ steps.deploy.outputs.preview-url }}
|
||||
**Triggered by**: ${{ github.actor }}
|
||||
**Status**: ${{ job.status }}
|
||||
|
||||
**Details**:
|
||||
- Git Ref: ${{ inputs.GIT_REF }}
|
||||
- Commit: ${{ github.event.pull_request.head.sha || github.sha }}
|
||||
color: ${{ job.status == 'success' && '5763719' || '15548997' }}
|
||||
|
||||
- run: rm -rf .vercel
|
||||
if: always()
|
||||
79
.github/workflows/wf_docker_push_image.yaml
vendored
Normal file
79
.github/workflows/wf_docker_push_image.yaml
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
type: string
|
||||
required: true
|
||||
PATH:
|
||||
type: string
|
||||
required: true
|
||||
VERSION:
|
||||
type: string
|
||||
required: true
|
||||
|
||||
secrets:
|
||||
DOCKER_USERNAME:
|
||||
required: true
|
||||
DOCKER_PASSWORD:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
push-to-registry:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.PATH }}
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: true
|
||||
|
||||
- name: "Compute common env vars"
|
||||
id: vars
|
||||
run: |
|
||||
echo "VERSION=$(make get-version VER=${{ inputs.VERSION }})" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: "Get artifacts"
|
||||
uses: actions/download-artifact@v5
|
||||
with:
|
||||
path: ~/artifacts
|
||||
|
||||
- name: "Inspect artifacts"
|
||||
run: find ~/artifacts
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: "Push docker image to docker hub"
|
||||
run: |
|
||||
export NAME=${{ inputs.NAME }}
|
||||
export VERSION=${{ steps.vars.outputs.VERSION }}
|
||||
export CONTAINER_REGISTRY=nhost
|
||||
export CONTAINER_NAME=$CONTAINER_REGISTRY/$NAME
|
||||
|
||||
for ARCH in "x86_64" "aarch64"; do
|
||||
skopeo copy --insecure-policy \
|
||||
dir:/home/runner/artifacts/${{ inputs.NAME }}-docker-image-$ARCH-$VERSION \
|
||||
docker-daemon:$CONTAINER_NAME:$VERSION-$ARCH
|
||||
docker push $CONTAINER_NAME:$VERSION-$ARCH
|
||||
done
|
||||
|
||||
docker manifest create \
|
||||
$CONTAINER_NAME:$VERSION \
|
||||
--amend $CONTAINER_NAME:$VERSION-x86_64 \
|
||||
--amend $CONTAINER_NAME:$VERSION-aarch64
|
||||
|
||||
docker manifest push $CONTAINER_NAME:$VERSION
|
||||
113
.github/workflows/wf_release_npm.yaml
vendored
Normal file
113
.github/workflows/wf_release_npm.yaml
vendored
Normal file
@@ -0,0 +1,113 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
NAME:
|
||||
type: string
|
||||
required: true
|
||||
PATH:
|
||||
type: string
|
||||
required: true
|
||||
VERSION:
|
||||
type: string
|
||||
required: true
|
||||
secrets:
|
||||
NPM_TOKEN:
|
||||
required: true
|
||||
AWS_ACCOUNT_ID:
|
||||
required: true
|
||||
NIX_CACHE_PUB_KEY:
|
||||
required: true
|
||||
NIX_CACHE_PRIV_KEY:
|
||||
required: true
|
||||
DISCORD_WEBHOOK:
|
||||
required: false
|
||||
|
||||
jobs:
|
||||
release:
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
timeout-minutes: 30
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ${{ inputs.PATH }}
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
actions: read
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v5
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- uses: cachix/install-nix-action@v31
|
||||
with:
|
||||
install_url: "https://releases.nixos.org/nix/nix-2.28.4/install"
|
||||
install_options: "--no-daemon"
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
sandbox = false
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
substituters = https://cache.nixos.org/?priority=40 s3://nhost-nix-cache?region=eu-central-1&priority=50
|
||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
keep-env-derivations = true
|
||||
keep-outputs = true
|
||||
|
||||
- name: Restore and save Nix store
|
||||
uses: nix-community/cache-nix-action@v6
|
||||
with:
|
||||
primary-key: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('**/*.nix', '**/flake.lock') }}
|
||||
restore-prefixes-first-match: nix-${{ inputs.NAME }}-${{ runner.os }}-${{ runner.arch }}-
|
||||
gc-max-store-size-linux: 2G
|
||||
purge: true
|
||||
purge-prefixes: nix-${{ inputs.NAME }}-
|
||||
purge-created: 0
|
||||
purge-last-accessed: 0
|
||||
purge-primary-key: never
|
||||
|
||||
- name: "Build package"
|
||||
run: make build
|
||||
|
||||
- name: "Copy build output"
|
||||
run: cp -r result/dist .
|
||||
|
||||
- name: "Set package version"
|
||||
run: |
|
||||
nix develop .#pnpm -c pnpm version ${{ inputs.VERSION }} --no-git-tag-version
|
||||
|
||||
- name: "Determine npm tag"
|
||||
id: npm-tag
|
||||
run: |
|
||||
if [[ "${{ inputs.VERSION }}" =~ (alpha|beta|dev|rc) ]]; then
|
||||
echo "tag=beta" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "tag=latest" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: "Publish to npm"
|
||||
run: |
|
||||
echo "//registry.npmjs.org/:_authToken=${{ secrets.NPM_TOKEN }}" > ~/.npmrc
|
||||
nix develop .#pnpm -c pnpm publish --tag ${{ steps.npm-tag.outputs.tag }} --no-git-checks
|
||||
|
||||
- name: Send Discord notification
|
||||
if: always()
|
||||
uses: ./.github/actions/discord-notification
|
||||
with:
|
||||
webhook-url: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
title: "Published ${{ inputs.NAME }}@${{ inputs.VERSION }} to npm"
|
||||
description: |
|
||||
**Status**: ${{ job.status }}
|
||||
**Tag**: ${{ steps.npm-tag.outputs.tag }}
|
||||
**Triggered by**: ${{ github.actor }}
|
||||
|
||||
**Details**:
|
||||
- Version: ${{ inputs.VERSION }}
|
||||
- Package: ${{ inputs.NAME }}
|
||||
color: ${{ job.status == 'success' && '5763719' || '15548997' }}
|
||||
8
.gitignore
vendored
8
.gitignore
vendored
@@ -65,3 +65,11 @@ out/
|
||||
.direnv/
|
||||
|
||||
/.vscode/
|
||||
|
||||
result
|
||||
|
||||
.vitest
|
||||
|
||||
.claude
|
||||
|
||||
letsencrypt/*
|
||||
|
||||
58
.golangci.yaml
Normal file
58
.golangci.yaml
Normal file
@@ -0,0 +1,58 @@
|
||||
version: "2"
|
||||
issues:
|
||||
max-issues-per-linter: 0
|
||||
max-same-issues: 0
|
||||
linters:
|
||||
default: all
|
||||
settings:
|
||||
funlen:
|
||||
lines: 65
|
||||
disable:
|
||||
- canonicalheader
|
||||
- depguard
|
||||
- gomoddirectives
|
||||
- musttag
|
||||
- nlreturn
|
||||
- tagliatelle
|
||||
- varnamelen
|
||||
- wsl
|
||||
- noinlineerr
|
||||
- funcorder
|
||||
exclusions:
|
||||
generated: lax
|
||||
presets:
|
||||
- comments
|
||||
- common-false-positives
|
||||
- legacy
|
||||
- std-error-handling
|
||||
rules:
|
||||
- linters:
|
||||
- funlen
|
||||
- ireturn
|
||||
path: _test\.go
|
||||
- linters:
|
||||
- lll
|
||||
source: '^//go:generate '
|
||||
- linters:
|
||||
- gochecknoglobals
|
||||
text: Version is a global variable
|
||||
- linters:
|
||||
- ireturn
|
||||
- lll
|
||||
path: schema\.resolvers\.go
|
||||
paths:
|
||||
- third_party$
|
||||
- builtin$
|
||||
- examples$
|
||||
formatters:
|
||||
enable:
|
||||
- gofmt
|
||||
- gofumpt
|
||||
- goimports
|
||||
exclusions:
|
||||
generated: lax
|
||||
paths:
|
||||
- third_party$
|
||||
- builtin$
|
||||
- examples$
|
||||
- schema\.resolvers\.go
|
||||
8
.npmrc
8
.npmrc
@@ -1,2 +1,8 @@
|
||||
prefer-workspace-packages = true
|
||||
auto-install-peers = false
|
||||
auto-install-peers = true
|
||||
|
||||
# without this setting, pnpm breaks monorepos with multiple versions of the same package
|
||||
shared-workspace-lockfile = false
|
||||
|
||||
# do not enable back, this leads to unlisted dependencies being used
|
||||
hoist = false
|
||||
|
||||
@@ -2,9 +2,7 @@
|
||||
|
||||
## Requirements
|
||||
|
||||
### Node.js v18
|
||||
|
||||
_⚠️ Node.js v16 is also supported for the time being but support will be dropped in the near future_.
|
||||
### Node.js v20 or later
|
||||
|
||||
### [pnpm](https://pnpm.io/) package manager
|
||||
|
||||
|
||||
50
README.md
50
README.md
@@ -61,27 +61,34 @@ Visit [https://docs.nhost.io](http://docs.nhost.io) for the complete documentati
|
||||
|
||||
Since Nhost is 100% open source, you can self-host the whole Nhost stack. Check out the example [docker-compose file](https://github.com/nhost/nhost/tree/main/examples/docker-compose) to self-host Nhost.
|
||||
|
||||
## Sign In and Make a Graphql Request
|
||||
## Sign In and Make a GraphQL Request
|
||||
|
||||
Install the `@nhost/nhost-js` package and start build your app:
|
||||
Install the `@nhost/nhost-js` package and start building your app:
|
||||
|
||||
```jsx
|
||||
import { NhostClient } from '@nhost/nhost-js'
|
||||
```ts
|
||||
import { createClient } from '@nhost/nhost-js'
|
||||
|
||||
const nhost = new NhostClient({
|
||||
subdomain: '<your-subdomain>',
|
||||
region: '<your-region>'
|
||||
const nhost = createClient({
|
||||
subdomain: 'your-project',
|
||||
region: 'eu-central-1'
|
||||
})
|
||||
|
||||
await nhost.auth.signIn({ email: 'user@domain.com', password: 'userPassword' })
|
||||
await nhost.auth.signInEmailPassword({
|
||||
email: 'user@example.com',
|
||||
password: 'password123'
|
||||
})
|
||||
|
||||
await nhost.graphql.request(`{
|
||||
users {
|
||||
id
|
||||
displayName
|
||||
email
|
||||
}
|
||||
}`)
|
||||
await nhost.graphql.request({
|
||||
query: `
|
||||
query GetUsers {
|
||||
users {
|
||||
id
|
||||
displayName
|
||||
email
|
||||
}
|
||||
}
|
||||
`
|
||||
})
|
||||
```
|
||||
|
||||
## Frontend Agnostic
|
||||
@@ -103,19 +110,8 @@ Nhost is frontend agnostic, which means Nhost works with all frontend frameworks
|
||||
|
||||
## Nhost Clients
|
||||
|
||||
- [JavaScript/TypeScript](https://docs.nhost.io/reference/javascript/nhost-js/nhost-client)
|
||||
- [JavaScript/TypeScript](https://docs.nhost.io/reference/javascript/nhost-js/main)
|
||||
- [Dart and Flutter](https://github.com/nhost/nhost-dart)
|
||||
- [React](https://docs.nhost.io/reference/react/nhost-client)
|
||||
- [Next.js](https://docs.nhost.io/reference/nextjs/nhost-client)
|
||||
- [Vue](https://docs.nhost.io/reference/vue/nhost-client)
|
||||
|
||||
## Integrations
|
||||
|
||||
- [Apollo](./integrations/apollo#nhostapollo)
|
||||
- [React Apollo](./integrations/react-apollo#nhostreact-apollo)
|
||||
- [React URQL](./integrations/react-urql#nhostreact-urql)
|
||||
- [Stripe GraphQL API](./integrations/stripe-graphql-js#nhoststripe-graphql-js)
|
||||
- [Google Translation GraphQL API](./integrations/google-translation#nhostgoogle-translation)
|
||||
|
||||
## Applications
|
||||
|
||||
|
||||
@@ -2,5 +2,5 @@
|
||||
// $schema provides code completion hints to IDEs.
|
||||
"$schema": "https://github.com/IBM/audit-ci/raw/main/docs/schema.json",
|
||||
"moderate": true,
|
||||
"allowlist": ["vue-template-compiler"]
|
||||
"allowlist": ["vue-template-compiler", { "id": "CVE-2025-48068", "path": "next" }]
|
||||
}
|
||||
|
||||
43
biome.json
Normal file
43
biome.json
Normal file
@@ -0,0 +1,43 @@
|
||||
{
|
||||
"$schema": "https://biomejs.dev/schemas/2.2.2/schema.json",
|
||||
"vcs": { "enabled": true, "clientKind": "git", "useIgnoreFile": true },
|
||||
"files": { "ignoreUnknown": false },
|
||||
"formatter": {
|
||||
"enabled": true,
|
||||
"indentStyle": "space",
|
||||
"indentWidth": 2,
|
||||
"lineWidth": 80
|
||||
},
|
||||
"linter": {
|
||||
"enabled": true,
|
||||
"rules": {
|
||||
"recommended": true,
|
||||
"complexity": {
|
||||
"useLiteralKeys": "off"
|
||||
}
|
||||
},
|
||||
"includes": ["**", "!.next", "!node_modules"]
|
||||
},
|
||||
"javascript": { "formatter": { "quoteStyle": "double" }, "globals": [] },
|
||||
"assist": {
|
||||
"enabled": true,
|
||||
"actions": { "source": { "organizeImports": "on" } }
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"includes": ["**/*.svelte", "**/*.astro", "**/*.vue"],
|
||||
"linter": {
|
||||
"rules": {
|
||||
"style": {
|
||||
"useConst": "off",
|
||||
"useImportType": "off"
|
||||
},
|
||||
"correctness": {
|
||||
"noUnusedVariables": "off",
|
||||
"noUnusedImports": "off"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
29
build/configs/README.md
Normal file
29
build/configs/README.md
Normal file
@@ -0,0 +1,29 @@
|
||||
# Configuration Files
|
||||
|
||||
This directory contains standardized configurations for various tools and frameworks used across the repository.
|
||||
|
||||
## Available Configurations
|
||||
|
||||
- [**TypeScript (`/tsconfig`)**](./tsconfig/README.md): Centralized TypeScript configurations for different project types
|
||||
|
||||
- Standard base configuration with strict type checking
|
||||
- Specialized configurations for libraries, frontend apps, and Node.js
|
||||
- Documented usage patterns and extension points
|
||||
|
||||
## Using the Configurations
|
||||
|
||||
Each configuration directory contains a README with specific instructions on how to use the configurations in your projects.
|
||||
|
||||
## Benefits
|
||||
|
||||
- **Consistency**: All projects follow the same standards and best practices
|
||||
- **Maintainability**: Configuration changes can be made in one place and propagated to all projects
|
||||
- **Onboarding**: New projects can quickly adopt the standard configurations
|
||||
|
||||
## Adding New Configurations
|
||||
|
||||
When adding new centralized configurations:
|
||||
|
||||
1. Create a new subdirectory with an appropriate name
|
||||
2. Include a README.md explaining the configurations
|
||||
3. Document both the usage and the reasoning behind configuration choices
|
||||
58
build/configs/tsconfig/README.md
Normal file
58
build/configs/tsconfig/README.md
Normal file
@@ -0,0 +1,58 @@
|
||||
# TypeScript Configurations
|
||||
|
||||
This directory contains centralized TypeScript configurations that can be extended by projects in the monorepo. Using centralized configurations ensures consistency across projects and makes it easier to maintain and update TypeScript settings.
|
||||
|
||||
## Base Configurations
|
||||
|
||||
- `base.json`: Core TypeScript settings used by all projects
|
||||
- `library.json`: Settings for libraries and SDK packages
|
||||
- `frontend.json`: Settings for frontend applications (React, Next.js)
|
||||
- `node.json`: Settings for Node.js applications and scripts
|
||||
- `vite.json`: Settings for Vite configuration files
|
||||
|
||||
## Usage
|
||||
|
||||
In your project's `tsconfig.json` file, extend the appropriate base configuration:
|
||||
|
||||
```json
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"extends": "../../configs/tsconfig/frontend.json",
|
||||
"compilerOptions": {
|
||||
// Project-specific overrides here
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Configuration Features
|
||||
|
||||
### Common Features
|
||||
|
||||
- Strict type checking
|
||||
- Modern ES features
|
||||
- Comprehensive linting rules
|
||||
- Proper module resolution
|
||||
|
||||
### Library Configuration
|
||||
|
||||
- Declaration file generation
|
||||
- Source maps
|
||||
- Composite project support
|
||||
|
||||
### Frontend Configuration
|
||||
|
||||
- JSX support
|
||||
- DOM typings
|
||||
- Bundler module resolution
|
||||
- Compatible with both React and Next.js
|
||||
- Configurable for specific framework needs
|
||||
|
||||
## Creating New Projects
|
||||
|
||||
When creating a new project:
|
||||
|
||||
1. Identify the appropriate base configuration for your project type
|
||||
2. Create a minimal `tsconfig.json` that extends the base configuration from the `configs/tsconfig` directory
|
||||
3. Add only project-specific customizations to your `tsconfig.json`
|
||||
|
||||
This approach ensures all projects follow the same standards while allowing for project-specific needs.
|
||||
34
build/configs/tsconfig/base.json
Normal file
34
build/configs/tsconfig/base.json
Normal file
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Base Configuration",
|
||||
"compilerOptions": {
|
||||
/* Environment and Features */
|
||||
"lib": ["ESNext"],
|
||||
"target": "ES2022",
|
||||
"module": "ESNext",
|
||||
"moduleDetection": "force",
|
||||
"skipLibCheck": true,
|
||||
|
||||
/* Type Checking */
|
||||
"strict": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noImplicitOverride": true,
|
||||
"noImplicitReturns": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noUncheckedIndexedAccess": true,
|
||||
"noPropertyAccessFromIndexSignature": true,
|
||||
"allowUnusedLabels": false,
|
||||
"allowUnreachableCode": false,
|
||||
|
||||
/* Module Resolution */
|
||||
"esModuleInterop": true,
|
||||
"resolveJsonModule": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
|
||||
/* Advanced Options */
|
||||
"verbatimModuleSyntax": true,
|
||||
"isolatedModules": true
|
||||
},
|
||||
"exclude": ["node_modules", "**/dist", "**/build"]
|
||||
}
|
||||
25
build/configs/tsconfig/frontend.json
Normal file
25
build/configs/tsconfig/frontend.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Frontend Configuration",
|
||||
"extends": "./base.json",
|
||||
"compilerOptions": {
|
||||
/* Frontend Specific */
|
||||
"lib": ["ESNext", "DOM", "DOM.Iterable"],
|
||||
"jsx": "react-jsx",
|
||||
|
||||
/* Module Resolution */
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": true,
|
||||
"noEmit": true,
|
||||
|
||||
/* Additional Options */
|
||||
"allowJs": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"incremental": true,
|
||||
|
||||
/* Next.js Compatibility (ignored by non-Next.js projects) */
|
||||
"plugins": []
|
||||
},
|
||||
"include": ["src/**/*", "**/*.ts", "**/*.tsx"],
|
||||
"exclude": ["node_modules", "**/node_modules/*"]
|
||||
}
|
||||
30
build/configs/tsconfig/library.json
Normal file
30
build/configs/tsconfig/library.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Library/SDK Configuration",
|
||||
"extends": "./base.json",
|
||||
"compilerOptions": {
|
||||
/* Output Configuration */
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"sourceMap": true,
|
||||
"outDir": "./dist",
|
||||
"noEmit": false,
|
||||
"composite": true,
|
||||
"importHelpers": true,
|
||||
|
||||
/* Library-specific */
|
||||
"moduleResolution": "node",
|
||||
|
||||
/* Types */
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": [
|
||||
"node_modules",
|
||||
"**/*.test.ts",
|
||||
"**/*.spec.ts",
|
||||
"**/__tests__/**",
|
||||
"dist",
|
||||
"**/dist/*"
|
||||
]
|
||||
}
|
||||
23
build/configs/tsconfig/node.json
Normal file
23
build/configs/tsconfig/node.json
Normal file
@@ -0,0 +1,23 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Node.js Configuration",
|
||||
"extends": "./base.json",
|
||||
"compilerOptions": {
|
||||
"lib": ["ESNext"],
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"target": "ES2022",
|
||||
|
||||
"allowJs": true,
|
||||
"esModuleInterop": true,
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
|
||||
/* Node-specific options */
|
||||
"sourceMap": true,
|
||||
|
||||
/* Types */
|
||||
"types": ["node"]
|
||||
},
|
||||
"exclude": ["node_modules", "**/node_modules/*"]
|
||||
}
|
||||
14
build/configs/tsconfig/vite.json
Normal file
14
build/configs/tsconfig/vite.json
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/tsconfig",
|
||||
"display": "Vite Configuration",
|
||||
"extends": "./node.json",
|
||||
"compilerOptions": {
|
||||
"composite": true,
|
||||
"skipLibCheck": true,
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "bundler",
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["vite.config.ts"]
|
||||
}
|
||||
117
build/makefiles/general.makefile
Normal file
117
build/makefiles/general.makefile
Normal file
@@ -0,0 +1,117 @@
|
||||
PROJ_DIR=$(abspath .)
|
||||
PROJ=$(subst $(ROOT_DIR)/,,$(PROJ_DIR))
|
||||
NAME=$(notdir $(PROJ))
|
||||
|
||||
include $(ROOT_DIR)/build/makefiles/release.makefile
|
||||
|
||||
ifdef VER
|
||||
VERSION=$(shell echo $(VER) | sed -e 's/^v//g' -e 's/\//_/g')
|
||||
else
|
||||
VERSION=$(shell grep -oP 'version\s*=\s*"\K[^"]+' project.nix | head -n 1)
|
||||
endif
|
||||
|
||||
ifeq ($(shell uname -m),x86_64)
|
||||
ARCH?=x86_64
|
||||
else ifeq ($(shell uname -m),arm64)
|
||||
ARCH?=aarch64
|
||||
else ifeq ($(shell uname -m),aarch64)
|
||||
ARCH?=aarch64
|
||||
else
|
||||
ARCH?=FIXME-$(shell uname -m)
|
||||
endif
|
||||
|
||||
ifeq ($(shell uname -o),Darwin)
|
||||
OS?=darwin
|
||||
else
|
||||
OS?=linux
|
||||
endif
|
||||
|
||||
ifeq ($(CI),true)
|
||||
docker-build-options=--option system $(ARCH)-linux --extra-platforms ${ARCH}-linux
|
||||
endif
|
||||
|
||||
|
||||
.PHONY: help
|
||||
help: ## Show this help.
|
||||
@echo
|
||||
@awk 'BEGIN { \
|
||||
FS = "##"; \
|
||||
printf "Usage: make \033[36m<target>\033[0m\n"} \
|
||||
/^[a-zA-Z_-]+%?:.*?##/ { printf " \033[36m%-38s\033[0m %s\n", $$1, $$2 } ' \
|
||||
$(MAKEFILE_LIST)
|
||||
|
||||
.PHONY: print-vars
|
||||
print-vars: ## print all variables
|
||||
@$(foreach V,$(sort $(.VARIABLES)), \
|
||||
$(if $(filter-out environment% default automatic, \
|
||||
$(origin $V)),$(info $V=$($V) ($(value $V)))))
|
||||
|
||||
|
||||
.PHONY: get-version
|
||||
get-version: ## Return version
|
||||
@sed -i '/^\s*version = "0.0.0-dev";/s//version = "${VERSION}";/' project.nix
|
||||
@sed -i '/^\s*created = "1970-.*";/s//created = "${shell date --utc '+%Y-%m-%dT%H:%M:%SZ'}";/' project.nix
|
||||
@echo $(VERSION)
|
||||
|
||||
|
||||
.PHONY: _check-pre
|
||||
_check-pre: ## Pre-checks before running nix flake check
|
||||
|
||||
|
||||
.PHONY: check
|
||||
check: _check-pre ## Run nix flake check
|
||||
nix build \
|
||||
--print-build-logs \
|
||||
.\#checks.$(ARCH)-$(OS).$(NAME)
|
||||
|
||||
|
||||
.PHONY: check-dry-run
|
||||
check-dry-run: ## Returns the derivation of the check
|
||||
@nix build \
|
||||
--dry-run \
|
||||
--json \
|
||||
.\#checks.$(ARCH)-$(OS).$(NAME) | jq -r '.[].outputs.out'
|
||||
|
||||
|
||||
.PHONY: build
|
||||
build: ## Build application and places the binary under ./result/bin
|
||||
nix build \
|
||||
--print-build-logs \
|
||||
.\#packages.$(ARCH)-$(OS).$(NAME)
|
||||
|
||||
|
||||
.PHONY: build-dry-run
|
||||
build-dry-run: ## Run nix flake check
|
||||
@nix build \
|
||||
--dry-run \
|
||||
--json \
|
||||
.\#packages.$(ARCH)-$(OS).$(NAME) | jq -r '.[].outputs.out'
|
||||
|
||||
|
||||
.PHONY: build-nixops-dry-run
|
||||
build-nixops-dry-run: ## Checks if nixops needs to be rebuilt
|
||||
@nix build \
|
||||
--dry-run \
|
||||
--json \
|
||||
.\#packages.$(ARCH)-$(OS).nixops | jq -r '.[].outputs.out'
|
||||
|
||||
|
||||
.PHONY: build-docker-image
|
||||
build-docker-image: ## Build docker container for native architecture
|
||||
nix build $(docker-build-options) --show-trace \
|
||||
.\#packages.$(ARCH)-linux.$(NAME)-docker-image \
|
||||
--print-build-logs
|
||||
nix develop \#skopeo -c \
|
||||
skopeo copy --insecure-policy dir:./result docker-daemon:$(NAME):$(VERSION)
|
||||
|
||||
|
||||
.PHONY: dev-env-up
|
||||
dev-env-up: _dev-env-build _dev-env-up ## Starts development environment
|
||||
|
||||
|
||||
.PHONY: dev-env-down
|
||||
dev-env-down: _dev-env-down ## Stops development environment
|
||||
|
||||
|
||||
.PHONY: dev-env-build
|
||||
dev-env-build: _dev-env-build ## Builds development environment
|
||||
30
build/makefiles/release.makefile
Normal file
30
build/makefiles/release.makefile
Normal file
@@ -0,0 +1,30 @@
|
||||
TAG_NAME?=$(NAME)
|
||||
TAG_PATTERN="^$(TAG_NAME)@\d+\.\d+\.\d+$$"
|
||||
|
||||
|
||||
.PHONY: changelog-init
|
||||
changelog-init: ## Initialize changelog using git-cliff
|
||||
@git cliff -u --tag-pattern "$(TAG_PATTERN)" --bump --tag="$(NAME)/$(VERSION)" --output CHANGELOG.md
|
||||
|
||||
.PHONY: changelog-next-version
|
||||
changelog-next-version: ## Get next version using git-cliff
|
||||
@git cliff -u --bumped-version --tag-pattern $(TAG_PATTERN) $(CLIFF_OPTS) | sed 's/.*@//'
|
||||
|
||||
.PHONY: changelog-get-released
|
||||
changelog-get-released: ## Get changelog for the latest release using git-cliff
|
||||
@git cliff -l --bump --tag-pattern $(TAG_PATTERN) $(CLIFF_OPTS) --strip all
|
||||
|
||||
|
||||
.PHONY: changelog-get-unreleased
|
||||
changelog-get-unreleased: ## Get changelog for the following release using git-cliff
|
||||
@git cliff -u --bump --tag-pattern $(TAG_PATTERN) $(CLIFF_OPTS) --strip all
|
||||
|
||||
|
||||
.PHONY: changelog-update
|
||||
changelog-update: ## Update changelog using git-cliff
|
||||
@git cliff -u --bump --tag-pattern $(TAG_PATTERN) $(CLIFF_OPTS) --prepend CHANGELOG.md
|
||||
|
||||
|
||||
.PHONY: release-tag-name
|
||||
release-tag-name: ## Get the tag name for the current version
|
||||
@echo "$(TAG_NAME)"
|
||||
20
cli/.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
20
cli/.github/PULL_REQUEST_TEMPLATE.md
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
## Description
|
||||
<!--
|
||||
Use one of the following title prefix to categorize the pull request:
|
||||
feat: mark this pull request as a feature
|
||||
fix: mark this pull request as a bug fix
|
||||
chore: mark this pull request as a maintenance item
|
||||
|
||||
To auto merge this pull request when it was approved
|
||||
by another member of the organization: set the label `auto-merge`
|
||||
-->
|
||||
## Problem
|
||||
A short description of the problem this PR is addressing.
|
||||
|
||||
## Solution
|
||||
A short description of the chosen method to resolve the problem
|
||||
with an overview of the logic and implementation details when needed.
|
||||
|
||||
## Notes
|
||||
Other notes that you want to share but do not fit into _Problem_ or _Solution_.
|
||||
|
||||
36
cli/.github/cert.sh
vendored
Executable file
36
cli/.github/cert.sh
vendored
Executable file
@@ -0,0 +1,36 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
mkdir -p /tmp/letsencrypt
|
||||
|
||||
echo "Generating SSL certificate for hostnames: local.nhost.run, local.graphql.nhost.run, local.auth.nhost.run, local.storage.nhost.run, local.functions.nhost.run, local.mail.nhost.run"
|
||||
docker run --rm \
|
||||
--name certbot \
|
||||
-e AWS_ACCESS_KEY_ID \
|
||||
-e AWS_SECRET_ACCESS_KEY \
|
||||
-e AWS_SESSION_TOKEN \
|
||||
-e AWS_REGION \
|
||||
-v /tmp/letsencrypt:/etc/letsencrypt \
|
||||
-v /tmp/letsencrypt:/var/lib/letsencrypt \
|
||||
certbot/dns-route53 certonly --dns-route53 --dns-route53-propagation-seconds 60 \
|
||||
-d local.auth.nhost.run \
|
||||
-d local.dashboard.nhost.run \
|
||||
-d local.db.nhost.run \
|
||||
-d local.functions.nhost.run \
|
||||
-d local.graphql.nhost.run \
|
||||
-d local.hasura.nhost.run \
|
||||
-d local.mailhog.nhost.run \
|
||||
-d local.storage.nhost.run \
|
||||
-d *.auth.local.nhost.run \
|
||||
-d *.dashboard.local.nhost.run \
|
||||
-d *.db.local.nhost.run \
|
||||
-d *.functions.local.nhost.run \
|
||||
-d *.graphql.local.nhost.run \
|
||||
-d *.hasura.local.nhost.run \
|
||||
-d *.mailhog.local.nhost.run \
|
||||
-d *.storage.local.nhost.run \
|
||||
-m 'admin@nhost.io' --non-interactive --agree-tos --server https://acme-v02.api.letsencrypt.org/directory
|
||||
|
||||
sudo cp /tmp/letsencrypt/live/local.db.nhost.run/fullchain.pem ssl/.ssl/
|
||||
sudo cp /tmp/letsencrypt/live/local.db.nhost.run/privkey.pem ssl/.ssl/
|
||||
8
cli/.github/labeler.yml
vendored
Normal file
8
cli/.github/labeler.yml
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
labels:
|
||||
'feature':
|
||||
- '^(?i:feat)'
|
||||
- '^(?i:feature)'
|
||||
'fix':
|
||||
- '^(?i:fix)'
|
||||
'chore':
|
||||
- '^(?i:chore)'
|
||||
39
cli/.github/release-drafter.yml
vendored
Normal file
39
cli/.github/release-drafter.yml
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
name-template: 'v$RESOLVED_VERSION'
|
||||
tag-template: 'v$RESOLVED_VERSION'
|
||||
categories:
|
||||
- title: '🚀 Features'
|
||||
label: 'feature'
|
||||
- title: '🐛 Bug Fixes'
|
||||
label: 'fix'
|
||||
- title: '🧰 Maintenance'
|
||||
label: 'chore'
|
||||
change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
|
||||
change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks.
|
||||
version-resolver:
|
||||
major:
|
||||
labels:
|
||||
- 'major'
|
||||
minor:
|
||||
labels:
|
||||
- 'minor'
|
||||
patch:
|
||||
labels:
|
||||
- 'patch'
|
||||
default: patch
|
||||
autolabeler:
|
||||
- label: 'feature'
|
||||
title:
|
||||
- '/^feat/i'
|
||||
- '/^feature/i'
|
||||
- label: 'fix'
|
||||
title:
|
||||
- '/^fix/i'
|
||||
- label: 'chore'
|
||||
title:
|
||||
- '/^chore/i'
|
||||
prerelease: true
|
||||
template: |
|
||||
## Changes
|
||||
|
||||
$CHANGES
|
||||
|
||||
16
cli/.github/stale.yml
vendored
Normal file
16
cli/.github/stale.yml
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
# Configuration for probot-stale - https://github.com/probot/stale
|
||||
|
||||
daysUntilStale: 180
|
||||
daysUntilClose: 7
|
||||
limitPerRun: 30
|
||||
onlyLabels: []
|
||||
exemptLabels: []
|
||||
|
||||
exemptProjects: false
|
||||
exemptMilestones: false
|
||||
exemptAssignees: false
|
||||
staleLabel: stale
|
||||
markComment: >
|
||||
This issue has been automatically marked as stale because it has not had
|
||||
recent activity. It will be closed if no further activity occurs. Thank you
|
||||
for your contributions.
|
||||
17
cli/.github/workflows/assign_labels.yml
vendored
Normal file
17
cli/.github/workflows/assign_labels.yml
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
# this workflow will run on all pull requests opened but in the context of the base of the pull request.
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened]
|
||||
|
||||
name: "assign labels"
|
||||
jobs:
|
||||
# labeler will label pull requests based on their title.
|
||||
# the configuration is at .github/labeler.yml.
|
||||
label_pull_request:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
-
|
||||
name: Label Pull Request
|
||||
uses: jimschubert/labeler-action@v2
|
||||
with:
|
||||
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
||||
53
cli/.github/workflows/build-cert-weekly.yaml.disabled
vendored
Normal file
53
cli/.github/workflows/build-cert-weekly.yaml.disabled
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
---
|
||||
name: "build certificate weekly"
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 0 * * 1'
|
||||
|
||||
jobs:
|
||||
run:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v2
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::796351718684:role/github-actions-nhost-cli
|
||||
aws-region: eu-central-1
|
||||
|
||||
- name: fetch let's encrypt cert
|
||||
id: certs
|
||||
run: |
|
||||
.github/cert.sh
|
||||
|
||||
echo "CERT_FULL_CHAIN<<EOF" >> $GITHUB_OUTPUT
|
||||
sudo cat /tmp/letsencrypt/live/local.db.nhost.run/fullchain.pem >> "$GITHUB_OUTPUT"
|
||||
echo EOF >> $GITHUB_OUTPUT
|
||||
|
||||
echo "CERT_PRIV_KEY<<EOF" >> $GITHUB_OUTPUT
|
||||
sudo cat /tmp/letsencrypt/live/local.db.nhost.run/privkey.pem >> "$GITHUB_OUTPUT"
|
||||
echo EOF >> $GITHUB_OUTPUT
|
||||
shell: bash
|
||||
|
||||
- uses: hmanzur/actions-set-secret@v2.0.0
|
||||
with:
|
||||
name: 'CERT_FULL_CHAIN'
|
||||
value: "${{ steps.certs.outputs.CERT_FULL_CHAIN }}"
|
||||
repository: nhost/cli
|
||||
token: ${{ secrets.GH_PAT }}
|
||||
|
||||
- uses: hmanzur/actions-set-secret@v2.0.0
|
||||
with:
|
||||
name: 'CERT_PRIV_KEY'
|
||||
value: "${{ steps.certs.outputs.CERT_PRIV_KEY }}"
|
||||
repository: nhost/cli
|
||||
token: ${{ secrets.GH_PAT }}
|
||||
27
cli/.github/workflows/checks.yaml
vendored
Normal file
27
cli/.github/workflows/checks.yaml
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
---
|
||||
name: "check and build"
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
secrets:
|
||||
NHOST_PAT: ${{ secrets.NHOST_PAT }}
|
||||
|
||||
build_artifacts:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
GOOS: ["darwin", "linux"]
|
||||
GOARCH: ["amd64", "arm64"]
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
with:
|
||||
GOOS: ${{ matrix.GOOS }}
|
||||
GOARCH: ${{ matrix.GOARCH }}
|
||||
VERSION: ${{ github.sha }}
|
||||
secrets:
|
||||
NHOST_PAT: ${{ secrets.NHOST_PAT }}
|
||||
@@ -18,7 +18,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'javascript' ]
|
||||
language: [ 'go' ]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Learn more about CodeQL language support at https://git.io/codeql-language-support
|
||||
|
||||
27
cli/.github/workflows/gen_ai_review.yaml
vendored
Normal file
27
cli/.github/workflows/gen_ai_review.yaml
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
---
|
||||
name: "gen: AI review"
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, reopened, ready_for_review]
|
||||
issue_comment:
|
||||
jobs:
|
||||
pr_agent_job:
|
||||
if: ${{ github.event.sender.type != 'Bot' }}
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
name: Run pr agent on every pull request, respond to user comments
|
||||
steps:
|
||||
- name: PR Agent action step
|
||||
id: pragent
|
||||
uses: Codium-ai/pr-agent@v0.29
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
OPENAI_KEY: ${{ secrets.OPENAI_API_KEY }}
|
||||
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
config.max_model_tokens: 100000
|
||||
config.model: "anthropic/claude-sonnet-4-20250514"
|
||||
config.model_turbo: "anthropic/claude-sonnet-4-20250514"
|
||||
ignore.glob: "['vendor/**','**/client_gen.go','**/models_gen.go','**/generated.go','**/*.gen.go']"
|
||||
91
cli/.github/workflows/gen_schedule_update_deps.yaml
vendored
Normal file
91
cli/.github/workflows/gen_schedule_update_deps.yaml
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
---
|
||||
name: "gen: update depenendencies"
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 2 1 2,5,8,11 *'
|
||||
|
||||
jobs:
|
||||
run:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Configure aws
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
role-to-assume: arn:aws:iam::${{ secrets.AWS_PRODUCTION_CORE_ACCOUNT_ID }}:role/github-actions-nhost-${{ github.event.repository.name }}
|
||||
aws-region: eu-central-1
|
||||
|
||||
- uses: nixbuild/nix-quick-install-action@v26
|
||||
with:
|
||||
nix_version: 2.16.2
|
||||
nix_conf: |
|
||||
experimental-features = nix-command flakes
|
||||
sandbox = false
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
substituters = https://cache.nixos.org/?priority=40 s3://nhost-nix-cache?region=eu-central-1&priority=50
|
||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= ${{ secrets.NIX_CACHE_PUB_KEY }}
|
||||
|
||||
- name: Cache nix store
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: /nix
|
||||
key: nix-update-deps-${{ hashFiles('flakes.nix', 'flake.lock') }}
|
||||
|
||||
- name: Update nix flakes
|
||||
run: nix flake update
|
||||
|
||||
- uses: shaunco/ssh-agent@git-repo-mapping
|
||||
with:
|
||||
ssh-private-key: |
|
||||
${{ secrets.NHOST_BE_DEPLOY_SSH_PRIVATE_KEY}}
|
||||
repo-mappings: |
|
||||
github.com/nhost/be
|
||||
|
||||
- name: Update golang dependencies
|
||||
run: |
|
||||
export GOPRIVATE=github.com/nhost/be
|
||||
nix develop -c bash -c "
|
||||
go mod tidy
|
||||
go get -u $(cat go.mod | grep nhost\/be | tr ' ' '@') ./...
|
||||
go mod tidy
|
||||
go mod vendor
|
||||
"
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v6
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
commit-message: Update dependencies
|
||||
committer: GitHub <noreply@github.com>
|
||||
author: ${{ github.actor }} <${{ github.actor }}@users.noreply.github.com>
|
||||
signoff: false
|
||||
branch: automated/update-deps
|
||||
delete-branch: true
|
||||
title: '[Scheduled] Update dependencies'
|
||||
body: |
|
||||
Dependencies updated
|
||||
|
||||
Note - If you see this PR and the checks haven't run, close and reopen the PR. See https://github.com/peter-evans/create-pull-request/blob/main/docs/concepts-guidelines.md#triggering-further-workflow-runs
|
||||
labels: |
|
||||
dependencies
|
||||
draft: false
|
||||
|
||||
- name: "Cache nix store on s3"
|
||||
run: |
|
||||
echo ${{ secrets.NIX_CACHE_PRIV_KEY }} > cache-priv-key.pem
|
||||
nix build .\#devShells.x86_64-linux.default
|
||||
nix store sign --key-file cache-priv-key.pem --all
|
||||
nix copy --to s3://nhost-nix-cache\?region=eu-central-1 .\#devShells.x86_64-linux.default
|
||||
|
||||
- run: rm cache-priv-key.pem
|
||||
if: always()
|
||||
35
cli/.github/workflows/release.yaml
vendored
Normal file
35
cli/.github/workflows/release.yaml
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
---
|
||||
name: "release"
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
uses: ./.github/workflows/wf_check.yaml
|
||||
secrets:
|
||||
NHOST_PAT: ${{ secrets.NHOST_PAT }}
|
||||
|
||||
build_artifacts:
|
||||
strategy:
|
||||
matrix:
|
||||
GOOS: ["darwin", "linux"]
|
||||
GOARCH: ["amd64", "arm64"]
|
||||
uses: ./.github/workflows/wf_build_artifacts.yaml
|
||||
with:
|
||||
GOOS: ${{ matrix.GOOS }}
|
||||
GOARCH: ${{ matrix.GOARCH }}
|
||||
VERSION: ${{ github.ref_name }}
|
||||
secrets:
|
||||
NHOST_PAT: ${{ secrets.NHOST_PAT }}
|
||||
|
||||
publish:
|
||||
uses: ./.github/workflows/wf_publish.yaml
|
||||
needs:
|
||||
- tests
|
||||
- build_artifacts
|
||||
with:
|
||||
VERSION: ${{ github.ref_name }}
|
||||
secrets:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
17
cli/.github/workflows/release_drafter.yml
vendored
Normal file
17
cli/.github/workflows/release_drafter.yml
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
name: "release drafter"
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
# draft your next release notes as pull requests are merged into "master"
|
||||
# the configuration is at /.github/release-drafter.yml.
|
||||
update_release_draft:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: release-drafter/release-drafter@v5
|
||||
with:
|
||||
config-name: release-drafter.yml
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
89
cli/.github/workflows/wf_build_artifacts.yaml
vendored
Normal file
89
cli/.github/workflows/wf_build_artifacts.yaml
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
GIT_REF:
|
||||
type: string
|
||||
required: false
|
||||
VERSION:
|
||||
type: string
|
||||
required: true
|
||||
GOOS:
|
||||
type: string
|
||||
required: true
|
||||
GOARCH:
|
||||
type: string
|
||||
required: true
|
||||
secrets:
|
||||
NHOST_PAT:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
artifacts:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
submodules: true
|
||||
|
||||
- uses: cachix/install-nix-action@v27
|
||||
with:
|
||||
install_url: "https://releases.nixos.org/nix/nix-2.22.3/install"
|
||||
install_options: "--no-daemon"
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
sandbox = false
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
substituters = https://cache.nixos.org/?priority=40
|
||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=
|
||||
|
||||
- name: Compute common env vars
|
||||
id: vars
|
||||
run: |
|
||||
echo "VERSION=$(make get-version VERSION=${{ inputs.VERSION }})" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: "Build artifact"
|
||||
run: |
|
||||
make build ARCH=${{ inputs.GOARCH }} OS=${{ inputs.GOOS }}
|
||||
find -L result -type f -exec cp {} nhost-cli \;
|
||||
|
||||
- name: "Push artifact to artifact repository"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: cli-${{ steps.vars.outputs.VERSION }}-${{ inputs.GOOS }}-${{ inputs.GOARCH }}
|
||||
path: nhost-cli
|
||||
retention-days: 7
|
||||
|
||||
- name: "Build docker-image"
|
||||
run: |
|
||||
make build-docker-image ARCH=${{ inputs.GOARCH }}
|
||||
if: ${{ ( inputs.GOOS == 'linux' ) }}
|
||||
|
||||
- name: "Create a new project"
|
||||
run: |
|
||||
export NHOST_DOMAIN=staging.nhost.run
|
||||
export NHOST_CONFIGSERVER_IMAGE=nhost/cli:${{ steps.vars.outputs.VERSION }}
|
||||
|
||||
mkdir new-project
|
||||
cd new-project
|
||||
../nhost-cli login --pat ${{ secrets.NHOST_PAT }}
|
||||
../nhost-cli init
|
||||
../nhost-cli up --down-on-error
|
||||
../nhost-cli down
|
||||
if: ${{ ( inputs.GOOS == 'linux' && inputs.GOARCH == 'amd64' ) }}
|
||||
|
||||
- name: "Push docker-image to artifact repository"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: cli-docker-image-${{ steps.vars.outputs.VERSION }}-${{ inputs.GOOS }}-${{ inputs.GOARCH }}
|
||||
path: result
|
||||
retention-days: 7
|
||||
if: ${{ ( inputs.GOOS == 'linux' ) }}
|
||||
42
cli/.github/workflows/wf_check.yaml
vendored
Normal file
42
cli/.github/workflows/wf_check.yaml
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
GIT_REF:
|
||||
type: string
|
||||
required: false
|
||||
secrets:
|
||||
NHOST_PAT:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
submodules: true
|
||||
|
||||
- uses: cachix/install-nix-action@v27
|
||||
with:
|
||||
install_url: "https://releases.nixos.org/nix/nix-2.22.3/install"
|
||||
install_options: "--no-daemon"
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
sandbox = false
|
||||
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||
substituters = https://cache.nixos.org/?priority=40
|
||||
trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=
|
||||
|
||||
- name: "Run checks"
|
||||
run: |
|
||||
export NHOST_PAT=${{ secrets.NHOST_PAT }}
|
||||
make check
|
||||
93
cli/.github/workflows/wf_publish.yaml
vendored
Normal file
93
cli/.github/workflows/wf_publish.yaml
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
---
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
VERSION:
|
||||
type: string
|
||||
required: true
|
||||
secrets:
|
||||
DOCKER_USERNAME:
|
||||
required: true
|
||||
DOCKER_PASSWORD:
|
||||
required: true
|
||||
|
||||
name: release
|
||||
jobs:
|
||||
release:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
steps:
|
||||
- name: "Check out repository"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
ref: ${{ inputs.GIT_REF }}
|
||||
submodules: true
|
||||
|
||||
- name: Compute common env vars
|
||||
id: vars
|
||||
run: |
|
||||
echo "VERSION=$(make get-version VERSION=${{ inputs.VERSION }})" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: "Get artifacts"
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: ~/artifacts
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Upload docker images
|
||||
shell: bash
|
||||
run: |
|
||||
export VERSION=${{ steps.vars.outputs.VERSION }}
|
||||
export CONTAINER_NAME=nhost/cli
|
||||
|
||||
skopeo copy --insecure-policy \
|
||||
dir:/home/runner/artifacts/cli-docker-image-$VERSION-linux-amd64 \
|
||||
docker-daemon:$CONTAINER_NAME:$VERSION-amd64
|
||||
docker push $CONTAINER_NAME:$VERSION-amd64
|
||||
|
||||
skopeo copy --insecure-policy \
|
||||
dir:/home/runner/artifacts/cli-docker-image-$VERSION-linux-arm64 \
|
||||
docker-daemon:$CONTAINER_NAME:$VERSION-arm64
|
||||
docker push $CONTAINER_NAME:$VERSION-arm64
|
||||
|
||||
docker manifest create \
|
||||
$CONTAINER_NAME:$VERSION \
|
||||
--amend $CONTAINER_NAME:$VERSION-amd64 \
|
||||
--amend $CONTAINER_NAME:$VERSION-arm64
|
||||
docker manifest push $CONTAINER_NAME:$VERSION
|
||||
|
||||
- name: Upload assets
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
export VERSION=${{ steps.vars.outputs.VERSION }}
|
||||
|
||||
mkdir upload
|
||||
|
||||
find ~/artifacts -type f -name "nhost-cli" -exec bash -c 'chmod +x "$0" && mv "$0" "${0//nhost-cli/cli}"' {} \;
|
||||
|
||||
tar cvzf upload/cli-$VERSION-darwin-amd64.tar.gz -C ~/artifacts/cli-$VERSION-darwin-amd64 cli
|
||||
tar cvzf upload/cli-$VERSION-darwin-arm64.tar.gz -C ~/artifacts/cli-$VERSION-darwin-arm64 cli
|
||||
tar cvzf upload/cli-$VERSION-linux-amd64.tar.gz -C ~/artifacts/cli-$VERSION-linux-amd64 cli
|
||||
tar cvzf upload/cli-$VERSION-linux-arm64.tar.gz -C ~/artifacts/cli-$VERSION-linux-arm64 cli
|
||||
|
||||
cd upload
|
||||
find . -type f -exec sha256sum {} + > ../checksums.txt
|
||||
cd ..
|
||||
|
||||
cat checksums.txt
|
||||
|
||||
gh release upload \
|
||||
--clobber "${{ github.ref_name }}" \
|
||||
./upload/* checksums.txt
|
||||
29
cli/Makefile
Normal file
29
cli/Makefile
Normal file
@@ -0,0 +1,29 @@
|
||||
ROOT_DIR?=$(abspath ../)
|
||||
include $(ROOT_DIR)/build/makefiles/general.makefile
|
||||
|
||||
|
||||
.PHONY: _check-pre
|
||||
_check-pre:
|
||||
@sed -i 's/$$NHOST_PAT/$(NHOST_PAT)/' get_access_token.sh
|
||||
|
||||
|
||||
.PHONY: _dev-env-up
|
||||
_dev-env-up:
|
||||
@echo "Nothing to do"
|
||||
|
||||
|
||||
.PHONY: _dev-env-down
|
||||
_dev-env-down:
|
||||
@echo "Nothing to do"
|
||||
|
||||
|
||||
.PHONY: _dev-env-build
|
||||
_dev-env-build:
|
||||
@echo "Nothing to do"
|
||||
|
||||
|
||||
.PHONY: build-multiplatform
|
||||
build-multiplatform: ## Build cli for all supported platforms
|
||||
nix build \
|
||||
--print-build-logs \
|
||||
.\#packages.$(ARCH)-$(OS).cli-multiplatform
|
||||
82
cli/README.md
Normal file
82
cli/README.md
Normal file
@@ -0,0 +1,82 @@
|
||||
<div align="center">
|
||||
<h1 style="font-size: 3em; font-weight: bold;">Nhost CLI</h1>
|
||||
</div>
|
||||
|
||||
[Nhost](http://nhost.io) is an open-source Firebase alternative with GraphQL.
|
||||
|
||||
The Nhost CLI is used to set up a local development environment. This environment will automatically track database migrations and Hasura metadata.
|
||||
|
||||
It's recommended to use the Nhost CLI and the [Nhost GitHub Integration](https://docs.nhost.io/platform/github-integration) to develop locally and automatically deploy changes to production with a git-based workflow (similar to Netlify & Vercel).
|
||||
|
||||
## Services
|
||||
|
||||
- [Nhost Dashboard](https://github.com/nhost/nhost/tree/main/dashboard)
|
||||
- [Postgres Database](https://www.postgresql.org/)
|
||||
- [Hasura's GraphQL Engine](https://github.com/hasura/graphql-engine)
|
||||
- [Hasura Auth](https://github.com/nhost/hasura-auth)
|
||||
- [Hasura Storage](https://github.com/nhost/hasura-storage)
|
||||
- [Nhost Serverless Functions](https://github.com/nhost/functions)
|
||||
- [Minio S3](https://github.com/minio/minio)
|
||||
- [Mailhog](https://github.com/mailhog/MailHog)
|
||||
|
||||
## Get Started
|
||||
|
||||
### Install the Nhost CLI
|
||||
|
||||
```bash
|
||||
sudo curl -L https://raw.githubusercontent.com/nhost/nhost/main/cli/get.sh | bash
|
||||
```
|
||||
|
||||
### Initialize a project
|
||||
|
||||
```bash
|
||||
nhost init
|
||||
```
|
||||
|
||||
### Initialize a project with a remote project as a starting point
|
||||
|
||||
```bash
|
||||
nhost init --remote
|
||||
```
|
||||
|
||||
### Start the development environment
|
||||
|
||||
```bash
|
||||
nhost up
|
||||
```
|
||||
|
||||
### Use the Nhost Dashboard
|
||||
|
||||
```bash
|
||||
nhost up --ui nhost
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
- [Get started with Nhost CLI (longer version)](https://docs.nhost.io/platform/overview/get-started-with-nhost-cli)
|
||||
- [Nhost CLI](https://docs.nhost.io/platform/cli)
|
||||
- [Reference](https://docs.nhost.io/reference/cli)
|
||||
|
||||
## Build from Source
|
||||
|
||||
Make sure you have [Go](https://golang.org/doc/install) 1.18 or later installed.
|
||||
|
||||
The source code includes a self-signed certificate for testing purposes. Nhost workers with configured access to AWS may use the `cert.sh` script to generate a real certificate from Let's Encrypt.
|
||||
|
||||
```bash
|
||||
go build -o /usr/local/bin/nhost
|
||||
```
|
||||
This will build the binary available as the `nhost` command in the terminal.
|
||||
|
||||
## Dependencies
|
||||
|
||||
- [Docker](https://docs.docker.com/get-docker/)
|
||||
- [Docker Compose](https://docs.docker.com/compose/install/)
|
||||
- [curl](https://curl.se/)
|
||||
- [Git](https://git-scm.com/downloads)
|
||||
|
||||
## Supported Platforms
|
||||
|
||||
- MacOS
|
||||
- Linux
|
||||
- Windows WSL2
|
||||
49
cli/cert.sh
Executable file
49
cli/cert.sh
Executable file
@@ -0,0 +1,49 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
certbot certonly \
|
||||
-v \
|
||||
--dns-route53 \
|
||||
-d local.auth.nhost.run \
|
||||
-d local.dashboard.nhost.run \
|
||||
-d local.db.nhost.run \
|
||||
-d local.functions.nhost.run \
|
||||
-d local.graphql.nhost.run \
|
||||
-d local.hasura.nhost.run \
|
||||
-d local.mailhog.nhost.run \
|
||||
-d local.storage.nhost.run \
|
||||
-m 'admin@nhost.io' \
|
||||
--non-interactive \
|
||||
--agree-tos \
|
||||
--server https://acme-v02.api.letsencrypt.org/directory \
|
||||
--logs-dir letsencrypt \
|
||||
--config-dir letsencrypt \
|
||||
--work-dir letsencrypt
|
||||
|
||||
cp letsencrypt/live/local.auth.nhost.run/fullchain.pem ssl/.ssl/local-fullchain.pem
|
||||
cp letsencrypt/live/local.auth.nhost.run/privkey.pem ssl/.ssl/local-privkey.pem
|
||||
|
||||
certbot certonly \
|
||||
-v \
|
||||
--manual \
|
||||
--preferred-challenges dns \
|
||||
-d *.auth.local.nhost.run \
|
||||
-d *.dashboard.local.nhost.run \
|
||||
-d *.db.local.nhost.run \
|
||||
-d *.functions.local.nhost.run \
|
||||
-d *.graphql.local.nhost.run \
|
||||
-d *.hasura.local.nhost.run \
|
||||
-d *.mailhog.local.nhost.run \
|
||||
-d *.storage.local.nhost.run \
|
||||
-m 'admin@nhost.io' \
|
||||
--agree-tos \
|
||||
--server https://acme-v02.api.letsencrypt.org/directory \
|
||||
--logs-dir letsencrypt \
|
||||
--config-dir letsencrypt \
|
||||
--work-dir letsencrypt
|
||||
|
||||
cp letsencrypt/live/auth.local.nhost.run/fullchain.pem ssl/.ssl/sub-fullchain.pem
|
||||
cp letsencrypt/live/auth.local.nhost.run/privkey.pem ssl/.ssl/sub-privkey.pem
|
||||
|
||||
rm -rf letsencrypt
|
||||
126
cli/clienv/clienv.go
Normal file
126
cli/clienv/clienv.go
Normal file
@@ -0,0 +1,126 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/nhost/nhost/cli/nhostclient"
|
||||
"github.com/nhost/nhost/cli/nhostclient/graphql"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
// sanitizeName strips every character that is not a letter, digit,
// underscore or hyphen from name and lowercases the result.
func sanitizeName(name string) string {
	invalid := regexp.MustCompile(`[^a-zA-Z0-9_-]`)
	cleaned := invalid.ReplaceAllString(name, "")

	return strings.ToLower(cleaned)
}
|
||||
|
||||
// CliEnv bundles the state shared by all CLI commands: output streams,
// project filesystem layout, backend endpoints and lazily-created API
// clients.
type CliEnv struct {
	stdout         io.Writer           // destination for normal output
	stderr         io.Writer           // destination for error output
	Path           *PathStructure      // project folder layout
	authURL        string              // Nhost auth service endpoint
	graphqlURL     string              // Nhost GraphQL endpoint
	branch         string              // git branch name used to namespace local resources
	nhclient       *nhostclient.Client // authenticated client, built lazily by GetNhostClient
	nhpublicclient *nhostclient.Client // unauthenticated client, built lazily by GetNhostPublicClient
	projectName    string              // sanitized project name
	localSubdomain string              // subdomain of the local dev environment
}
|
||||
|
||||
// New builds a CliEnv from explicit values. Both API clients are left nil
// and created on first use by GetNhostClient / GetNhostPublicClient.
func New(
	stdout io.Writer,
	stderr io.Writer,
	path *PathStructure,
	authURL string,
	graphqlURL string,
	branch string,
	projectName string,
	localSubdomain string,
) *CliEnv {
	return &CliEnv{
		stdout:         stdout,
		stderr:         stderr,
		Path:           path,
		authURL:        authURL,
		graphqlURL:     graphqlURL,
		branch:         branch,
		nhclient:       nil,
		nhpublicclient: nil,
		projectName:    projectName,
		localSubdomain: localSubdomain,
	}
}
|
||||
|
||||
// FromCLI builds a CliEnv from the urfave/cli context, reading the
// endpoints, folders and naming from the global flags declared in Flags().
func FromCLI(cCtx *cli.Context) *CliEnv {
	cwd, err := os.Getwd()
	if err != nil {
		// Without a working directory no project path can be resolved;
		// the CLI cannot proceed.
		panic(err)
	}

	return &CliEnv{
		stdout: cCtx.App.Writer,
		stderr: cCtx.App.ErrWriter,
		Path: NewPathStructure(
			cwd,
			cCtx.String(flagRootFolder),
			cCtx.String(flagDotNhostFolder),
			cCtx.String(flagNhostFolder),
		),
		authURL:        cCtx.String(flagAuthURL),
		graphqlURL:     cCtx.String(flagGraphqlURL),
		branch:         cCtx.String(flagBranch),
		projectName:    sanitizeName(cCtx.String(flagProjectName)),
		nhclient:       nil,
		nhpublicclient: nil,
		localSubdomain: cCtx.String(flagLocalSubdomain),
	}
}
|
||||
|
||||
// ProjectName returns the sanitized project name.
func (ce *CliEnv) ProjectName() string {
	return ce.projectName
}

// LocalSubdomain returns the subdomain of the local dev environment.
func (ce *CliEnv) LocalSubdomain() string {
	return ce.localSubdomain
}

// AuthURL returns the Nhost auth service endpoint.
func (ce *CliEnv) AuthURL() string {
	return ce.authURL
}

// GraphqlURL returns the Nhost GraphQL endpoint.
func (ce *CliEnv) GraphqlURL() string {
	return ce.graphqlURL
}

// Branch returns the configured git branch name.
func (ce *CliEnv) Branch() string {
	return ce.branch
}
|
||||
|
||||
// GetNhostClient lazily creates and caches an API client authenticated
// with the current session's access token. The first call may trigger the
// interactive login flow via LoadSession.
func (ce *CliEnv) GetNhostClient(ctx context.Context) (*nhostclient.Client, error) {
	if ce.nhclient == nil {
		session, err := ce.LoadSession(ctx)
		if err != nil {
			return nil, fmt.Errorf("failed to load session: %w", err)
		}

		ce.nhclient = nhostclient.New(
			ce.authURL,
			ce.graphqlURL,
			graphql.WithAccessToken(session.Session.AccessToken),
		)
	}

	return ce.nhclient, nil
}
|
||||
|
||||
// GetNhostPublicClient lazily creates and caches an API client without any
// authentication credentials.
func (ce *CliEnv) GetNhostPublicClient() (*nhostclient.Client, error) {
	if ce.nhpublicclient == nil {
		ce.nhpublicclient = nhostclient.New(ce.authURL, ce.graphqlURL)
	}

	return ce.nhpublicclient, nil
}
|
||||
101
cli/clienv/filesystem.go
Normal file
101
cli/clienv/filesystem.go
Normal file
@@ -0,0 +1,101 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// PathStructure resolves every file and folder the CLI reads or writes,
// anchored at the project root.
type PathStructure struct {
	workingDir     string // directory the CLI was invoked from
	root           string // project root folder
	dotNhostFolder string // generated-state folder (.nhost)
	nhostFolder    string // user configuration folder (nhost)
}
|
||||
|
||||
// NewPathStructure builds a PathStructure from the four base folders.
// No validation or path normalization is performed here.
func NewPathStructure(
	workingDir, root, dotNhostFolder, nhostFolder string,
) *PathStructure {
	return &PathStructure{
		workingDir:     workingDir,
		root:           root,
		dotNhostFolder: dotNhostFolder,
		nhostFolder:    nhostFolder,
	}
}
|
||||
|
||||
// WorkingDir returns the directory the CLI was invoked from.
func (p PathStructure) WorkingDir() string {
	return p.workingDir
}

// Root returns the project root folder.
func (p PathStructure) Root() string {
	return p.root
}

// DotNhostFolder returns the generated-state (.nhost) folder.
func (p PathStructure) DotNhostFolder() string {
	return p.dotNhostFolder
}

// NhostFolder returns the user configuration (nhost) folder.
func (p PathStructure) NhostFolder() string {
	return p.nhostFolder
}
|
||||
|
||||
// AuthFile returns the stored-credentials file. Note it lives under the
// user's state home, not under the project tree.
func (p PathStructure) AuthFile() string {
	return filepath.Join(PathStateHome(), "auth.json")
}

// NhostToml returns the project configuration file.
func (p PathStructure) NhostToml() string {
	return filepath.Join(p.nhostFolder, "nhost.toml")
}

// OverlaysFolder returns the folder holding per-subdomain config overlays.
func (p PathStructure) OverlaysFolder() string {
	return filepath.Join(p.nhostFolder, "overlays")
}

// Overlay returns the config overlay file for the given subdomain.
func (p PathStructure) Overlay(subdomain string) string {
	return filepath.Join(p.OverlaysFolder(), subdomain+".json")
}

// Secrets returns the local secrets file at the project root.
func (p PathStructure) Secrets() string {
	return filepath.Join(p.root, ".secrets")
}

// HasuraConfig returns the Hasura CLI config file.
func (p PathStructure) HasuraConfig() string {
	return filepath.Join(p.nhostFolder, "config.yaml")
}

// ProjectFile returns the linked-project summary file.
func (p PathStructure) ProjectFile() string {
	return filepath.Join(p.dotNhostFolder, "project.json")
}

// DockerCompose returns the generated docker-compose file.
func (p PathStructure) DockerCompose() string {
	return filepath.Join(p.dotNhostFolder, "docker-compose.yaml")
}

// Functions returns the serverless functions folder at the project root.
func (p PathStructure) Functions() string {
	return filepath.Join(p.root, "functions")
}
|
||||
|
||||
// PathExists reports whether path appears to exist. Only a confirmed
// "does not exist" error yields false; any other Stat error is treated
// as existing.
func PathExists(path string) bool {
	if _, err := os.Stat(path); os.IsNotExist(err) {
		return false
	}

	return true
}
|
||||
|
||||
// PathStateHome returns the directory where the CLI stores user state,
// honouring $XDG_STATE_HOME and falling back to $HOME/.nhost/state.
func PathStateHome() string {
	if xdg := os.Getenv("XDG_STATE_HOME"); xdg != "" {
		return filepath.Join(xdg, "nhost")
	}

	return filepath.Join(os.Getenv("HOME"), ".nhost", "state")
}
|
||||
|
||||
func (p PathStructure) RunServiceOverlaysFolder(configPath string) string {
|
||||
base := filepath.Dir(configPath)
|
||||
return filepath.Join(base, "nhost", "overlays")
|
||||
}
|
||||
|
||||
func (p PathStructure) RunServiceOverlay(configPath, subdomain string) string {
|
||||
return filepath.Join(p.RunServiceOverlaysFolder(configPath), "run-"+subdomain+".json")
|
||||
}
|
||||
108
cli/clienv/flags.go
Normal file
108
cli/clienv/flags.go
Normal file
@@ -0,0 +1,108 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/go-git/go-git/v5"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
// Names of the global CLI flags registered by Flags() and read back via
// FromCLI.
const (
	flagAuthURL        = "auth-url"
	flagGraphqlURL     = "graphql-url"
	flagBranch         = "branch"
	flagProjectName    = "project-name"
	flagRootFolder     = "root-folder"
	flagNhostFolder    = "nhost-folder"
	flagDotNhostFolder = "dot-nhost-folder"
	flagLocalSubdomain = "local-subdomain"
)
|
||||
|
||||
func getGitBranchName() string {
|
||||
repo, err := git.PlainOpenWithOptions(".", &git.PlainOpenOptions{
|
||||
DetectDotGit: true,
|
||||
EnableDotGitCommonDir: false,
|
||||
})
|
||||
if err != nil {
|
||||
return "nogit"
|
||||
}
|
||||
|
||||
head, err := repo.Head()
|
||||
if err != nil {
|
||||
return "nogit"
|
||||
}
|
||||
|
||||
return head.Name().Short()
|
||||
}
|
||||
|
||||
// Flags returns the global CLI flags shared by all commands: backend
// endpoints, git branch, project-structure folders, project name and the
// local subdomain. An error is returned only when the working directory
// cannot be determined.
func Flags() ([]cli.Flag, error) {
	fullWorkingDir, err := os.Getwd()
	if err != nil {
		return nil, fmt.Errorf("failed to get working directory: %w", err)
	}

	// Default the branch flag to the branch of the surrounding git repo.
	branch := getGitBranchName()

	// Folder defaults are expressed relative to the current directory.
	workingDir := "."
	dotNhostFolder := filepath.Join(workingDir, ".nhost")
	nhostFolder := filepath.Join(workingDir, "nhost")

	return []cli.Flag{
		&cli.StringFlag{ //nolint:exhaustruct
			Name:    flagAuthURL,
			Usage:   "Nhost auth URL",
			EnvVars: []string{"NHOST_CLI_AUTH_URL"},
			Value:   "https://otsispdzcwxyqzbfntmj.auth.eu-central-1.nhost.run/v1",
			Hidden:  true,
		},
		&cli.StringFlag{ //nolint:exhaustruct
			Name:    flagGraphqlURL,
			Usage:   "Nhost GraphQL URL",
			EnvVars: []string{"NHOST_CLI_GRAPHQL_URL"},
			Value:   "https://otsispdzcwxyqzbfntmj.graphql.eu-central-1.nhost.run/v1",
			Hidden:  true,
		},
		&cli.StringFlag{ //nolint:exhaustruct
			Name:    flagBranch,
			Usage:   "Git branch name. If not set, it will be detected from the current git repository. This flag is used to dynamically create docker volumes for each branch. If you want to have a static volume name or if you are not using git, set this flag to a static value.", //nolint:lll
			EnvVars: []string{"BRANCH"},
			Value:   branch,
			Hidden:  false,
		},
		&cli.StringFlag{ //nolint:exhaustruct
			Name:     flagRootFolder,
			Usage:    "Root folder of project\n\t",
			EnvVars:  []string{"NHOST_ROOT_FOLDER"},
			Value:    workingDir,
			Category: "Project structure",
		},
		&cli.StringFlag{ //nolint:exhaustruct
			Name:     flagDotNhostFolder,
			Usage:    "Path to .nhost folder\n\t",
			EnvVars:  []string{"NHOST_DOT_NHOST_FOLDER"},
			Value:    dotNhostFolder,
			Category: "Project structure",
		},
		&cli.StringFlag{ //nolint:exhaustruct
			Name:     flagNhostFolder,
			Usage:    "Path to nhost folder\n\t",
			EnvVars:  []string{"NHOST_NHOST_FOLDER"},
			Value:    nhostFolder,
			Category: "Project structure",
		},
		&cli.StringFlag{ //nolint:exhaustruct
			Name:    flagProjectName,
			Usage:   "Project name",
			Value:   filepath.Base(fullWorkingDir),
			EnvVars: []string{"NHOST_PROJECT_NAME"},
		},
		&cli.StringFlag{ //nolint:exhaustruct
			Name:    flagLocalSubdomain,
			Usage:   "Local subdomain to reach the development environment",
			Value:   "local",
			EnvVars: []string{"NHOST_LOCAL_SUBDOMAIN"},
		},
	}, nil
}
|
||||
91
cli/clienv/style.go
Normal file
91
cli/clienv/style.go
Normal file
@@ -0,0 +1,91 @@
|
||||
//nolint:gochecknoglobals
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
"syscall"
|
||||
|
||||
"github.com/charmbracelet/lipgloss"
|
||||
"golang.org/x/term"
|
||||
)
|
||||
|
||||
// ANSI 4-bit palette colors used for styled terminal output.
const (
	ANSIColorWhite  = lipgloss.Color("15")
	ANSIColorCyan   = lipgloss.Color("14")
	ANSIColorPurple = lipgloss.Color("13")
	ANSIColorBlue   = lipgloss.Color("12")
	ANSIColorYellow = lipgloss.Color("11")
	ANSIColorGreen  = lipgloss.Color("10")
	ANSIColorRed    = lipgloss.Color("9")
	ANSIColorGray   = lipgloss.Color("8")
)

// Icons available for informational and warning messages.
const (
	IconInfo = "ℹ️"
	IconWarn = "⚠"
)
|
||||
|
||||
// info renders text in the informational (cyan) style.
var info = lipgloss.NewStyle().
	Foreground(ANSIColorCyan).
	Render

// warn renders text in the warning (yellow) style.
var warn = lipgloss.NewStyle().
	Foreground(ANSIColorYellow).
	Render

// promptMessage renders interactive prompt text in bold cyan.
var promptMessage = lipgloss.NewStyle().
	Foreground(ANSIColorCyan).
	Bold(true).
	Render
|
||||
|
||||
// Println writes the formatted message plus a newline to stdout. All print
// helpers panic on write errors since the CLI cannot usefully continue
// without its output stream.
func (ce *CliEnv) Println(msg string, a ...any) {
	if _, err := fmt.Fprintln(ce.stdout, fmt.Sprintf(msg, a...)); err != nil {
		panic(err)
	}
}

// Infoln writes the formatted message to stdout in the info (cyan) style.
func (ce *CliEnv) Infoln(msg string, a ...any) {
	if _, err := fmt.Fprintln(ce.stdout, info(fmt.Sprintf(msg, a...))); err != nil {
		panic(err)
	}
}

// Warnln writes the formatted message in the warning (yellow) style.
// NOTE(review): warnings go to stdout, not ce.stderr — confirm intended.
func (ce *CliEnv) Warnln(msg string, a ...any) {
	if _, err := fmt.Fprintln(ce.stdout, warn(fmt.Sprintf(msg, a...))); err != nil {
		panic(err)
	}
}

// PromptMessage writes a "- "-prefixed prompt to stdout without a trailing
// newline, so user input appears on the same line.
func (ce *CliEnv) PromptMessage(msg string, a ...any) {
	if _, err := fmt.Fprint(ce.stdout, promptMessage("- "+fmt.Sprintf(msg, a...))); err != nil {
		panic(err)
	}
}
|
||||
|
||||
func (ce *CliEnv) PromptInput(hide bool) (string, error) {
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
|
||||
var (
|
||||
response string
|
||||
err error
|
||||
)
|
||||
|
||||
if !hide {
|
||||
response, err = reader.ReadString('\n')
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to read input: %w", err)
|
||||
}
|
||||
} else {
|
||||
output, err := term.ReadPassword(syscall.Stdin)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to read input: %w", err)
|
||||
}
|
||||
|
||||
response = string(output)
|
||||
}
|
||||
|
||||
return strings.TrimSpace(response), err
|
||||
}
|
||||
44
cli/clienv/table.go
Normal file
44
cli/clienv/table.go
Normal file
@@ -0,0 +1,44 @@
|
||||
package clienv
|
||||
|
||||
import "github.com/charmbracelet/lipgloss"
|
||||
|
||||
// Column is one column of a rendered table: a header plus its cell values
// in display order.
type Column struct {
	Header string   // column title
	Rows   []string // cell values, top to bottom
}
|
||||
|
||||
func Table(columns ...Column) string {
|
||||
list := lipgloss.NewStyle().
|
||||
Border(lipgloss.NormalBorder(), false, true, false, false).
|
||||
BorderForeground(ANSIColorGray).
|
||||
Padding(1)
|
||||
// Width(30 + 1) //nolint:mnd
|
||||
|
||||
listHeader := lipgloss.NewStyle().
|
||||
Foreground(ANSIColorPurple).
|
||||
Render
|
||||
|
||||
listItem := lipgloss.NewStyle().Render
|
||||
|
||||
strs := make([]string, len(columns))
|
||||
for i, col := range columns {
|
||||
c := make([]string, len(col.Rows)+1)
|
||||
|
||||
c[0] = listHeader(col.Header)
|
||||
for i, row := range col.Rows {
|
||||
c[i+1] = listItem(row)
|
||||
}
|
||||
|
||||
strs[i] = list.Render(
|
||||
lipgloss.JoinVertical(
|
||||
lipgloss.Left,
|
||||
c...,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
return lipgloss.JoinHorizontal(
|
||||
lipgloss.Top,
|
||||
strs...,
|
||||
)
|
||||
}
|
||||
73
cli/clienv/wf_app_info.go
Normal file
73
cli/clienv/wf_app_info.go
Normal file
@@ -0,0 +1,73 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/nhost/nhost/cli/nhostclient/graphql"
|
||||
)
|
||||
|
||||
func getRemoteAppInfo(
|
||||
ctx context.Context,
|
||||
ce *CliEnv,
|
||||
subdomain string,
|
||||
) (*graphql.AppSummaryFragment, error) {
|
||||
cl, err := ce.GetNhostClient(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get nhost client: %w", err)
|
||||
}
|
||||
|
||||
resp, err := cl.GetOrganizationsAndWorkspacesApps(
|
||||
ctx,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get workspaces: %w", err)
|
||||
}
|
||||
|
||||
for _, workspace := range resp.Workspaces {
|
||||
for _, app := range workspace.Apps {
|
||||
if app.Subdomain == subdomain {
|
||||
return app, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, organization := range resp.Organizations {
|
||||
for _, app := range organization.Apps {
|
||||
if app.Subdomain == subdomain {
|
||||
return app, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("failed to find app with subdomain: %s", subdomain) //nolint:err113
|
||||
}
|
||||
|
||||
// GetAppInfo returns summary information for an app. A non-empty subdomain
// is looked up remotely; otherwise the locally linked project file is
// used, and when that file does not exist yet the interactive Link flow
// runs to create it.
func (ce *CliEnv) GetAppInfo(
	ctx context.Context,
	subdomain string,
) (*graphql.AppSummaryFragment, error) {
	if subdomain != "" {
		return getRemoteAppInfo(ctx, ce, subdomain)
	}

	var project *graphql.AppSummaryFragment
	if err := UnmarshalFile(ce.Path.ProjectFile(), &project, json.Unmarshal); err != nil {
		if errors.Is(err, os.ErrNotExist) {
			// Not linked yet: let the user pick a project interactively.
			project, err = ce.Link(ctx)
			if err != nil {
				return nil, err
			}
		} else {
			// The file exists but could not be read/parsed.
			ce.Warnln("Failed to find linked project: %v", err)
			ce.Infoln("Please run `nhost link` to link a project first")

			return nil, err
		}
	}

	return project, nil
}
|
||||
173
cli/clienv/wf_link.go
Normal file
173
cli/clienv/wf_link.go
Normal file
@@ -0,0 +1,173 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
"github.com/nhost/nhost/cli/nhostclient/graphql"
|
||||
)
|
||||
|
||||
// Printlist renders all of the user's apps as a numbered table, listing
// organization apps first and legacy workspace apps (marked with "*")
// second. The numbering matches the order getApp expects as user input.
func Printlist(ce *CliEnv, orgs *graphql.GetOrganizationsAndWorkspacesApps) error {
	if len(orgs.GetWorkspaces())+len(orgs.GetOrganizations()) == 0 {
		return errors.New("no apps found") //nolint:err113
	}

	// One Column per table column; rows are appended in display order.
	num := Column{
		Header: "#",
		Rows:   make([]string, 0),
	}
	subdomain := Column{
		Header: "Subdomain",
		Rows:   make([]string, 0),
	}
	project := Column{
		Header: "Project",
		Rows:   make([]string, 0),
	}
	organization := Column{
		Header: "Organization/Workspace",
		Rows:   make([]string, 0),
	}
	region := Column{
		Header: "Region",
		Rows:   make([]string, 0),
	}

	for _, org := range orgs.GetOrganizations() {
		for _, app := range org.Apps {
			num.Rows = append(num.Rows, strconv.Itoa(len(num.Rows)+1))
			subdomain.Rows = append(subdomain.Rows, app.Subdomain)
			project.Rows = append(project.Rows, app.Name)
			organization.Rows = append(organization.Rows, org.Name)
			region.Rows = append(region.Rows, app.Region.Name)
		}
	}

	for _, ws := range orgs.GetWorkspaces() {
		for _, app := range ws.Apps {
			num.Rows = append(num.Rows, strconv.Itoa(len(num.Rows)+1))
			subdomain.Rows = append(subdomain.Rows, app.Subdomain)
			project.Rows = append(project.Rows, app.Name)
			// "*" marks legacy workspaces; explained in the footer below.
			organization.Rows = append(organization.Rows, ws.Name+"*")
			region.Rows = append(region.Rows, app.Region.Name)
		}
	}

	ce.Println("%s", Table(num, subdomain, project, organization, region))
	ce.Println("* Legacy Workspace")

	return nil
}
|
||||
|
||||
// confirmApp guards against linking the wrong project by requiring the
// user to type the app's subdomain verbatim.
func confirmApp(ce *CliEnv, app *graphql.AppSummaryFragment) error {
	ce.PromptMessage("Enter project subdomain to confirm: ")

	confirm, err := ce.PromptInput(false)
	if err != nil {
		return fmt.Errorf("failed to read input: %w", err)
	}

	if confirm != app.Subdomain {
		return errors.New("input doesn't match the subdomain") //nolint:err113
	}

	return nil
}
|
||||
|
||||
func getApp(
|
||||
orgs *graphql.GetOrganizationsAndWorkspacesApps,
|
||||
idx string,
|
||||
) (*graphql.AppSummaryFragment, error) {
|
||||
x := 1
|
||||
|
||||
var app *graphql.AppSummaryFragment
|
||||
|
||||
OUTER:
|
||||
|
||||
for _, orgs := range orgs.GetOrganizations() {
|
||||
for _, a := range orgs.GetApps() {
|
||||
if strconv.Itoa(x) == idx {
|
||||
a := a
|
||||
app = a
|
||||
|
||||
break OUTER
|
||||
}
|
||||
|
||||
x++
|
||||
}
|
||||
}
|
||||
|
||||
if app != nil {
|
||||
return app, nil
|
||||
}
|
||||
|
||||
OUTER2:
|
||||
for _, ws := range orgs.GetWorkspaces() {
|
||||
for _, a := range ws.GetApps() {
|
||||
if strconv.Itoa(x) == idx {
|
||||
a := a
|
||||
app = a
|
||||
|
||||
break OUTER2
|
||||
}
|
||||
|
||||
x++
|
||||
}
|
||||
}
|
||||
|
||||
if app == nil {
|
||||
return nil, errors.New("invalid input") //nolint:err113
|
||||
}
|
||||
|
||||
return app, nil
|
||||
}
|
||||
|
||||
// Link lets the user pick one of their cloud apps from a printed table,
// confirms the choice by re-typing the subdomain, and stores the app
// summary in the project file under .nhost so later commands can default
// to it.
func (ce *CliEnv) Link(ctx context.Context) (*graphql.AppSummaryFragment, error) {
	cl, err := ce.GetNhostClient(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to get nhost client: %w", err)
	}

	orgs, err := cl.GetOrganizationsAndWorkspacesApps(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to get workspaces: %w", err)
	}

	if len(orgs.GetWorkspaces())+len(orgs.GetOrganizations()) == 0 {
		return nil, errors.New("no apps found") //nolint:err113
	}

	if err := Printlist(ce, orgs); err != nil {
		return nil, err
	}

	ce.PromptMessage("Select the workspace # to link: ")

	idx, err := ce.PromptInput(false)
	if err != nil {
		return nil, fmt.Errorf("failed to read workspace: %w", err)
	}

	// Map the typed number back to an app (same order as the printed table).
	app, err := getApp(orgs, idx)
	if err != nil {
		return nil, err
	}

	if err := confirmApp(ce, app); err != nil {
		return nil, err
	}

	if err := os.MkdirAll(ce.Path.DotNhostFolder(), 0o755); err != nil { //nolint:mnd
		return nil, fmt.Errorf("failed to create .nhost folder: %w", err)
	}

	if err := MarshalFile(app, ce.Path.ProjectFile(), json.Marshal); err != nil {
		return nil, fmt.Errorf("failed to marshal project information: %w", err)
	}

	return app, nil
}
|
||||
296
cli/clienv/wf_login.go
Normal file
296
cli/clienv/wf_login.go
Normal file
@@ -0,0 +1,296 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto"
|
||||
"crypto/tls"
|
||||
"crypto/x509"
|
||||
"encoding/json"
|
||||
"encoding/pem"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"time"
|
||||
|
||||
"github.com/nhost/nhost/cli/nhostclient"
|
||||
"github.com/nhost/nhost/cli/nhostclient/credentials"
|
||||
"github.com/nhost/nhost/cli/ssl"
|
||||
)
|
||||
|
||||
func savePAT(
|
||||
ce *CliEnv,
|
||||
session credentials.Credentials,
|
||||
) error {
|
||||
dir := filepath.Dir(ce.Path.AuthFile())
|
||||
if !PathExists(dir) {
|
||||
if err := os.MkdirAll(dir, 0o755); err != nil { //nolint:mnd
|
||||
return fmt.Errorf("failed to create dir: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
if err := MarshalFile(session, ce.Path.AuthFile(), json.Marshal); err != nil {
|
||||
return fmt.Errorf("failed to write PAT to file: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func signinHandler(ch chan<- string) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
ch <- r.URL.Query().Get("refreshToken")
|
||||
|
||||
fmt.Fprintf(w, "You may now close this window.")
|
||||
}
|
||||
}
|
||||
|
||||
// openBrowser opens url in the user's default browser. The launcher
// command depends on the host OS; the browser process is started
// asynchronously and not waited on.
//
// A "windows" case is added: the previous default would have tried
// xdg-open there, which does not exist on Windows.
func openBrowser(ctx context.Context, url string) error {
	var (
		cmd  string
		args []string
	)

	switch runtime.GOOS {
	case "darwin":
		cmd = "open"
	case "windows":
		// rundll32 dispatches the URL to the default protocol handler.
		cmd = "rundll32"
		args = append(args, "url.dll,FileProtocolHandler")
	default: // "linux", "freebsd", "openbsd", "netbsd"
		cmd = "xdg-open"
	}

	args = append(args, url)
	if err := exec.CommandContext(ctx, cmd, args...).Start(); err != nil {
		return fmt.Errorf("failed to open browser: %w", err)
	}

	return nil
}
|
||||
|
||||
func getTLSServer() (*http.Server, error) {
|
||||
block, _ := pem.Decode(ssl.LocalKeyFile)
|
||||
// Parse the PEM data to obtain the private key
|
||||
privateKey, err := x509.ParsePKCS8PrivateKey(block.Bytes)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse private key: %w", err)
|
||||
}
|
||||
|
||||
// Type assert the private key to crypto.PrivateKey
|
||||
pk, ok := privateKey.(crypto.PrivateKey)
|
||||
if !ok {
|
||||
return nil, errors.New( //nolint:err113
|
||||
"failed to type assert private key to crypto.PrivateKey",
|
||||
)
|
||||
}
|
||||
|
||||
block, _ = pem.Decode(ssl.LocalCertFile)
|
||||
|
||||
certificate, err := x509.ParseCertificate(block.Bytes)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse certificate: %w", err)
|
||||
}
|
||||
|
||||
tlsConfig := &tls.Config{ //nolint:exhaustruct
|
||||
MinVersion: tls.VersionTLS12,
|
||||
CipherSuites: nil,
|
||||
Certificates: []tls.Certificate{
|
||||
{ //nolint:exhaustruct
|
||||
Certificate: [][]byte{certificate.Raw},
|
||||
PrivateKey: pk,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
return &http.Server{ //nolint:exhaustruct
|
||||
Addr: ":8099",
|
||||
TLSConfig: tlsConfig,
|
||||
ReadHeaderTimeout: time.Second * 10, //nolint:mnd
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (ce *CliEnv) loginPAT(pat string) credentials.Credentials {
|
||||
session := credentials.Credentials{
|
||||
ID: "",
|
||||
PersonalAccessToken: pat,
|
||||
}
|
||||
|
||||
return session
|
||||
}
|
||||
|
||||
// loginEmailPassword authenticates with email and password, prompting
// interactively for whichever of the two is empty, then exchanges the
// resulting session token for a personal access token.
func (ce *CliEnv) loginEmailPassword(
	ctx context.Context,
	email string,
	password string,
) (credentials.Credentials, error) {
	cl := nhostclient.New(ce.AuthURL(), ce.GraphqlURL())

	var err error

	if email == "" {
		ce.PromptMessage("email: ")

		email, err = ce.PromptInput(false)
		if err != nil {
			return credentials.Credentials{}, fmt.Errorf("failed to read email: %w", err)
		}
	}

	if password == "" {
		ce.PromptMessage("password: ")
		// Hidden input: the password must not be echoed to the terminal.
		password, err = ce.PromptInput(true)
		ce.Println("")

		if err != nil {
			return credentials.Credentials{}, fmt.Errorf("failed to read password: %w", err)
		}
	}

	ce.Infoln("Authenticating")

	loginResp, err := cl.Login(ctx, email, password)
	if err != nil {
		return credentials.Credentials{}, fmt.Errorf("failed to login: %w", err)
	}

	// Exchange the session's access token for a personal access token.
	session, err := cl.CreatePAT(ctx, loginResp.Session.AccessToken)
	if err != nil {
		return credentials.Credentials{}, fmt.Errorf("failed to create PAT: %w", err)
	}

	ce.Infoln("Successfully logged in")

	return session, nil
}
|
||||
|
||||
// loginGithub drives the browser-based GitHub OAuth flow: it serves a
// local HTTPS callback on :8099, opens the hosted sign-in page, waits for
// the redirect to deliver a refresh token, and exchanges it for a PAT.
func (ce *CliEnv) loginGithub(ctx context.Context) (credentials.Credentials, error) {
	refreshToken := make(chan string)
	http.HandleFunc("/signin", signinHandler(refreshToken))

	// NOTE(review): this server goroutine is never shut down, and
	// log.Fatal exits the whole process on server errors — presumably
	// acceptable for a one-shot CLI login, but worth confirming.
	go func() {
		server, err := getTLSServer()
		if err != nil {
			log.Fatal(err)
		}

		if err := server.ListenAndServeTLS("", ""); err != nil {
			log.Fatal(err)
		}
	}()

	// The redirect host presumably resolves to the local machine so the
	// callback server receives it and the embedded cert matches — verify.
	signinPage := ce.AuthURL() + "/signin/provider/github/?redirectTo=https://local.dashboard.local.nhost.run:8099/signin"
	ce.Infoln("Opening browser to sign-in")

	if err := openBrowser(ctx, signinPage); err != nil {
		return credentials.Credentials{}, err
	}

	ce.Infoln("Waiting for sign-in to complete")

	// Blocks until the callback handler receives the OAuth redirect.
	refreshTokenValue := <-refreshToken

	cl := nhostclient.New(ce.AuthURL(), ce.GraphqlURL())

	refreshTokenResp, err := cl.RefreshToken(ctx, refreshTokenValue)
	if err != nil {
		return credentials.Credentials{}, fmt.Errorf("failed to get access token: %w", err)
	}

	session, err := cl.CreatePAT(ctx, refreshTokenResp.AccessToken)
	if err != nil {
		return credentials.Credentials{}, fmt.Errorf("failed to create PAT: %w", err)
	}

	ce.Infoln("Successfully logged in")

	return session, nil
}
|
||||
|
||||
// loginMethod interactively asks the user to pick an authentication method
// (PAT, email/password or GitHub) and runs it. Unrecognized selections
// re-prompt recursively.
func (ce *CliEnv) loginMethod(ctx context.Context) (credentials.Credentials, error) {
	ce.Infoln("Select authentication method:\n1. PAT\n2. Email/Password\n3. Github")
	ce.PromptMessage("method: ")

	method, err := ce.PromptInput(false)
	if err != nil {
		return credentials.Credentials{}, fmt.Errorf(
			"failed to read authentication method: %w",
			err,
		)
	}

	var session credentials.Credentials

	switch method {
	case "1":
		ce.PromptMessage("PAT: ")

		// Hidden input: tokens should not be echoed to the terminal.
		pat, err := ce.PromptInput(true)
		if err != nil {
			return credentials.Credentials{}, fmt.Errorf("failed to read PAT: %w", err)
		}

		session = ce.loginPAT(pat)
	case "2":
		session, err = ce.loginEmailPassword(ctx, "", "")
	case "3":
		session, err = ce.loginGithub(ctx)
	default:
		// Invalid choice: ask again.
		return ce.loginMethod(ctx)
	}

	return session, err
}
|
||||
|
||||
func (ce *CliEnv) verifyEmail(
|
||||
ctx context.Context,
|
||||
email string,
|
||||
) error {
|
||||
ce.Infoln("Your email address is not verified")
|
||||
|
||||
cl := nhostclient.New(ce.AuthURL(), ce.GraphqlURL())
|
||||
if err := cl.VerifyEmail(ctx, email); err != nil {
|
||||
return fmt.Errorf("failed to send verification email: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("A verification email has been sent to %s", email)
|
||||
ce.Infoln("Please verify your email address and try again")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Login obtains credentials using the first applicable method: an explicit
// PAT, email/password (prompting for whichever is missing), or an
// interactive method selection. On success the credentials are persisted
// via savePAT.
//
// When the backend rejects the login with "unverified-user", a
// verification email is sent instead and empty credentials are returned
// together with any error from sending it.
func (ce *CliEnv) Login(
	ctx context.Context,
	pat string,
	email string,
	password string,
) (credentials.Credentials, error) {
	var (
		session credentials.Credentials
		err     error
	)

	switch {
	case pat != "":
		session = ce.loginPAT(pat)
	case email != "" || password != "":
		session, err = ce.loginEmailPassword(ctx, email, password)
	default:
		session, err = ce.loginMethod(ctx)
	}

	var reqErr *nhostclient.RequestError
	if errors.As(err, &reqErr) && reqErr.ErrorCode == "unverified-user" {
		return credentials.Credentials{}, ce.verifyEmail(ctx, email)
	}

	if err != nil {
		return session, err
	}

	if err := savePAT(ce, session); err != nil {
		return credentials.Credentials{}, err
	}

	return session, nil
}
|
||||
52
cli/clienv/wf_marshal.go
Normal file
52
cli/clienv/wf_marshal.go
Normal file
@@ -0,0 +1,52 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
)
|
||||
|
||||
var ErrNoContent = errors.New("no content")
|
||||
|
||||
func UnmarshalFile(filepath string, v any, f func([]byte, any) error) error {
|
||||
r, err := os.OpenFile(filepath, os.O_RDONLY, 0o600) //nolint:mnd
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open file: %w", err)
|
||||
}
|
||||
defer r.Close()
|
||||
|
||||
b, err := io.ReadAll(r)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read contents of reader: %w", err)
|
||||
}
|
||||
|
||||
if len(b) == 0 {
|
||||
return ErrNoContent
|
||||
}
|
||||
|
||||
if err := f(b, v); err != nil {
|
||||
return fmt.Errorf("failed to unmarshal object: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func MarshalFile(v any, filepath string, fn func(any) ([]byte, error)) error {
|
||||
f, err := os.OpenFile(filepath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o600) //nolint:mnd
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open file: %w", err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
b, err := fn(v)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error marshalling object: %w", err)
|
||||
}
|
||||
|
||||
if _, err := f.Write(b); err != nil {
|
||||
return fmt.Errorf("error writing marshalled object: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
31
cli/clienv/wf_session.go
Normal file
31
cli/clienv/wf_session.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package clienv
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/nhost/nhost/cli/nhostclient"
|
||||
"github.com/nhost/nhost/cli/nhostclient/credentials"
|
||||
)
|
||||
|
||||
// LoadSession returns an authenticated session for the stored personal
// access token. When no credentials file exists (or it cannot be parsed)
// the full interactive Login flow runs first.
func (ce *CliEnv) LoadSession(
	ctx context.Context,
) (credentials.Session, error) {
	var creds credentials.Credentials
	if err := UnmarshalFile(ce.Path.AuthFile(), &creds, json.Unmarshal); err != nil {
		// No usable stored credentials; log in interactively.
		creds, err = ce.Login(ctx, "", "", "")
		if err != nil {
			return credentials.Session{}, fmt.Errorf("failed to login: %w", err)
		}
	}

	cl := nhostclient.New(ce.AuthURL(), ce.GraphqlURL())

	// Authenticate with the PAT to obtain a session.
	session, err := cl.LoginPAT(ctx, creds.PersonalAccessToken)
	if err != nil {
		return credentials.Session{}, fmt.Errorf("failed to login: %w", err)
	}

	return session, nil
}
|
||||
102
cli/cmd/config/apply.go
Normal file
102
cli/cmd/config/apply.go
Normal file
@@ -0,0 +1,102 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/nhost/be/services/mimir/model"
|
||||
"github.com/nhost/nhost/cli/clienv"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
// CommandApply returns the "config apply" CLI command, which pushes the
// local configuration to a cloud project.
func CommandApply() *cli.Command {
	return &cli.Command{ //nolint:exhaustruct
		Name:    "apply",
		Aliases: []string{},
		Usage:   "Apply configuration to cloud project",
		Action:  commandApply,
		Flags: []cli.Flag{
			&cli.StringFlag{ //nolint:exhaustruct
				Name: flagSubdomain,
				// NOTE(review): Usage mentions a default, but the flag is
				// marked Required — confirm which is intended.
				Usage:    "Subdomain of the Nhost project to apply configuration to. Defaults to linked project",
				Required: true,
				EnvVars:  []string{"NHOST_SUBDOMAIN"},
			},
			&cli.BoolFlag{ //nolint:exhaustruct
				Name:    flagYes,
				Usage:   "Skip confirmation",
				EnvVars: []string{"NHOST_YES"},
			},
		},
	}
}
|
||||
|
||||
func commandApply(cCtx *cli.Context) error {
|
||||
ce := clienv.FromCLI(cCtx)
|
||||
|
||||
proj, err := ce.GetAppInfo(cCtx.Context, cCtx.String(flagSubdomain))
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get app info: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("Validating configuration...")
|
||||
|
||||
cfg, _, err := ValidateRemote(
|
||||
cCtx.Context,
|
||||
ce,
|
||||
proj.GetSubdomain(),
|
||||
proj.GetID(),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return Apply(cCtx.Context, ce, proj.ID, cfg, cCtx.Bool(flagYes))
|
||||
}
|
||||
|
||||
func Apply(
|
||||
ctx context.Context,
|
||||
ce *clienv.CliEnv,
|
||||
appID string,
|
||||
cfg *model.ConfigConfig,
|
||||
skipConfirmation bool,
|
||||
) error {
|
||||
if !skipConfirmation {
|
||||
ce.PromptMessage(
|
||||
"We are going to overwrite the project's configuration. Do you want to proceed? [y/N] ",
|
||||
)
|
||||
|
||||
resp, err := ce.PromptInput(false)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read input: %w", err)
|
||||
}
|
||||
|
||||
if resp != "y" && resp != "Y" {
|
||||
return errors.New("aborting") //nolint:err113
|
||||
}
|
||||
}
|
||||
|
||||
cl, err := ce.GetNhostClient(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get nhost client: %w", err)
|
||||
}
|
||||
|
||||
b, err := json.Marshal(cfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal config: %w", err)
|
||||
}
|
||||
|
||||
if _, err := cl.ReplaceConfigRawJSON(
|
||||
ctx,
|
||||
appID,
|
||||
string(b),
|
||||
); err != nil {
|
||||
return fmt.Errorf("failed to apply config: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("Configuration applied successfully!")
|
||||
|
||||
return nil
|
||||
}
|
||||
22
cli/cmd/config/config.go
Normal file
22
cli/cmd/config/config.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package config
|
||||
|
||||
import "github.com/urfave/cli/v2"
|
||||
|
||||
// flagSubdomain is the shared --subdomain flag name used by the config subcommands.
const flagSubdomain = "subdomain"
|
||||
|
||||
func Command() *cli.Command {
|
||||
return &cli.Command{ //nolint:exhaustruct
|
||||
Name: "config",
|
||||
Aliases: []string{},
|
||||
Usage: "Perform config operations",
|
||||
Subcommands: []*cli.Command{
|
||||
CommandDefault(),
|
||||
CommandExample(),
|
||||
CommandApply(),
|
||||
CommandPull(),
|
||||
CommandShow(),
|
||||
CommandValidate(),
|
||||
CommandEdit(),
|
||||
},
|
||||
}
|
||||
}
|
||||
58
cli/cmd/config/default.go
Normal file
58
cli/cmd/config/default.go
Normal file
@@ -0,0 +1,58 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/nhost/nhost/cli/clienv"
|
||||
"github.com/nhost/nhost/cli/project"
|
||||
"github.com/nhost/nhost/cli/project/env"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
func CommandDefault() *cli.Command {
|
||||
return &cli.Command{ //nolint:exhaustruct
|
||||
Name: "default",
|
||||
Aliases: []string{},
|
||||
Usage: "Create default configuration and secrets",
|
||||
Action: commandDefault,
|
||||
Flags: []cli.Flag{},
|
||||
}
|
||||
}
|
||||
|
||||
func commandDefault(cCtx *cli.Context) error {
|
||||
ce := clienv.FromCLI(cCtx)
|
||||
|
||||
if err := os.MkdirAll(ce.Path.NhostFolder(), 0o755); err != nil { //nolint:mnd
|
||||
return fmt.Errorf("failed to create nhost folder: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("Initializing Nhost project")
|
||||
|
||||
if err := InitConfigAndSecrets(ce); err != nil {
|
||||
return fmt.Errorf("failed to initialize project: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("Successfully generated default configuration and secrets")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func InitConfigAndSecrets(ce *clienv.CliEnv) error {
|
||||
config, err := project.DefaultConfig()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create default config: %w", err)
|
||||
}
|
||||
|
||||
if err := clienv.MarshalFile(config, ce.Path.NhostToml(), toml.Marshal); err != nil {
|
||||
return fmt.Errorf("failed to save config: %w", err)
|
||||
}
|
||||
|
||||
secrets := project.DefaultSecrets()
|
||||
if err := clienv.MarshalFile(secrets, ce.Path.Secrets(), env.Marshal); err != nil {
|
||||
return fmt.Errorf("failed to save secrets: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
182
cli/cmd/config/edit.go
Normal file
182
cli/cmd/config/edit.go
Normal file
@@ -0,0 +1,182 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
|
||||
"github.com/nhost/be/services/mimir/model"
|
||||
"github.com/nhost/nhost/cli/clienv"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/urfave/cli/v2"
|
||||
"github.com/wI2L/jsondiff"
|
||||
)
|
||||
|
||||
const (
	// flagEditor selects which editor binary `config edit` launches
	// (defaults to vim, overridable via $EDITOR).
	flagEditor = "editor"
)
|
||||
|
||||
func CommandEdit() *cli.Command {
|
||||
return &cli.Command{ //nolint:exhaustruct
|
||||
Name: "edit",
|
||||
Aliases: []string{},
|
||||
Usage: "Edit base configuration or an overlay",
|
||||
Action: edit,
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagSubdomain,
|
||||
Usage: "If specified, edit this subdomain's overlay, otherwise edit base configuation",
|
||||
EnvVars: []string{"NHOST_SUBDOMAIN"},
|
||||
},
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagEditor,
|
||||
Usage: "Editor to use",
|
||||
Value: "vim",
|
||||
EnvVars: []string{"EDITOR"},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func EditFile(ctx context.Context, editor, filepath string) error {
|
||||
cmd := exec.CommandContext(
|
||||
ctx,
|
||||
editor,
|
||||
filepath,
|
||||
)
|
||||
cmd.Stdin = os.Stdin
|
||||
cmd.Stdout = os.Stdout
|
||||
cmd.Stderr = os.Stderr
|
||||
|
||||
if err := cmd.Run(); err != nil {
|
||||
return fmt.Errorf("failed to open editor: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func CopyConfig[T any](src, dst, overlayPath string) error {
|
||||
var cfg *T
|
||||
if err := clienv.UnmarshalFile(src, &cfg, toml.Unmarshal); err != nil {
|
||||
return fmt.Errorf("failed to parse config: %w", err)
|
||||
}
|
||||
|
||||
var err error
|
||||
if clienv.PathExists(overlayPath) {
|
||||
cfg, err = ApplyJSONPatches(*cfg, overlayPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to apply json patches: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
if err := clienv.MarshalFile(cfg, dst, toml.Marshal); err != nil {
|
||||
return fmt.Errorf("failed to save temporary file: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func readFile(filepath string) (any, error) {
|
||||
f, err := os.Open(filepath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open file: %w", err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
b, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read file: %w", err)
|
||||
}
|
||||
|
||||
var v any
|
||||
if err := toml.Unmarshal(b, &v); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal toml: %w", err)
|
||||
}
|
||||
|
||||
return v, nil
|
||||
}
|
||||
|
||||
func GenerateJSONPatch(origfilepath, newfilepath, dst string) error {
|
||||
origo, err := readFile(origfilepath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to convert original toml to json: %w", err)
|
||||
}
|
||||
|
||||
newo, err := readFile(newfilepath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to convert new toml to json: %w", err)
|
||||
}
|
||||
|
||||
patches, err := jsondiff.Compare(origo, newo)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to generate json patch: %w", err)
|
||||
}
|
||||
|
||||
dstf, err := os.OpenFile(dst, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0o644) //nolint:mnd
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open destination file: %w", err)
|
||||
}
|
||||
defer dstf.Close()
|
||||
|
||||
sort.Slice(patches, func(i, j int) bool {
|
||||
return patches[i].Path < patches[j].Path
|
||||
})
|
||||
|
||||
dstb, err := json.MarshalIndent(patches, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to prettify json: %w", err)
|
||||
}
|
||||
|
||||
if _, err := dstf.Write(dstb); err != nil {
|
||||
return fmt.Errorf("failed to write to destination file: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func edit(cCtx *cli.Context) error {
|
||||
ce := clienv.FromCLI(cCtx)
|
||||
|
||||
if cCtx.String(flagSubdomain) == "" {
|
||||
if err := EditFile(cCtx.Context, cCtx.String(flagEditor), ce.Path.NhostToml()); err != nil {
|
||||
return fmt.Errorf("failed to edit config: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := os.MkdirAll(ce.Path.OverlaysFolder(), 0o755); err != nil { //nolint:mnd
|
||||
return fmt.Errorf("failed to create json patches directory: %w", err)
|
||||
}
|
||||
|
||||
tmpdir, err := os.MkdirTemp(os.TempDir(), "nhost-jsonpatch")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create temporary directory: %w", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpdir)
|
||||
|
||||
tmpfileName := filepath.Join(tmpdir, "nhost.toml")
|
||||
|
||||
if err := CopyConfig[model.ConfigConfig](
|
||||
ce.Path.NhostToml(), tmpfileName, ce.Path.Overlay(cCtx.String(flagSubdomain)),
|
||||
); err != nil {
|
||||
return fmt.Errorf("failed to copy config: %w", err)
|
||||
}
|
||||
|
||||
if err := EditFile(cCtx.Context, cCtx.String(flagEditor), tmpfileName); err != nil {
|
||||
return fmt.Errorf("failed to edit config: %w", err)
|
||||
}
|
||||
|
||||
if err := GenerateJSONPatch(
|
||||
ce.Path.NhostToml(), tmpfileName, ce.Path.Overlay(cCtx.String(flagSubdomain)),
|
||||
); err != nil {
|
||||
return fmt.Errorf("failed to generate json patch: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
554
cli/cmd/config/example.go
Normal file
554
cli/cmd/config/example.go
Normal file
@@ -0,0 +1,554 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/nhost/be/services/mimir/model"
|
||||
"github.com/nhost/be/services/mimir/schema"
|
||||
"github.com/nhost/nhost/cli/clienv"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
func CommandExample() *cli.Command {
|
||||
return &cli.Command{ //nolint:exhaustruct
|
||||
Name: "example",
|
||||
Aliases: []string{},
|
||||
Usage: "Shows an example config file",
|
||||
Action: commandExample,
|
||||
Flags: []cli.Flag{},
|
||||
}
|
||||
}
|
||||
|
||||
// ptr returns a pointer to a copy of v; handy for building config
// literals whose optional fields are pointers to primitives.
func ptr[T any](v T) *T {
	out := v
	return &out
}
|
||||
|
||||
// commandExample builds an example model.ConfigConfig covering every
// section with illustrative values, validates it against the mimir
// schema, and prints it as TOML. The function is long by design: the
// value of the example is that it enumerates the full configuration
// surface in one place.
func commandExample(cCtx *cli.Context) error { //nolint:funlen,maintidx
	ce := clienv.FromCLI(cCtx)

	//nolint:mnd
	cfg := model.ConfigConfig{
		// Global environment variables shared by all services.
		Global: &model.ConfigGlobal{
			Environment: []*model.ConfigGlobalEnvironmentVariable{
				{
					Name:  "NAME",
					Value: "value",
				},
			},
		},
		Ai: &model.ConfigAI{
			Version: ptr("0.3.0"),
			Resources: &model.ConfigAIResources{
				Compute: &model.ConfigComputeResources{
					Cpu:    256,
					Memory: 512,
				},
			},
			Openai: &model.ConfigAIOpenai{
				Organization: ptr("org-id"),
				ApiKey:       "opeanai-api-key",
			},
			AutoEmbeddings: &model.ConfigAIAutoEmbeddings{
				SynchPeriodMinutes: ptr(uint32(10)),
			},
			WebhookSecret: "this-is-a-webhook-secret",
		},
		Graphql: &model.ConfigGraphql{
			Security: &model.ConfigGraphqlSecurity{
				// NOTE: field name typo ("Amin") comes from the upstream model.
				ForbidAminSecret: ptr(true),
				MaxDepthQueries:  ptr(uint(4)),
			},
		},
		Hasura: &model.ConfigHasura{
			Version: new(string),
			JwtSecrets: []*model.ConfigJWTSecret{
				{
					Type: ptr("HS256"),
					Key:  ptr("secret"),
				},
			},
			AdminSecret:   "adminsecret",
			WebhookSecret: "webhooksecret",
			Settings: &model.ConfigHasuraSettings{
				CorsDomain:                    []string{"*"},
				DevMode:                       ptr(false),
				EnableAllowList:               ptr(true),
				EnableConsole:                 ptr(true),
				EnableRemoteSchemaPermissions: ptr(true),
				EnabledAPIs: []string{
					"metadata",
				},
				InferFunctionPermissions:              ptr(true),
				LiveQueriesMultiplexedRefetchInterval: ptr(uint32(1000)),
				StringifyNumericTypes:                 ptr(false),
			},
			AuthHook: &model.ConfigHasuraAuthHook{
				Url:             "https://customauth.example.com/hook",
				Mode:            ptr("POST"),
				SendRequestBody: ptr(true),
			},
			Logs: &model.ConfigHasuraLogs{
				Level: ptr("warn"),
			},
			Events: &model.ConfigHasuraEvents{
				HttpPoolSize: ptr(uint32(10)),
			},
			Resources: &model.ConfigResources{
				Compute: &model.ConfigResourcesCompute{
					Cpu:    500,
					Memory: 1024,
				},
				Replicas: ptr(uint8(1)),
				Networking: &model.ConfigNetworking{
					Ingresses: []*model.ConfigIngress{
						{
							Fqdn: []string{"hasura.example.com"},
							Tls: &model.ConfigIngressTls{
								ClientCA: ptr(
									"---BEGIN CERTIFICATE---\n...\n---END CERTIFICATE---",
								),
							},
						},
					},
				},
				Autoscaler: nil,
			},
			RateLimit: &model.ConfigRateLimit{
				Limit:    100,
				Interval: "15m",
			},
		},
		Functions: &model.ConfigFunctions{
			Node: &model.ConfigFunctionsNode{
				Version: ptr(int(22)),
			},
			Resources: &model.ConfigFunctionsResources{
				Networking: &model.ConfigNetworking{
					Ingresses: []*model.ConfigIngress{
						{
							Fqdn: []string{"hasura.example.com"},
							Tls: &model.ConfigIngressTls{
								ClientCA: ptr(
									"---BEGIN CERTIFICATE---\n...\n---END CERTIFICATE---",
								),
							},
						},
					},
				},
			},
			RateLimit: &model.ConfigRateLimit{
				Limit:    100,
				Interval: "15m",
			},
		},
		Auth: &model.ConfigAuth{
			Version: ptr("0.25.0"),
			Misc: &model.ConfigAuthMisc{
				ConcealErrors: ptr(false),
			},
			ElevatedPrivileges: &model.ConfigAuthElevatedPrivileges{
				Mode: ptr("required"),
			},
			Resources: &model.ConfigResources{
				Compute: &model.ConfigResourcesCompute{
					Cpu:    250,
					Memory: 512,
				},
				Replicas: ptr(uint8(1)),
				Networking: &model.ConfigNetworking{
					Ingresses: []*model.ConfigIngress{
						{
							Fqdn: []string{"auth.example.com"},
							Tls: &model.ConfigIngressTls{
								ClientCA: ptr(
									"---BEGIN CERTIFICATE---\n...\n---END CERTIFICATE---",
								),
							},
						},
					},
				},
				Autoscaler: nil,
			},
			Redirections: &model.ConfigAuthRedirections{
				ClientUrl: ptr("https://example.com"),
				AllowedUrls: []string{
					"https://example.com",
				},
			},
			SignUp: &model.ConfigAuthSignUp{
				Enabled:         ptr(true),
				DisableNewUsers: ptr(false),
				Turnstile: &model.ConfigAuthSignUpTurnstile{
					SecretKey: "turnstileSecretKey",
				},
			},
			User: &model.ConfigAuthUser{
				Roles: &model.ConfigAuthUserRoles{
					Default: ptr("user"),
					Allowed: []string{"user", "me"},
				},
				Locale: &model.ConfigAuthUserLocale{
					Default: ptr("en"),
					Allowed: []string{"en"},
				},
				Gravatar: &model.ConfigAuthUserGravatar{
					Enabled: ptr(true),
					Default: ptr("identicon"),
					Rating:  ptr("g"),
				},
				Email: &model.ConfigAuthUserEmail{
					Allowed: []string{"asd@example.org"},
					Blocked: []string{"asd@example.com"},
				},
				EmailDomains: &model.ConfigAuthUserEmailDomains{
					Allowed: []string{"example.com"},
					Blocked: []string{"example.org"},
				},
			},
			Session: &model.ConfigAuthSession{
				AccessToken: &model.ConfigAuthSessionAccessToken{
					ExpiresIn: ptr(uint32(3600)),
					CustomClaims: []*model.ConfigAuthsessionaccessTokenCustomClaims{
						{
							Key:     "key",
							Value:   "value",
							Default: ptr("default-value"),
						},
					},
				},
				RefreshToken: &model.ConfigAuthSessionRefreshToken{
					ExpiresIn: ptr(uint32(3600)),
				},
			},
			Method: &model.ConfigAuthMethod{
				Anonymous: &model.ConfigAuthMethodAnonymous{
					Enabled: ptr(false),
				},
				Otp: &model.ConfigAuthMethodOtp{
					Email: &model.ConfigAuthMethodOtpEmail{
						Enabled: ptr(true),
					},
				},
				EmailPasswordless: &model.ConfigAuthMethodEmailPasswordless{
					Enabled: ptr(true),
				},
				EmailPassword: &model.ConfigAuthMethodEmailPassword{
					HibpEnabled:               ptr(true),
					EmailVerificationRequired: ptr(true),
					PasswordMinLength:         ptr(uint8(12)),
				},
				SmsPasswordless: &model.ConfigAuthMethodSmsPasswordless{
					Enabled: ptr(true),
				},
				// One entry per supported OAuth provider.
				Oauth: &model.ConfigAuthMethodOauth{
					Apple: &model.ConfigAuthMethodOauthApple{
						Enabled:    ptr(true),
						ClientId:   ptr("clientid"),
						KeyId:      ptr("keyid"),
						TeamId:     ptr("teamid"),
						Scope:      []string{"scope"},
						PrivateKey: ptr("privatekey"),
						Audience:   ptr("audience"),
					},
					Azuread: &model.ConfigAuthMethodOauthAzuread{
						Tenant:       ptr("tenant"),
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						ClientSecret: ptr("clientsecret"),
					},
					Bitbucket: &model.ConfigStandardOauthProvider{
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						ClientSecret: ptr("clientsecret"),
					},
					Discord: &model.ConfigStandardOauthProviderWithScope{
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						Scope:        []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience:     ptr("audience"),
					},
					Entraid: &model.ConfigAuthMethodOauthEntraid{
						ClientId:     ptr("entraidClientId"),
						ClientSecret: ptr("entraidClientSecret"),
						Enabled:      ptr(true),
						Tenant:       ptr("entraidTenant"),
					},
					Facebook: &model.ConfigStandardOauthProviderWithScope{
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						Scope:        []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience:     ptr("audience"),
					},
					Github: &model.ConfigStandardOauthProviderWithScope{
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						Scope:        []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience:     ptr("audience"),
					},
					Gitlab: &model.ConfigStandardOauthProviderWithScope{
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						Scope:        []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience:     ptr("audience"),
					},
					Google: &model.ConfigStandardOauthProviderWithScope{
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						Scope:        []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience:     ptr("audience"),
					},
					Linkedin: &model.ConfigStandardOauthProviderWithScope{
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						Scope:        []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience:     ptr("audience"),
					},
					Spotify: &model.ConfigStandardOauthProviderWithScope{
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						Scope:        []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience:     ptr("audience"),
					},
					Strava: &model.ConfigStandardOauthProviderWithScope{
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						Scope:        []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience:     ptr("audience"),
					},
					Twitch: &model.ConfigStandardOauthProviderWithScope{
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						Scope:        []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience:     ptr("audience"),
					},
					Twitter: &model.ConfigAuthMethodOauthTwitter{
						Enabled:        ptr(true),
						ConsumerKey:    ptr("consumerkey"),
						ConsumerSecret: ptr("consumersecret"),
					},
					Windowslive: &model.ConfigStandardOauthProviderWithScope{
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						Scope:        []string{"scope"},
						ClientSecret: ptr("clientsecret"),
						Audience:     ptr("audience"),
					},
					Workos: &model.ConfigAuthMethodOauthWorkos{
						Connection:   ptr("connection"),
						Enabled:      ptr(true),
						ClientId:     ptr("clientid"),
						Organization: ptr("organization"),
						ClientSecret: ptr("clientsecret"),
					},
				},
				Webauthn: &model.ConfigAuthMethodWebauthn{
					Enabled: ptr(true),
					RelyingParty: &model.ConfigAuthMethodWebauthnRelyingParty{
						Id:   ptr("example.com"),
						Name: ptr("name"),
						Origins: []string{
							"https://example.com",
						},
					},
					Attestation: &model.ConfigAuthMethodWebauthnAttestation{
						Timeout: ptr(uint32(60000)),
					},
				},
			},
			Totp: &model.ConfigAuthTotp{
				Enabled: ptr(true),
				Issuer:  ptr("issuer"),
			},
			RateLimit: &model.ConfigAuthRateLimit{
				Emails: &model.ConfigRateLimit{
					Limit:    10,
					Interval: "5m",
				},
				Sms: &model.ConfigRateLimit{
					Limit:    10,
					Interval: "5m",
				},
				BruteForce: &model.ConfigRateLimit{
					Limit:    10,
					Interval: "5m",
				},
				Signups: &model.ConfigRateLimit{
					Limit:    10,
					Interval: "5m",
				},
				Global: &model.ConfigRateLimit{
					Limit:    100,
					Interval: "15m",
				},
			},
		},
		Postgres: &model.ConfigPostgres{
			Version: ptr("14-20230312-1"),
			Resources: &model.ConfigPostgresResources{
				Compute: &model.ConfigResourcesCompute{
					Cpu:    2000,
					Memory: 4096,
				},
				EnablePublicAccess: ptr(true),
				Storage: &model.ConfigPostgresResourcesStorage{
					Capacity: 20,
				},
				Replicas: nil,
			},
			Settings: &model.ConfigPostgresSettings{
				Jit:                           ptr("off"),
				MaxConnections:                ptr(int32(100)),
				SharedBuffers:                 ptr("128MB"),
				EffectiveCacheSize:            ptr("4GB"),
				MaintenanceWorkMem:            ptr("64MB"),
				CheckpointCompletionTarget:    ptr(float64(0.9)),
				WalBuffers:                    ptr("-1"),
				DefaultStatisticsTarget:       ptr(int32(100)),
				RandomPageCost:                ptr(float64(4)),
				EffectiveIOConcurrency:        ptr(int32(1)),
				WorkMem:                       ptr("4MB"),
				HugePages:                     ptr("try"),
				MinWalSize:                    ptr("80MB"),
				MaxWalSize:                    ptr("1GB"),
				MaxWorkerProcesses:            ptr(int32(8)),
				MaxParallelWorkersPerGather:   ptr(int32(2)),
				MaxParallelWorkers:            ptr(int32(8)),
				MaxParallelMaintenanceWorkers: ptr(int32(2)),
				WalLevel:                      ptr("replica"),
				MaxWalSenders:                 ptr(int32(10)),
				MaxReplicationSlots:           ptr(int32(10)),
				ArchiveTimeout:                ptr(int32(300)),
				TrackIoTiming:                 ptr("off"),
			},
			Pitr: &model.ConfigPostgresPitr{
				Retention: ptr(uint8(7)),
			},
		},
		Provider: &model.ConfigProvider{
			Smtp: &model.ConfigSmtp{
				User:     "smtpUser",
				Password: "smtpPassword",
				Sender:   "smtpSender",
				Host:     "smtpHost",
				Port:     587, //nolint:mnd
				Secure:   true,
				Method:   "LOGIN",
			},
			Sms: &model.ConfigSms{
				Provider:           ptr("twilio"),
				AccountSid:         "twilioAccountSid",
				AuthToken:          "twilioAuthToken",
				MessagingServiceId: "twilioMessagingServiceId",
			},
		},
		Storage: &model.ConfigStorage{
			Version: ptr("0.3.5"),
			Antivirus: &model.ConfigStorageAntivirus{
				Server: ptr("tcp://run-clamav:3310"),
			},
			Resources: &model.ConfigResources{
				Compute: &model.ConfigResourcesCompute{
					Cpu:    500,
					Memory: 1024,
				},
				Networking: nil,
				Replicas:   ptr(uint8(1)),
				Autoscaler: nil,
			},
			RateLimit: &model.ConfigRateLimit{
				Limit:    100,
				Interval: "15m",
			},
		},
		Observability: &model.ConfigObservability{
			Grafana: &model.ConfigGrafana{
				AdminPassword: "grafanaAdminPassword",
				Smtp: &model.ConfigGrafanaSmtp{
					Host:     "localhost",
					Port:     25,
					Sender:   "admin@localhost",
					User:     "smtpUser",
					Password: "smtpPassword",
				},
				Alerting: &model.ConfigGrafanaAlerting{
					Enabled: ptr(true),
				},
				Contacts: &model.ConfigGrafanaContacts{
					Emails: []string{
						"engineering@acme.com",
					},
					Pagerduty: []*model.ConfigGrafanacontactsPagerduty{
						{
							IntegrationKey: "integration-key",
							Severity:       "critical",
							Class:          "infra",
							Component:      "backend",
							Group:          "group",
						},
					},
					Discord: []*model.ConfigGrafanacontactsDiscord{
						{
							Url:       "https://discord.com/api/webhooks/...",
							AvatarUrl: "https://discord.com/api/avatar/...",
						},
					},
					Slack: []*model.ConfigGrafanacontactsSlack{
						{
							Recipient: "recipient",
							Token:     "token",
							Username:  "username",
							IconEmoji: "danger",
							IconURL:   "https://...",
							MentionUsers: []string{
								"user1", "user2",
							},
							MentionGroups: []string{
								"group1", "group2",
							},
							MentionChannel: "channel",
							Url:            "https://slack.com/api/webhooks/...",
							EndpointURL:    "https://slack.com/api/endpoint/...",
						},
					},
					Webhook: []*model.ConfigGrafanacontactsWebhook{
						{
							Url:                      "https://webhook.example.com",
							HttpMethod:               "POST",
							Username:                 "user",
							Password:                 "password",
							AuthorizationScheme:      "Bearer",
							AuthorizationCredentials: "token",
							MaxAlerts:                10,
						},
					},
				},
			},
		},
	}

	b, err := toml.Marshal(cfg)
	if err != nil {
		return fmt.Errorf("failed to marshal config: %w", err)
	}

	// Sanity-check the example against the schema so it never drifts
	// out of date silently.
	sch, err := schema.New()
	if err != nil {
		return fmt.Errorf("failed to create schema: %w", err)
	}

	if err := sch.ValidateConfig(cfg); err != nil {
		return fmt.Errorf("failed to validate config: %w", err)
	}

	ce.Println("%s", b)

	return nil
}
|
||||
205
cli/cmd/config/pull.go
Normal file
205
cli/cmd/config/pull.go
Normal file
@@ -0,0 +1,205 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/nhost/be/services/mimir/model"
|
||||
"github.com/nhost/nhost/cli/clienv"
|
||||
"github.com/nhost/nhost/cli/nhostclient/graphql"
|
||||
"github.com/nhost/nhost/cli/project/env"
|
||||
"github.com/nhost/nhost/cli/system"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
// Placeholder values substituted for the real secrets when pulling with
// anonymization enabled, so actual secrets never end up in local files.
const (
	DefaultHasuraGraphqlAdminSecret = "nhost-admin-secret" //nolint:gosec
	DefaultGraphqlJWTSecret         = "0f987876650b4a085e64594fae9219e7781b17506bec02489ad061fba8cb22db"
	DefaultNhostWebhookSecret       = "nhost-webhook-secret" //nolint:gosec
)

const (
	// flagYes is the shared --yes flag name used to skip confirmations.
	flagYes = "yes"
)
|
||||
|
||||
func CommandPull() *cli.Command {
|
||||
return &cli.Command{ //nolint:exhaustruct
|
||||
Name: "pull",
|
||||
Aliases: []string{},
|
||||
Usage: "Get cloud configuration",
|
||||
Action: commandPull,
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagSubdomain,
|
||||
Usage: "Pull this subdomain's configuration. Defaults to linked project",
|
||||
EnvVars: []string{"NHOST_SUBDOMAIN"},
|
||||
},
|
||||
&cli.BoolFlag{ //nolint:exhaustruct
|
||||
Name: flagYes,
|
||||
Usage: "Skip confirmation",
|
||||
EnvVars: []string{"NHOST_YES"},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func commandPull(cCtx *cli.Context) error {
|
||||
ce := clienv.FromCLI(cCtx)
|
||||
|
||||
skipConfirmation := cCtx.Bool(flagYes)
|
||||
|
||||
if !skipConfirmation {
|
||||
if err := verifyFile(ce, ce.Path.NhostToml()); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
writeSecrets := true
|
||||
|
||||
if !skipConfirmation {
|
||||
if err := verifyFile(ce, ce.Path.Secrets()); err != nil {
|
||||
writeSecrets = false
|
||||
}
|
||||
}
|
||||
|
||||
proj, err := ce.GetAppInfo(cCtx.Context, cCtx.String(flagSubdomain))
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get app info: %w", err)
|
||||
}
|
||||
|
||||
_, err = Pull(cCtx.Context, ce, proj, writeSecrets)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func verifyFile(ce *clienv.CliEnv, name string) error {
|
||||
if clienv.PathExists(name) {
|
||||
ce.PromptMessage("%s",
|
||||
name+" already exists. Do you want to overwrite it? [y/N] ",
|
||||
)
|
||||
|
||||
resp, err := ce.PromptInput(false)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read input: %w", err)
|
||||
}
|
||||
|
||||
if resp != "y" && resp != "Y" {
|
||||
return errors.New("aborting") //nolint:err113
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func respToSecrets(env []*graphql.GetSecrets_AppSecrets, anonymize bool) model.Secrets {
|
||||
secrets := make(model.Secrets, len(env))
|
||||
for i, s := range env {
|
||||
if anonymize {
|
||||
switch s.Name {
|
||||
case "HASURA_GRAPHQL_ADMIN_SECRET":
|
||||
s.Value = DefaultHasuraGraphqlAdminSecret
|
||||
case "HASURA_GRAPHQL_JWT_SECRET":
|
||||
s.Value = DefaultGraphqlJWTSecret
|
||||
case "NHOST_WEBHOOK_SECRET":
|
||||
s.Value = DefaultNhostWebhookSecret
|
||||
default:
|
||||
s.Value = "FIXME"
|
||||
}
|
||||
}
|
||||
|
||||
secrets[i] = &model.ConfigEnvironmentVariable{
|
||||
Name: s.Name,
|
||||
Value: s.Value,
|
||||
}
|
||||
}
|
||||
|
||||
return secrets
|
||||
}
|
||||
|
||||
func pullSecrets(
|
||||
ctx context.Context,
|
||||
ce *clienv.CliEnv,
|
||||
proj *graphql.AppSummaryFragment,
|
||||
) error {
|
||||
ce.Infoln("Getting secrets list from Nhost...")
|
||||
|
||||
cl, err := ce.GetNhostClient(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get nhost client: %w", err)
|
||||
}
|
||||
|
||||
resp, err := cl.GetSecrets(
|
||||
ctx,
|
||||
proj.ID,
|
||||
)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get secrets: %w", err)
|
||||
}
|
||||
|
||||
secrets := respToSecrets(resp.GetAppSecrets(), true)
|
||||
if err := clienv.MarshalFile(&secrets, ce.Path.Secrets(), env.Marshal); err != nil {
|
||||
return fmt.Errorf("failed to save nhost.toml: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("Adding .secrets to .gitignore...")
|
||||
|
||||
if err := system.AddToGitignore("\n.secrets\n"); err != nil {
|
||||
return fmt.Errorf("failed to add .secrets to .gitignore: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func Pull(
|
||||
ctx context.Context,
|
||||
ce *clienv.CliEnv,
|
||||
proj *graphql.AppSummaryFragment,
|
||||
writeSecrts bool,
|
||||
) (*model.ConfigConfig, error) {
|
||||
ce.Infoln("Pulling config from Nhost...")
|
||||
|
||||
cl, err := ce.GetNhostClient(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get nhost client: %w", err)
|
||||
}
|
||||
|
||||
cfg, err := cl.GetConfigRawJSON(
|
||||
ctx,
|
||||
proj.ID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get config: %w", err)
|
||||
}
|
||||
|
||||
var v model.ConfigConfig
|
||||
if err := json.Unmarshal([]byte(cfg.ConfigRawJSON), &v); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal config: %w", err)
|
||||
}
|
||||
|
||||
if err := os.MkdirAll(ce.Path.NhostFolder(), 0o755); err != nil { //nolint:mnd
|
||||
return nil, fmt.Errorf("failed to create nhost directory: %w", err)
|
||||
}
|
||||
|
||||
if err := clienv.MarshalFile(v, ce.Path.NhostToml(), toml.Marshal); err != nil {
|
||||
return nil, fmt.Errorf("failed to save nhost.toml: %w", err)
|
||||
}
|
||||
|
||||
if writeSecrts {
|
||||
if err := pullSecrets(ctx, ce, proj); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
ce.Infoln("Success!")
|
||||
ce.Warnln(
|
||||
"- Review `nhost/nhost.toml` and make sure there are no secrets before you commit it to git.",
|
||||
)
|
||||
ce.Warnln("- Review `.secrets` file and set your development secrets")
|
||||
ce.Warnln("- Review `.secrets` was added to .gitignore")
|
||||
|
||||
return &v, nil
|
||||
}
|
||||
54
cli/cmd/config/show.go
Normal file
54
cli/cmd/config/show.go
Normal file
@@ -0,0 +1,54 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/nhost/be/services/mimir/model"
|
||||
"github.com/nhost/nhost/cli/clienv"
|
||||
"github.com/nhost/nhost/cli/project/env"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
func CommandShow() *cli.Command {
|
||||
return &cli.Command{ //nolint:exhaustruct
|
||||
Name: "show",
|
||||
Aliases: []string{},
|
||||
Usage: "Shows configuration after resolving secrets",
|
||||
Description: "Note that this command will always use the local secrets, even if you specify subdomain",
|
||||
Action: commandShow,
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagSubdomain,
|
||||
Usage: "Show this subdomain's rendered configuration. Defaults to base configuration",
|
||||
EnvVars: []string{"NHOST_SUBDOMAIN"},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func commandShow(c *cli.Context) error {
|
||||
ce := clienv.FromCLI(c)
|
||||
|
||||
var secrets model.Secrets
|
||||
if err := clienv.UnmarshalFile(ce.Path.Secrets(), &secrets, env.Unmarshal); err != nil {
|
||||
return fmt.Errorf(
|
||||
"failed to parse secrets, make sure secret values are between quotes: %w",
|
||||
err,
|
||||
)
|
||||
}
|
||||
|
||||
cfg, err := Validate(ce, c.String(flagSubdomain), secrets)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
b, err := toml.Marshal(cfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error marshalling config: %w", err)
|
||||
}
|
||||
|
||||
ce.Println("%s", b)
|
||||
|
||||
return nil
|
||||
}
|
||||
8
cli/cmd/config/testdata/validate/success/.secrets
vendored
Normal file
8
cli/cmd/config/testdata/validate/success/.secrets
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
HASURA_GRAPHQL_ADMIN_SECRET='nhost-admin-secret'
|
||||
HASURA_GRAPHQL_JWT_SECRET='0f987876650b4a085e64594fae9219e7781b17506bec02489ad061fba8cb22db'
|
||||
NHOST_WEBHOOK_SECRET='nhost-webhook-secret'
|
||||
GRAFANA_ADMIN_PASSWORD='grafana-admin-password'
|
||||
APPLE_CLIENT_ID='clientID'
|
||||
APPLE_KEY_ID='keyID'
|
||||
APPLE_TEAM_ID='teamID'
|
||||
APPLE_PRIVATE_KEY='privateKey'
|
||||
155
cli/cmd/config/testdata/validate/success/nhost/nhost.toml
vendored
Normal file
155
cli/cmd/config/testdata/validate/success/nhost/nhost.toml
vendored
Normal file
@@ -0,0 +1,155 @@
|
||||
[global]
|
||||
[[global.environment]]
|
||||
name = 'ENVIRONMENT'
|
||||
value = 'production'
|
||||
|
||||
[hasura]
|
||||
version = 'v2.24.1-ce'
|
||||
adminSecret = '{{ secrets.HASURA_GRAPHQL_ADMIN_SECRET }}'
|
||||
webhookSecret = '{{ secrets.NHOST_WEBHOOK_SECRET }}'
|
||||
|
||||
[[hasura.jwtSecrets]]
|
||||
type = 'HS256'
|
||||
key = '{{ secrets.HASURA_GRAPHQL_JWT_SECRET }}'
|
||||
|
||||
[hasura.settings]
|
||||
corsDomain = ['*']
|
||||
devMode = true
|
||||
enableAllowList = false
|
||||
enableConsole = true
|
||||
enableRemoteSchemaPermissions = false
|
||||
enabledAPIs = ['metadata', 'graphql', 'pgdump', 'config']
|
||||
|
||||
[hasura.logs]
|
||||
level = 'warn'
|
||||
|
||||
[hasura.events]
|
||||
httpPoolSize = 100
|
||||
|
||||
[functions]
|
||||
[functions.node]
|
||||
version = 22
|
||||
|
||||
[auth]
|
||||
version = '0.20.0'
|
||||
|
||||
[auth.redirections]
|
||||
clientUrl = 'https://my.app.com'
|
||||
|
||||
[auth.signUp]
|
||||
enabled = true
|
||||
|
||||
[auth.user]
|
||||
[auth.user.roles]
|
||||
default = 'user'
|
||||
allowed = ['user', 'me']
|
||||
|
||||
[auth.user.locale]
|
||||
default = 'en'
|
||||
allowed = ['en']
|
||||
|
||||
[auth.user.gravatar]
|
||||
enabled = true
|
||||
default = 'blank'
|
||||
rating = 'g'
|
||||
|
||||
[auth.user.email]
|
||||
|
||||
[auth.user.emailDomains]
|
||||
|
||||
[auth.session]
|
||||
[auth.session.accessToken]
|
||||
expiresIn = 900
|
||||
|
||||
[auth.session.refreshToken]
|
||||
expiresIn = 2592000
|
||||
|
||||
[auth.method]
|
||||
[auth.method.anonymous]
|
||||
enabled = false
|
||||
|
||||
[auth.method.emailPasswordless]
|
||||
enabled = false
|
||||
|
||||
[auth.method.emailPassword]
|
||||
hibpEnabled = false
|
||||
emailVerificationRequired = true
|
||||
passwordMinLength = 9
|
||||
|
||||
[auth.method.smsPasswordless]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth]
|
||||
[auth.method.oauth.apple]
|
||||
enabled = true
|
||||
clientId = '{{ secrets.APPLE_CLIENT_ID }}'
|
||||
keyId = '{{ secrets.APPLE_KEY_ID }}'
|
||||
teamId = '{{ secrets.APPLE_TEAM_ID }}'
|
||||
privateKey = '{{ secrets.APPLE_PRIVATE_KEY }}'
|
||||
|
||||
[auth.method.oauth.azuread]
|
||||
tenant = 'common'
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.bitbucket]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.discord]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.facebook]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.github]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.gitlab]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.google]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.linkedin]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.spotify]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.strava]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.twitch]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.twitter]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.windowslive]
|
||||
enabled = false
|
||||
|
||||
[auth.method.oauth.workos]
|
||||
enabled = false
|
||||
|
||||
[auth.method.webauthn]
|
||||
enabled = false
|
||||
|
||||
[auth.method.webauthn.attestation]
|
||||
timeout = 60000
|
||||
|
||||
[auth.totp]
|
||||
enabled = false
|
||||
|
||||
[postgres]
|
||||
version = '14.6-20230406-2'
|
||||
|
||||
[postgres.resources.storage]
|
||||
capacity = 1
|
||||
|
||||
[provider]
|
||||
|
||||
[storage]
|
||||
version = '0.3.4'
|
||||
|
||||
[observability]
|
||||
[observability.grafana]
|
||||
adminPassword = '{{ secrets.GRAFANA_ADMIN_PASSWORD }}'
|
||||
32
cli/cmd/config/testdata/validate/success/nhost/overlays/local.json
vendored
Normal file
32
cli/cmd/config/testdata/validate/success/nhost/overlays/local.json
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
[
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/hasura/version",
|
||||
"value": "v2.25.0-ce"
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/global/environment/0",
|
||||
"value": {
|
||||
"name": "ENVIRONMENT",
|
||||
"value": "development"
|
||||
}
|
||||
},
|
||||
{
|
||||
"op": "add",
|
||||
"path": "/global/environment/-",
|
||||
"value": {
|
||||
"name": "FUNCTION_LOG_LEVEL",
|
||||
"value": "debug"
|
||||
}
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/auth/redirections/clientUrl",
|
||||
"value": "http://localhost:3000"
|
||||
},
|
||||
{
|
||||
"op": "remove",
|
||||
"path": "/auth/method/oauth/apple"
|
||||
}
|
||||
]
|
||||
200
cli/cmd/config/validate.go
Normal file
200
cli/cmd/config/validate.go
Normal file
@@ -0,0 +1,200 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
|
||||
"github.com/nhost/be/services/mimir/model"
|
||||
"github.com/nhost/be/services/mimir/schema"
|
||||
"github.com/nhost/be/services/mimir/schema/appconfig"
|
||||
"github.com/nhost/nhost/cli/clienv"
|
||||
"github.com/nhost/nhost/cli/project/env"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/urfave/cli/v2"
|
||||
jsonpatch "gopkg.in/evanphx/json-patch.v5"
|
||||
)
|
||||
|
||||
func CommandValidate() *cli.Command {
|
||||
return &cli.Command{ //nolint:exhaustruct
|
||||
Name: "validate",
|
||||
Aliases: []string{},
|
||||
Usage: "Validate configuration",
|
||||
Action: commandValidate,
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{ //nolint:exhaustruct
|
||||
Name: flagSubdomain,
|
||||
Usage: "Validate this subdomain's configuration. Defaults to linked project",
|
||||
EnvVars: []string{"NHOST_SUBDOMAIN"},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func commandValidate(cCtx *cli.Context) error {
|
||||
ce := clienv.FromCLI(cCtx)
|
||||
|
||||
subdomain := cCtx.String(flagSubdomain)
|
||||
if subdomain != "" && subdomain != "local" {
|
||||
proj, err := ce.GetAppInfo(cCtx.Context, cCtx.String(flagSubdomain))
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get app info: %w", err)
|
||||
}
|
||||
|
||||
_, _, err = ValidateRemote(
|
||||
cCtx.Context,
|
||||
ce,
|
||||
proj.GetSubdomain(),
|
||||
proj.GetID(),
|
||||
)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
var secrets model.Secrets
|
||||
if err := clienv.UnmarshalFile(ce.Path.Secrets(), &secrets, env.Unmarshal); err != nil {
|
||||
return fmt.Errorf(
|
||||
"failed to parse secrets, make sure secret values are between quotes: %w",
|
||||
err,
|
||||
)
|
||||
}
|
||||
|
||||
ce.Infoln("Verifying configuration...")
|
||||
|
||||
if _, err := Validate(ce, "local", secrets); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ce.Infoln("Configuration is valid!")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func ApplyJSONPatches[T any](
|
||||
cfg T,
|
||||
overlayPath string,
|
||||
) (*T, error) {
|
||||
f, err := os.Open(overlayPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to open json patches file: %w", err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
patchesb, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read json patches file: %w", err)
|
||||
}
|
||||
|
||||
cfgb, err := json.Marshal(cfg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to marshal config: %w", err)
|
||||
}
|
||||
|
||||
patch, err := jsonpatch.DecodePatch(patchesb)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to apply json patches: %w", err)
|
||||
}
|
||||
|
||||
cfgb, err = patch.Apply(cfgb)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to apply json patches: %w", err)
|
||||
}
|
||||
|
||||
var r T
|
||||
if err := json.Unmarshal(cfgb, &r); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal config: %w", err)
|
||||
}
|
||||
|
||||
return &r, nil
|
||||
}
|
||||
|
||||
func Validate(
|
||||
ce *clienv.CliEnv,
|
||||
subdomain string,
|
||||
secrets model.Secrets,
|
||||
) (*model.ConfigConfig, error) {
|
||||
cfg := &model.ConfigConfig{} //nolint:exhaustruct
|
||||
if err := clienv.UnmarshalFile(ce.Path.NhostToml(), cfg, toml.Unmarshal); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse config: %w", err)
|
||||
}
|
||||
|
||||
if clienv.PathExists(ce.Path.Overlay(subdomain)) {
|
||||
var err error
|
||||
|
||||
cfg, err = ApplyJSONPatches(*cfg, ce.Path.Overlay(subdomain))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to apply json patches: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
schema, err := schema.New()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create schema: %w", err)
|
||||
}
|
||||
|
||||
cfg, err = appconfig.SecretsResolver(cfg, secrets, schema.Fill)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to validate config: %w", err)
|
||||
}
|
||||
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
// ValidateRemote validates the configuration of a remote project by fetching
|
||||
// the secrets and applying them to the configuration. It also applies any
|
||||
// JSON patches from the overlay directory if it exists.
|
||||
// It returns the original configuration with the applied patches (without being filled
|
||||
// and without secrets resolved) and another configuration filled and with secrets resolved.
|
||||
func ValidateRemote(
|
||||
ctx context.Context,
|
||||
ce *clienv.CliEnv,
|
||||
subdomain string,
|
||||
appID string,
|
||||
) (*model.ConfigConfig, *model.ConfigConfig, error) {
|
||||
cfg := &model.ConfigConfig{} //nolint:exhaustruct
|
||||
if err := clienv.UnmarshalFile(ce.Path.NhostToml(), cfg, toml.Unmarshal); err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to parse config: %w", err)
|
||||
}
|
||||
|
||||
if clienv.PathExists(ce.Path.Overlay(subdomain)) {
|
||||
var err error
|
||||
|
||||
cfg, err = ApplyJSONPatches(*cfg, ce.Path.Overlay(subdomain))
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to apply json patches: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
schema, err := schema.New()
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to create schema: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("Getting secrets...")
|
||||
|
||||
cl, err := ce.GetNhostClient(ctx)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to get nhost client: %w", err)
|
||||
}
|
||||
|
||||
secretsResp, err := cl.GetSecrets(
|
||||
ctx,
|
||||
appID,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to get secrets: %w", err)
|
||||
}
|
||||
|
||||
secrets := respToSecrets(secretsResp.GetAppSecrets(), false)
|
||||
|
||||
cfgSecrets, err := appconfig.SecretsResolver(cfg, secrets, schema.Fill)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to validate config: %w", err)
|
||||
}
|
||||
|
||||
ce.Infoln("Config is valid!")
|
||||
|
||||
return cfg, cfgSecrets, nil
|
||||
}
|
||||
288
cli/cmd/config/validate_test.go
Normal file
288
cli/cmd/config/validate_test.go
Normal file
@@ -0,0 +1,288 @@
|
||||
package config_test
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/nhost/be/services/mimir/model"
|
||||
"github.com/nhost/nhost/cli/clienv"
|
||||
"github.com/nhost/nhost/cli/cmd/config"
|
||||
"github.com/nhost/nhost/cli/project/env"
|
||||
)
|
||||
|
||||
// ptr returns a pointer to a copy of t; handy for building literals of
// structs whose fields are pointers.
func ptr[T any](t T) *T {
	v := t

	return &v
}
|
||||
|
||||
// expectedConfig builds the fully-resolved configuration the validate test
// expects: the testdata nhost.toml with the local.json overlay applied,
// secrets substituted in, and schema defaults filled.
func expectedConfig() *model.ConfigConfig {
	//nolint:exhaustruct
	return &model.ConfigConfig{
		Global: &model.ConfigGlobal{
			Environment: []*model.ConfigGlobalEnvironmentVariable{
				// ENVIRONMENT is replaced and FUNCTION_LOG_LEVEL appended by
				// the local overlay patches.
				{Name: "ENVIRONMENT", Value: "development"},
				{Name: "FUNCTION_LOG_LEVEL", Value: "debug"},
			},
		},
		Hasura: &model.ConfigHasura{
			// Overlay bumps the version from v2.24.1-ce.
			Version: ptr("v2.25.0-ce"),
			JwtSecrets: []*model.ConfigJWTSecret{
				{
					Type: ptr("HS256"),
					Key:  ptr("0f987876650b4a085e64594fae9219e7781b17506bec02489ad061fba8cb22db"),
				},
			},
			AdminSecret:   "nhost-admin-secret",
			WebhookSecret: "nhost-webhook-secret",
			Settings: &model.ConfigHasuraSettings{
				CorsDomain:                    []string{"*"},
				DevMode:                       ptr(true),
				EnableAllowList:               ptr(false),
				EnableConsole:                 ptr(true),
				EnableRemoteSchemaPermissions: new(bool),
				EnabledAPIs: []string{
					"metadata",
					"graphql",
					"pgdump",
					"config",
				},
				InferFunctionPermissions:              ptr(true),
				LiveQueriesMultiplexedRefetchInterval: ptr(uint32(1000)),
				StringifyNumericTypes:                 ptr(false),
			},
			Logs:   &model.ConfigHasuraLogs{Level: ptr("warn")},
			Events: &model.ConfigHasuraEvents{HttpPoolSize: ptr(uint32(100))},
		},
		Functions: &model.ConfigFunctions{Node: &model.ConfigFunctionsNode{Version: ptr(22)}},
		Auth: &model.ConfigAuth{
			Version: ptr("0.20.0"),
			Misc: &model.ConfigAuthMisc{
				ConcealErrors: ptr(false),
			},
			ElevatedPrivileges: &model.ConfigAuthElevatedPrivileges{
				Mode: ptr("disabled"),
			},
			Redirections: &model.ConfigAuthRedirections{
				// Overlay replaces the production client URL.
				ClientUrl:   ptr("http://localhost:3000"),
				AllowedUrls: []string{},
			},
			SignUp: &model.ConfigAuthSignUp{
				Enabled:         ptr(true),
				DisableNewUsers: ptr(false),
			},
			User: &model.ConfigAuthUser{
				Roles: &model.ConfigAuthUserRoles{
					Default: ptr("user"),
					Allowed: []string{"user", "me"},
				},
				Locale: &model.ConfigAuthUserLocale{
					Default: ptr("en"),
					Allowed: []string{"en"},
				},
				Gravatar: &model.ConfigAuthUserGravatar{
					Enabled: ptr(true),
					Default: ptr("blank"),
					Rating:  ptr("g"),
				},
				Email: &model.ConfigAuthUserEmail{
					Allowed: []string{},
					Blocked: []string{},
				},
				EmailDomains: &model.ConfigAuthUserEmailDomains{
					Allowed: []string{},
					Blocked: []string{},
				},
			},
			Session: &model.ConfigAuthSession{
				AccessToken: &model.ConfigAuthSessionAccessToken{
					ExpiresIn:    ptr(uint32(900)),
					CustomClaims: []*model.ConfigAuthsessionaccessTokenCustomClaims{},
				},
				RefreshToken: &model.ConfigAuthSessionRefreshToken{
					ExpiresIn: ptr(uint32(2592000)),
				},
			},
			Method: &model.ConfigAuthMethod{
				Anonymous: &model.ConfigAuthMethodAnonymous{
					Enabled: ptr(false),
				},
				Otp: &model.ConfigAuthMethodOtp{
					Email: &model.ConfigAuthMethodOtpEmail{
						Enabled: ptr(false),
					},
				},
				EmailPasswordless: &model.ConfigAuthMethodEmailPasswordless{
					Enabled: ptr(false),
				},
				EmailPassword: &model.ConfigAuthMethodEmailPassword{
					HibpEnabled:               ptr(false),
					EmailVerificationRequired: ptr(true),
					PasswordMinLength:         ptr(uint8(9)),
				},
				SmsPasswordless: &model.ConfigAuthMethodSmsPasswordless{
					Enabled: ptr(false),
				},
				Oauth: &model.ConfigAuthMethodOauth{
					// Apple is removed by the overlay, so it falls back to the
					// schema default (disabled, no credentials).
					Apple: &model.ConfigAuthMethodOauthApple{
						Enabled: ptr(false),
					},
					Azuread: &model.ConfigAuthMethodOauthAzuread{
						Enabled: ptr(false),
						Tenant:  ptr("common"),
					},
					Bitbucket: &model.ConfigStandardOauthProvider{
						Enabled: ptr(false),
					},
					Discord: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Entraid: &model.ConfigAuthMethodOauthEntraid{
						Enabled: ptr(false),
						Tenant:  ptr("common"),
					},
					Facebook: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Github: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Gitlab: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Google: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Linkedin: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Spotify: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Strava: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Twitch: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Twitter: &model.ConfigAuthMethodOauthTwitter{
						Enabled: ptr(false),
					},
					Windowslive: &model.ConfigStandardOauthProviderWithScope{
						Enabled: ptr(false),
					},
					Workos: &model.ConfigAuthMethodOauthWorkos{
						Enabled: ptr(false),
					},
				},
				Webauthn: &model.ConfigAuthMethodWebauthn{
					Enabled:      ptr(false),
					RelyingParty: nil,
					Attestation: &model.ConfigAuthMethodWebauthnAttestation{
						Timeout: ptr(uint32(60000)),
					},
				},
			},
			Totp: &model.ConfigAuthTotp{Enabled: ptr(false)},
			RateLimit: &model.ConfigAuthRateLimit{
				Emails: &model.ConfigRateLimit{
					Limit:    10,
					Interval: "1h",
				},
				Sms: &model.ConfigRateLimit{
					Limit:    10,
					Interval: "1h",
				},
				BruteForce: &model.ConfigRateLimit{
					Limit:    10,
					Interval: "5m",
				},
				Signups: &model.ConfigRateLimit{
					Limit:    10,
					Interval: "5m",
				},
				Global: &model.ConfigRateLimit{
					Limit:    100,
					Interval: "1m",
				},
			},
		},
		Postgres: &model.ConfigPostgres{
			Version: ptr("14.6-20230406-2"),
			Resources: &model.ConfigPostgresResources{
				Storage: &model.ConfigPostgresResourcesStorage{
					Capacity: 1,
				},
			},
		},
		Provider: &model.ConfigProvider{},
		Storage:  &model.ConfigStorage{Version: ptr("0.3.4")},
		Observability: &model.ConfigObservability{
			Grafana: &model.ConfigGrafana{
				AdminPassword: "grafana-admin-password",
				Smtp:          nil,
				Alerting: &model.ConfigGrafanaAlerting{
					Enabled: ptr(false),
				},
				Contacts: &model.ConfigGrafanaContacts{},
			},
		},
	}
}
|
||||
|
||||
// TestValidate exercises config.Validate against the testdata project,
// checking that the local overlay patches are applied and the secrets from
// the fixture .secrets file are resolved into the expected configuration.
func TestValidate(t *testing.T) {
	t.Parallel()

	cases := []struct {
		name         string
		path         string
		expected     func() *model.ConfigConfig
		applyPatches bool
	}{
		{
			name:         "applypatches",
			path:         "success",
			expected:     expectedConfig,
			applyPatches: true,
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			t.Parallel()

			// Point the CLI environment at the testdata project layout; the
			// fake URLs are never dialled because validation is local.
			ce := clienv.New(
				os.Stdout,
				os.Stderr,
				clienv.NewPathStructure(
					".",
					filepath.Join("testdata", "validate", tc.path),
					filepath.Join("testdata", "validate", tc.path, ".nhost"),
					filepath.Join("testdata", "validate", tc.path, "nhost"),
				),
				"fakeauthurl",
				"fakegraphqlurl",
				"fakebranch",
				"",
				"local",
			)

			var secrets model.Secrets
			if err := clienv.UnmarshalFile(ce.Path.Secrets(), &secrets, env.Unmarshal); err != nil {
				t.Fatalf(
					"failed to parse secrets, make sure secret values are between quotes: %s",
					err,
				)
			}

			// "local" selects the overlays/local.json patch set.
			cfg, err := config.Validate(ce, "local", secrets)
			if err != nil {
				t.Fatal(err)
			}

			if diff := cmp.Diff(tc.expected(), cfg); diff != "" {
				t.Errorf("%s", diff)
			}
		})
	}
}
|
||||
145
cli/cmd/configserver/configserver.go
Normal file
145
cli/cmd/configserver/configserver.go
Normal file
@@ -0,0 +1,145 @@
|
||||
package configserver
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/99designs/gqlgen/graphql"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/google/uuid"
|
||||
"github.com/nhost/be/services/mimir/graph"
|
||||
cors "github.com/rs/cors/wrapper/gin"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
// CLI flag names for the configserver command.
const (
	bindFlag             = "bind"
	debugFlag            = "debug"
	logFormatJSONFlag    = "log-format-json"
	enablePlaygroundFlag = "enable-playground"
	// storage-local-* flags point the Local storage plugin at the files it
	// reads and writes.
	storageLocalConfigPath      = "storage-local-config-path"
	storageLocalSecretsPath     = "storage-local-secrets-path"
	storageLocalRunServicesPath = "storage-local-run-services-path"
)
|
||||
|
||||
func Command() *cli.Command {
|
||||
return &cli.Command{ //nolint: exhaustruct
|
||||
Name: "configserver",
|
||||
Usage: "serve the application",
|
||||
Hidden: true,
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{ //nolint: exhaustruct
|
||||
Name: bindFlag,
|
||||
Usage: "bind address",
|
||||
Value: ":8088",
|
||||
Category: "server",
|
||||
},
|
||||
&cli.BoolFlag{ //nolint: exhaustruct
|
||||
Name: debugFlag,
|
||||
Usage: "enable debug logging",
|
||||
Category: "general",
|
||||
},
|
||||
&cli.BoolFlag{ //nolint: exhaustruct
|
||||
Name: logFormatJSONFlag,
|
||||
Usage: "format logs in JSON",
|
||||
Category: "general",
|
||||
},
|
||||
&cli.BoolFlag{ //nolint: exhaustruct
|
||||
Name: enablePlaygroundFlag,
|
||||
Usage: "enable graphql playground (under /v1)",
|
||||
Category: "server",
|
||||
EnvVars: []string{"ENABLE_PLAYGROUND"},
|
||||
},
|
||||
&cli.StringFlag{ //nolint: exhaustruct
|
||||
Name: storageLocalConfigPath,
|
||||
Usage: "Path to the local mimir config file",
|
||||
Value: "/tmp/root/nhost/nhost.toml",
|
||||
Category: "plugins",
|
||||
EnvVars: []string{"STORAGE_LOCAL_CONFIG_PATH"},
|
||||
},
|
||||
&cli.StringFlag{ //nolint: exhaustruct
|
||||
Name: storageLocalSecretsPath,
|
||||
Usage: "Path to the local mimir secrets file",
|
||||
Value: "/tmp/root/.secrets",
|
||||
Category: "plugins",
|
||||
EnvVars: []string{"STORAGE_LOCAL_SECRETS_PATH"},
|
||||
},
|
||||
&cli.StringSliceFlag{ //nolint: exhaustruct
|
||||
Name: storageLocalRunServicesPath,
|
||||
Usage: "Path to the local mimir run services files",
|
||||
Category: "plugins",
|
||||
EnvVars: []string{"STORAGE_LOCAL_RUN_SERVICES_PATH"},
|
||||
},
|
||||
},
|
||||
Action: serve,
|
||||
}
|
||||
}
|
||||
|
||||
func dummyMiddleware(
|
||||
ctx context.Context,
|
||||
_ any,
|
||||
next graphql.Resolver,
|
||||
) (any, error) {
|
||||
return next(ctx)
|
||||
}
|
||||
|
||||
func dummyMiddleware2(
|
||||
ctx context.Context,
|
||||
_ any,
|
||||
next graphql.Resolver,
|
||||
_ []string,
|
||||
) (any, error) {
|
||||
return next(ctx)
|
||||
}
|
||||
|
||||
func runServicesFiles(runServices ...string) map[string]string {
|
||||
m := make(map[string]string)
|
||||
|
||||
for _, path := range runServices {
|
||||
id := uuid.NewString()
|
||||
m[id] = path
|
||||
}
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
func serve(cCtx *cli.Context) error {
|
||||
logger := getLogger(cCtx.Bool(debugFlag), cCtx.Bool(logFormatJSONFlag))
|
||||
logger.Info(cCtx.App.Name + " v" + cCtx.App.Version)
|
||||
logFlags(logger, cCtx)
|
||||
|
||||
configFile := cCtx.String(storageLocalConfigPath)
|
||||
secretsFile := cCtx.String(storageLocalSecretsPath)
|
||||
runServices := runServicesFiles(cCtx.StringSlice(storageLocalRunServicesPath)...)
|
||||
|
||||
st := NewLocal(configFile, secretsFile, runServices)
|
||||
|
||||
data, err := st.GetApps(configFile, secretsFile, runServices)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get data from plugin: %w", err)
|
||||
}
|
||||
|
||||
plugins := []graph.Plugin{st}
|
||||
|
||||
resolver, err := graph.NewResolver(data, Querier{}, plugins)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create resolver: %w", err)
|
||||
}
|
||||
|
||||
r := graph.SetupRouter(
|
||||
"/v1/configserver",
|
||||
resolver,
|
||||
dummyMiddleware,
|
||||
dummyMiddleware2,
|
||||
cCtx.Bool(enablePlaygroundFlag),
|
||||
cCtx.App.Version,
|
||||
[]graphql.FieldMiddleware{},
|
||||
gin.Recovery(),
|
||||
cors.Default(),
|
||||
)
|
||||
if err := r.Run(cCtx.String(bindFlag)); err != nil {
|
||||
return fmt.Errorf("failed to run gin: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
220
cli/cmd/configserver/local.go
Normal file
220
cli/cmd/configserver/local.go
Normal file
@@ -0,0 +1,220 @@
|
||||
package configserver
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/nhost/be/services/mimir/graph"
|
||||
"github.com/nhost/be/services/mimir/model"
|
||||
"github.com/nhost/nhost/cli/project/env"
|
||||
"github.com/pelletier/go-toml/v2"
|
||||
"github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
// zeroUUID is the placeholder application ID used for the single local app.
const zeroUUID = "00000000-0000-0000-0000-000000000000"

// ErrNotImpl is returned by the storage operations the local plugin does not
// support (create/delete app, system config changes, etc.).
var ErrNotImpl = errors.New("not implemented")

// Local is a mimir storage plugin backed by files on the local filesystem.
type Local struct {
	// we use paths instead of readers/writers because the intention is that these
	// files will be mounted as volumes in a container and if the file is changed
	// outside of the container, the filedescriptor might just be pointing to the
	// old file.
	config      string            // path to nhost.toml
	secrets     string            // path to the .secrets env file
	runServices map[string]string // run-service ID -> config file path
}
|
||||
|
||||
func NewLocal(config, secrets string, runServices map[string]string) *Local {
|
||||
return &Local{
|
||||
config: config,
|
||||
secrets: secrets,
|
||||
runServices: runServices,
|
||||
}
|
||||
}
|
||||
|
||||
// unmarshal round-trips config through JSON into the concrete type T. It is
// used to convert loosely-typed decoded TOML data into the model structs.
func unmarshal[T any](config any) (*T, error) {
	b, err := json.Marshal(config)
	if err != nil {
		// Messages previously said "cue value" — stale copy from mimir's
		// cue-based code; no cue is involved here.
		return nil, fmt.Errorf("problem marshaling config value: %w", err)
	}

	var cfg T
	if err := json.Unmarshal(b, &cfg); err != nil {
		return nil, fmt.Errorf("problem unmarshaling config value: %w", err)
	}

	return &cfg, nil
}
|
||||
|
||||
func (l *Local) GetServices(runServices map[string]string) (graph.Services, error) {
|
||||
services := make(graph.Services, 0, len(runServices))
|
||||
for id, r := range runServices {
|
||||
b, err := os.ReadFile(r)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read run service file: %w", err)
|
||||
}
|
||||
|
||||
var cfg model.ConfigRunServiceConfig
|
||||
if err := toml.Unmarshal(b, &cfg); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal run service config: %w", err)
|
||||
}
|
||||
|
||||
services = append(services, &graph.Service{
|
||||
ServiceID: id,
|
||||
Config: &cfg,
|
||||
})
|
||||
}
|
||||
|
||||
return services, nil
|
||||
}
|
||||
|
||||
func (l *Local) GetApps(
|
||||
configFile, secretsFile string, runServicesFiles map[string]string,
|
||||
) ([]*graph.App, error) {
|
||||
b, err := os.ReadFile(configFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read config file: %w", err)
|
||||
}
|
||||
|
||||
var rawCfg any
|
||||
if err := toml.Unmarshal(b, &rawCfg); err != nil {
|
||||
return nil, fmt.Errorf("failed to unmarshal config: %w", err)
|
||||
}
|
||||
|
||||
cfg, err := unmarshal[model.ConfigConfig](rawCfg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to fill config: %w", err)
|
||||
}
|
||||
|
||||
b, err = os.ReadFile(secretsFile)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read secrets file: %w", err)
|
||||
}
|
||||
|
||||
var secrets model.Secrets
|
||||
if err := env.Unmarshal(b, &secrets); err != nil {
|
||||
return nil, fmt.Errorf(
|
||||
"failed to parse secrets, make sure secret values are between quotes: %w",
|
||||
err,
|
||||
)
|
||||
}
|
||||
|
||||
services, err := l.GetServices(runServicesFiles)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get services: %w", err)
|
||||
}
|
||||
|
||||
pgMajorVersion := "14"
|
||||
if cfg.GetPostgres().GetVersion() != nil {
|
||||
pgMajorVersion = strings.Split(*cfg.GetPostgres().GetVersion(), ".")[0]
|
||||
}
|
||||
|
||||
return []*graph.App{
|
||||
{
|
||||
Config: cfg,
|
||||
SystemConfig: &model.ConfigSystemConfig{ //nolint:exhaustruct
|
||||
Postgres: &model.ConfigSystemConfigPostgres{ //nolint:exhaustruct
|
||||
MajorVersion: &pgMajorVersion,
|
||||
Database: "local",
|
||||
ConnectionString: &model.ConfigSystemConfigPostgresConnectionString{
|
||||
Backup: "a",
|
||||
Hasura: "a",
|
||||
Auth: "a",
|
||||
Storage: "a",
|
||||
},
|
||||
},
|
||||
},
|
||||
Secrets: secrets,
|
||||
Services: services,
|
||||
AppID: zeroUUID,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (l *Local) CreateApp(_ context.Context, _ *graph.App, _ logrus.FieldLogger) error {
|
||||
return ErrNotImpl
|
||||
}
|
||||
|
||||
func (l *Local) DeleteApp(_ context.Context, _ *graph.App, _ logrus.FieldLogger) error {
|
||||
return ErrNotImpl
|
||||
}
|
||||
|
||||
func (l *Local) UpdateConfig(_ context.Context, _, newApp *graph.App, _ logrus.FieldLogger) error {
|
||||
b, err := toml.Marshal(newApp.Config)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal app config: %w", err)
|
||||
}
|
||||
|
||||
if err := os.WriteFile(l.config, b, 0o644); err != nil { //nolint:gosec,mnd
|
||||
return fmt.Errorf("failed to write config: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (l *Local) UpdateSystemConfig(_ context.Context, _, _ *graph.App, _ logrus.FieldLogger) error {
|
||||
return ErrNotImpl
|
||||
}
|
||||
|
||||
func (l *Local) UpdateSecrets(_ context.Context, _, newApp *graph.App, _ logrus.FieldLogger) error {
|
||||
m := make(map[string]string)
|
||||
for _, v := range newApp.Secrets {
|
||||
m[v.Name] = v.Value
|
||||
}
|
||||
|
||||
b, err := toml.Marshal(m)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal app secrets: %w", err)
|
||||
}
|
||||
|
||||
if err := os.WriteFile(l.secrets, b, 0o644); err != nil { //nolint:gosec,mnd
|
||||
return fmt.Errorf("failed to write secrets: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateRunServiceConfig is not supported by the local config server and
// always returns ErrNotImpl.
func (l *Local) CreateRunServiceConfig(
	_ context.Context, _ string, _ *graph.Service, _ logrus.FieldLogger,
) error {
	return ErrNotImpl
}
|
||||
|
||||
func (l *Local) UpdateRunServiceConfig(
|
||||
_ context.Context, _ string, _, newSvc *graph.Service, _ logrus.FieldLogger,
|
||||
) error {
|
||||
wr, ok := l.runServices[newSvc.ServiceID]
|
||||
if !ok {
|
||||
return fmt.Errorf("run service not found: %s", newSvc.ServiceID) //nolint:err113
|
||||
}
|
||||
|
||||
b, err := toml.Marshal(newSvc.Config)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal run service config: %w", err)
|
||||
}
|
||||
|
||||
if err := os.WriteFile(wr, b, 0o644); err != nil { //nolint:gosec,mnd
|
||||
return fmt.Errorf("failed to write run service config: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeleteRunServiceConfig is not supported by the local config server and
// always returns ErrNotImpl.
func (l *Local) DeleteRunServiceConfig(
	_ context.Context, _ string, _ *graph.Service, _ logrus.FieldLogger,
) error {
	return ErrNotImpl
}
|
||||
|
||||
// ChangeDatabaseVersion is not supported by the local config server and
// always returns ErrNotImpl.
func (l *Local) ChangeDatabaseVersion(
	_ context.Context,
	_, _ *graph.App,
	_ logrus.FieldLogger,
) error {
	return ErrNotImpl
}
|
||||
286
cli/cmd/configserver/local_test.go
Normal file
286
cli/cmd/configserver/local_test.go
Normal file
@@ -0,0 +1,286 @@
|
||||
package configserver_test
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/google/go-cmp/cmp/cmpopts"
|
||||
"github.com/nhost/be/services/mimir/graph"
|
||||
"github.com/nhost/be/services/mimir/model"
|
||||
"github.com/nhost/nhost/cli/cmd/configserver"
|
||||
)
|
||||
|
||||
// rawConfig is the on-disk TOML app-configuration fixture used by the tests
// below; it is expected to parse into the Config field built by newApp.
const rawConfig = `[hasura]
adminSecret = 'hasuraAdminSecret'
webhookSecret = 'webhookSecret'

[[hasura.jwtSecrets]]
type = 'HS256'
key = 'asdasdasdasd'

[observability]
[observability.grafana]
adminPassword = 'asdasd'
`
|
||||
|
||||
// rawSecrets is the on-disk TOML secrets fixture; it is expected to parse
// into the Secrets field built by newApp.
const rawSecrets = `someSecret = 'asdasd'
`
|
||||
|
||||
// ptr returns a pointer to a copy of v; convenient for building struct
// literals whose fields are pointers.
func ptr[T any](v T) *T {
	out := v

	return &out
}
|
||||
|
||||
// newApp builds the *graph.App value that the rawConfig and rawSecrets
// fixtures should round-trip to; tests compare against it with cmp.Diff.
// Keep this literal in sync with the fixture strings above.
func newApp() *graph.App {
	return &graph.App{
		Config: &model.ConfigConfig{
			Global:  nil,
			Graphql: nil,
			// Mirrors the [hasura] table in rawConfig.
			Hasura: &model.ConfigHasura{ //nolint:exhaustruct
				AdminSecret:   "hasuraAdminSecret",
				WebhookSecret: "webhookSecret",
				JwtSecrets: []*model.ConfigJWTSecret{
					{
						Type: ptr("HS256"),
						Key:  ptr("asdasdasdasd"),
					},
				},
			},
			Functions: nil,
			Auth:      nil,
			Postgres:  nil,
			Provider:  nil,
			Storage:   nil,
			Ai:        nil,
			// Mirrors the [observability.grafana] table in rawConfig.
			Observability: &model.ConfigObservability{
				Grafana: &model.ConfigGrafana{
					AdminPassword: "asdasd",
					Smtp:          nil,
					Alerting:      nil,
					Contacts:      nil,
				},
			},
		},
		// System config is synthesized locally, not read from the fixtures.
		SystemConfig: &model.ConfigSystemConfig{ //nolint:exhaustruct
			Postgres: &model.ConfigSystemConfigPostgres{ //nolint:exhaustruct
				MajorVersion: ptr("14"),
				Database:     "local",
				ConnectionString: &model.ConfigSystemConfigPostgresConnectionString{
					Backup:  "a",
					Hasura:  "a",
					Auth:    "a",
					Storage: "a",
				},
			},
		},
		// Mirrors rawSecrets.
		Secrets: []*model.ConfigEnvironmentVariable{
			{
				Name:  "someSecret",
				Value: "asdasd",
			},
		},
		Services: graph.Services{},
		AppID:    "00000000-0000-0000-0000-000000000000",
	}
}
|
||||
|
||||
func TestLocalGetApps(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
configRaw string
|
||||
secretsRaw string
|
||||
expected []*graph.App
|
||||
}{
|
||||
{
|
||||
name: "works",
|
||||
configRaw: rawConfig,
|
||||
secretsRaw: rawSecrets,
|
||||
expected: []*graph.App{newApp()},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
configF, err := os.CreateTemp(t.TempDir(), "TestLocalGetApps")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create temp file: %v", err)
|
||||
}
|
||||
defer os.Remove(configF.Name())
|
||||
|
||||
if _, err := configF.WriteString(tc.configRaw); err != nil {
|
||||
t.Fatalf("failed to write to temp file: %v", err)
|
||||
}
|
||||
|
||||
secretsF, err := os.CreateTemp(t.TempDir(), "TestLocalGetApps")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create temp file: %v", err)
|
||||
}
|
||||
defer os.Remove(secretsF.Name())
|
||||
|
||||
if _, err := secretsF.WriteString(tc.secretsRaw); err != nil {
|
||||
t.Fatalf("failed to write to temp file: %v", err)
|
||||
}
|
||||
|
||||
st := configserver.NewLocal(
|
||||
configF.Name(),
|
||||
secretsF.Name(),
|
||||
nil,
|
||||
)
|
||||
|
||||
got, err := st.GetApps(configF.Name(), secretsF.Name(), nil)
|
||||
if err != nil {
|
||||
t.Errorf("GetApps() got error: %v", err)
|
||||
}
|
||||
|
||||
cmpOpts := cmpopts.IgnoreUnexported(graph.App{}) //nolint:exhaustruct
|
||||
|
||||
if diff := cmp.Diff(tc.expected, got, cmpOpts); diff != "" {
|
||||
t.Errorf("GetApps() mismatch (-want +got):\n%s", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestLocalUpdateConfig(t *testing.T) { //nolint:dupl
|
||||
t.Parallel()
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
configRaw string
|
||||
secretsRaw string
|
||||
newApp *graph.App
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "works",
|
||||
configRaw: rawConfig,
|
||||
secretsRaw: rawSecrets,
|
||||
newApp: newApp(),
|
||||
expected: rawConfig,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
configF, err := os.CreateTemp(t.TempDir(), "TestLocalGetApps")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create temp file: %v", err)
|
||||
}
|
||||
defer os.Remove(configF.Name())
|
||||
|
||||
if _, err := configF.WriteString(tc.configRaw); err != nil {
|
||||
t.Fatalf("failed to write to temp file: %v", err)
|
||||
}
|
||||
|
||||
secretsF, err := os.CreateTemp(t.TempDir(), "TestLocalGetApps")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create temp file: %v", err)
|
||||
}
|
||||
defer os.Remove(secretsF.Name())
|
||||
|
||||
if _, err := secretsF.WriteString(tc.secretsRaw); err != nil {
|
||||
t.Fatalf("failed to write to temp file: %v", err)
|
||||
}
|
||||
|
||||
st := configserver.NewLocal(
|
||||
configF.Name(),
|
||||
secretsF.Name(),
|
||||
nil,
|
||||
)
|
||||
|
||||
if err := st.UpdateConfig(
|
||||
t.Context(),
|
||||
nil,
|
||||
tc.newApp,
|
||||
nil,
|
||||
); err != nil {
|
||||
t.Errorf("UpdateConfig() got error: %v", err)
|
||||
}
|
||||
|
||||
b, err := os.ReadFile(configF.Name())
|
||||
if err != nil {
|
||||
t.Errorf("failed to read config file: %v", err)
|
||||
}
|
||||
|
||||
if diff := cmp.Diff(tc.expected, string(b)); diff != "" {
|
||||
t.Errorf("UpdateConfig() mismatch (-want +got):\n%s", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestLocalUpdateSecrets(t *testing.T) { //nolint:dupl
|
||||
t.Parallel()
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
configRaw string
|
||||
secretsRaw string
|
||||
newApp *graph.App
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "works",
|
||||
configRaw: rawConfig,
|
||||
secretsRaw: rawSecrets,
|
||||
newApp: newApp(),
|
||||
expected: rawSecrets,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
configF, err := os.CreateTemp(t.TempDir(), "TestLocalGetApps")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create temp file: %v", err)
|
||||
}
|
||||
defer os.Remove(configF.Name())
|
||||
|
||||
if _, err := configF.WriteString(tc.configRaw); err != nil {
|
||||
t.Fatalf("failed to write to temp file: %v", err)
|
||||
}
|
||||
|
||||
secretsF, err := os.CreateTemp(t.TempDir(), "TestLocalGetApps")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to create temp file: %v", err)
|
||||
}
|
||||
defer os.Remove(secretsF.Name())
|
||||
|
||||
if _, err := secretsF.WriteString(tc.secretsRaw); err != nil {
|
||||
t.Fatalf("failed to write to temp file: %v", err)
|
||||
}
|
||||
|
||||
st := configserver.NewLocal(
|
||||
configF.Name(),
|
||||
secretsF.Name(),
|
||||
nil,
|
||||
)
|
||||
|
||||
if err := st.UpdateSecrets(
|
||||
t.Context(),
|
||||
nil,
|
||||
tc.newApp,
|
||||
nil,
|
||||
); err != nil {
|
||||
t.Errorf("UpdateSecrets() got error: %v", err)
|
||||
}
|
||||
|
||||
b, err := os.ReadFile(secretsF.Name())
|
||||
if err != nil {
|
||||
t.Errorf("failed to read config file: %v", err)
|
||||
}
|
||||
|
||||
if diff := cmp.Diff(tc.expected, string(b)); diff != "" {
|
||||
t.Errorf("UpdateSecrets() mismatch (-want +got):\n%s", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
54
cli/cmd/configserver/logger.go
Normal file
54
cli/cmd/configserver/logger.go
Normal file
@@ -0,0 +1,54 @@
|
||||
package configserver
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/sirupsen/logrus"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
func getLogger(debug bool, formatJSON bool) *logrus.Logger {
|
||||
logger := logrus.New()
|
||||
if formatJSON {
|
||||
logger.Formatter = &logrus.JSONFormatter{} //nolint: exhaustruct
|
||||
} else {
|
||||
logger.SetFormatter(&logrus.TextFormatter{ //nolint: exhaustruct
|
||||
FullTimestamp: true,
|
||||
})
|
||||
}
|
||||
|
||||
if debug {
|
||||
logger.SetLevel(logrus.DebugLevel)
|
||||
gin.SetMode(gin.DebugMode)
|
||||
} else {
|
||||
logger.SetLevel(logrus.InfoLevel)
|
||||
gin.SetMode(gin.ReleaseMode)
|
||||
}
|
||||
|
||||
return logger
|
||||
}
|
||||
|
||||
func logFlags(logger logrus.FieldLogger, cCtx *cli.Context) {
|
||||
fields := logrus.Fields{}
|
||||
|
||||
for _, flag := range cCtx.App.Flags {
|
||||
name := flag.Names()[0]
|
||||
fields[name] = cCtx.Generic(name)
|
||||
}
|
||||
|
||||
for _, flag := range cCtx.Command.Flags {
|
||||
name := flag.Names()[0]
|
||||
if strings.Contains(name, "pass") ||
|
||||
strings.Contains(name, "token") ||
|
||||
strings.Contains(name, "secret") ||
|
||||
strings.Contains(name, "key") {
|
||||
fields[name] = "******"
|
||||
continue
|
||||
}
|
||||
|
||||
fields[name] = cCtx.Generic(name)
|
||||
}
|
||||
|
||||
logger.WithFields(fields).Info("started with settings")
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user