# File: nhost/examples/guides/codegen-nhost/schema.graphql
# Auto-generated Hasura GraphQL schema export (original: 10144 lines, 204 KiB).
# NOTE(review): the lines above this definition were a non-GraphQL file-listing
# header ("Files", path, size, language label) left over from a copy/paste; they
# are preserved here as comments so the file remains valid SDL.
schema {
  query: query_root
  mutation: mutation_root
  subscription: subscription_root
}
"""
whether this query should be cached (Hasura Cloud only)
"""
directive @cached(
"""
refresh the cache entry
"""
refresh: Boolean! = false
"""
measured in seconds
"""
ttl: Int! = 60
) on QUERY
"""
Boolean expression to compare columns of type "Boolean". All fields are combined with logical 'AND'.
"""
input Boolean_comparison_exp {
_eq: Boolean
_gt: Boolean
_gte: Boolean
_in: [Boolean!]
_is_null: Boolean
_lt: Boolean
_lte: Boolean
_neq: Boolean
_nin: [Boolean!]
}
"""
Boolean expression to compare columns of type "Int". All fields are combined with logical 'AND'.
"""
input Int_comparison_exp {
_eq: Int
_gt: Int
_gte: Int
_in: [Int!]
_is_null: Boolean
_lt: Int
_lte: Int
_neq: Int
_nin: [Int!]
}
"""
Boolean expression to compare columns of type "String". All fields are combined with logical 'AND'.
"""
input String_comparison_exp {
_eq: String
_gt: String
_gte: String
"""
does the column match the given case-insensitive pattern
"""
_ilike: String
_in: [String!]
"""
does the column match the given POSIX regular expression, case insensitive
"""
_iregex: String
_is_null: Boolean
"""
does the column match the given pattern
"""
_like: String
_lt: String
_lte: String
_neq: String
"""
does the column NOT match the given case-insensitive pattern
"""
_nilike: String
_nin: [String!]
"""
does the column NOT match the given POSIX regular expression, case insensitive
"""
_niregex: String
"""
does the column NOT match the given pattern
"""
_nlike: String
"""
does the column NOT match the given POSIX regular expression, case sensitive
"""
_nregex: String
"""
does the column NOT match the given SQL regular expression
"""
_nsimilar: String
"""
does the column match the given POSIX regular expression, case sensitive
"""
_regex: String
"""
does the column match the given SQL regular expression
"""
_similar: String
}
"""
columns and relationships of "attachments"
"""
type attachments {
"""
An object relationship
"""
file: files!
file_id: uuid!
"""
An object relationship
"""
task: tasks!
taskID: uuid!
}
"""
aggregated selection of "attachments"
"""
type attachments_aggregate {
aggregate: attachments_aggregate_fields
nodes: [attachments!]!
}
"""
aggregate fields of "attachments"
"""
type attachments_aggregate_fields {
count(columns: [attachments_select_column!], distinct: Boolean): Int!
max: attachments_max_fields
min: attachments_min_fields
}
"""
Boolean expression to filter rows from the table "attachments". All fields are combined with a logical 'AND'.
"""
input attachments_bool_exp {
_and: [attachments_bool_exp!]
_not: attachments_bool_exp
_or: [attachments_bool_exp!]
file: files_bool_exp
file_id: uuid_comparison_exp
task: tasks_bool_exp
taskID: uuid_comparison_exp
}
"""
unique or primary key constraints on table "attachments"
"""
enum attachments_constraint {
"""
unique or primary key constraint on columns "task_id", "file_id"
"""
attachments_pkey
}
"""
input type for inserting data into table "attachments"
"""
input attachments_insert_input {
file: files_obj_rel_insert_input
file_id: uuid
task: tasks_obj_rel_insert_input
taskID: uuid
}
"""
aggregate max on columns
"""
type attachments_max_fields {
file_id: uuid
taskID: uuid
}
"""
aggregate min on columns
"""
type attachments_min_fields {
file_id: uuid
taskID: uuid
}
"""
response of any mutation on the table "attachments"
"""
type attachments_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [attachments!]!
}
"""
on_conflict condition type for table "attachments"
"""
input attachments_on_conflict {
constraint: attachments_constraint!
update_columns: [attachments_update_column!]! = []
where: attachments_bool_exp
}
"""
Ordering options when selecting data from "attachments".
"""
input attachments_order_by {
file: files_order_by
file_id: order_by
task: tasks_order_by
taskID: order_by
}
"""
primary key columns input for table: attachments
"""
input attachments_pk_columns_input {
file_id: uuid!
taskID: uuid!
}
"""
select columns of table "attachments"
"""
enum attachments_select_column {
"""
column name
"""
file_id
"""
column name
"""
taskID
}
"""
input type for updating data in table "attachments"
"""
input attachments_set_input {
file_id: uuid
taskID: uuid
}
"""
Streaming cursor of the table "attachments"
"""
input attachments_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: attachments_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input attachments_stream_cursor_value_input {
file_id: uuid
taskID: uuid
}
"""
update columns of table "attachments"
"""
enum attachments_update_column {
"""
column name
"""
file_id
"""
column name
"""
taskID
}
input attachments_updates {
"""
sets the columns of the filtered rows to the given values
"""
_set: attachments_set_input
"""
filter the rows which have to be updated
"""
where: attachments_bool_exp!
}
"""
Oauth requests, inserted before redirecting to the provider's site. Don't modify its structure as Hasura Auth relies on it to function properly.
"""
type authProviderRequests {
id: uuid!
options(
"""
JSON select path
"""
path: String
): jsonb
}
"""
aggregated selection of "auth.provider_requests"
"""
type authProviderRequests_aggregate {
aggregate: authProviderRequests_aggregate_fields
nodes: [authProviderRequests!]!
}
"""
aggregate fields of "auth.provider_requests"
"""
type authProviderRequests_aggregate_fields {
count(columns: [authProviderRequests_select_column!], distinct: Boolean): Int!
max: authProviderRequests_max_fields
min: authProviderRequests_min_fields
}
"""
append existing jsonb value of filtered columns with new jsonb value
"""
input authProviderRequests_append_input {
options: jsonb
}
"""
Boolean expression to filter rows from the table "auth.provider_requests". All fields are combined with a logical 'AND'.
"""
input authProviderRequests_bool_exp {
_and: [authProviderRequests_bool_exp!]
_not: authProviderRequests_bool_exp
_or: [authProviderRequests_bool_exp!]
id: uuid_comparison_exp
options: jsonb_comparison_exp
}
"""
unique or primary key constraints on table "auth.provider_requests"
"""
enum authProviderRequests_constraint {
"""
unique or primary key constraint on columns "id"
"""
provider_requests_pkey
}
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
input authProviderRequests_delete_at_path_input {
options: [String!]
}
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
input authProviderRequests_delete_elem_input {
options: Int
}
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
input authProviderRequests_delete_key_input {
options: String
}
"""
input type for inserting data into table "auth.provider_requests"
"""
input authProviderRequests_insert_input {
id: uuid
options: jsonb
}
"""
aggregate max on columns
"""
type authProviderRequests_max_fields {
id: uuid
}
"""
aggregate min on columns
"""
type authProviderRequests_min_fields {
id: uuid
}
"""
response of any mutation on the table "auth.provider_requests"
"""
type authProviderRequests_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [authProviderRequests!]!
}
"""
on_conflict condition type for table "auth.provider_requests"
"""
input authProviderRequests_on_conflict {
constraint: authProviderRequests_constraint!
update_columns: [authProviderRequests_update_column!]! = []
where: authProviderRequests_bool_exp
}
"""
Ordering options when selecting data from "auth.provider_requests".
"""
input authProviderRequests_order_by {
id: order_by
options: order_by
}
"""
primary key columns input for table: auth.provider_requests
"""
input authProviderRequests_pk_columns_input {
id: uuid!
}
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
input authProviderRequests_prepend_input {
options: jsonb
}
"""
select columns of table "auth.provider_requests"
"""
enum authProviderRequests_select_column {
"""
column name
"""
id
"""
column name
"""
options
}
"""
input type for updating data in table "auth.provider_requests"
"""
input authProviderRequests_set_input {
id: uuid
options: jsonb
}
"""
Streaming cursor of the table "authProviderRequests"
"""
input authProviderRequests_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: authProviderRequests_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input authProviderRequests_stream_cursor_value_input {
id: uuid
options: jsonb
}
"""
update columns of table "auth.provider_requests"
"""
enum authProviderRequests_update_column {
"""
column name
"""
id
"""
column name
"""
options
}
input authProviderRequests_updates {
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: authProviderRequests_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: authProviderRequests_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: authProviderRequests_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: authProviderRequests_delete_key_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: authProviderRequests_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: authProviderRequests_set_input
"""
filter the rows which have to be updated
"""
where: authProviderRequests_bool_exp!
}
"""
List of available Oauth providers. Don't modify its structure as Hasura Auth relies on it to function properly.
"""
type authProviders {
id: String!
"""
An array relationship
"""
userProviders(
"""
distinct select on columns
"""
distinct_on: [authUserProviders_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserProviders_order_by!]
"""
filter the rows returned
"""
where: authUserProviders_bool_exp
): [authUserProviders!]!
"""
An aggregate relationship
"""
userProviders_aggregate(
"""
distinct select on columns
"""
distinct_on: [authUserProviders_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserProviders_order_by!]
"""
filter the rows returned
"""
where: authUserProviders_bool_exp
): authUserProviders_aggregate!
}
"""
aggregated selection of "auth.providers"
"""
type authProviders_aggregate {
aggregate: authProviders_aggregate_fields
nodes: [authProviders!]!
}
"""
aggregate fields of "auth.providers"
"""
type authProviders_aggregate_fields {
count(columns: [authProviders_select_column!], distinct: Boolean): Int!
max: authProviders_max_fields
min: authProviders_min_fields
}
"""
Boolean expression to filter rows from the table "auth.providers". All fields are combined with a logical 'AND'.
"""
input authProviders_bool_exp {
_and: [authProviders_bool_exp!]
_not: authProviders_bool_exp
_or: [authProviders_bool_exp!]
id: String_comparison_exp
userProviders: authUserProviders_bool_exp
userProviders_aggregate: authUserProviders_aggregate_bool_exp
}
"""
unique or primary key constraints on table "auth.providers"
"""
enum authProviders_constraint {
"""
unique or primary key constraint on columns "id"
"""
providers_pkey
}
"""
input type for inserting data into table "auth.providers"
"""
input authProviders_insert_input {
id: String
userProviders: authUserProviders_arr_rel_insert_input
}
"""
aggregate max on columns
"""
type authProviders_max_fields {
id: String
}
"""
aggregate min on columns
"""
type authProviders_min_fields {
id: String
}
"""
response of any mutation on the table "auth.providers"
"""
type authProviders_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [authProviders!]!
}
"""
input type for inserting object relation for remote table "auth.providers"
"""
input authProviders_obj_rel_insert_input {
data: authProviders_insert_input!
"""
upsert condition
"""
on_conflict: authProviders_on_conflict
}
"""
on_conflict condition type for table "auth.providers"
"""
input authProviders_on_conflict {
constraint: authProviders_constraint!
update_columns: [authProviders_update_column!]! = []
where: authProviders_bool_exp
}
"""
Ordering options when selecting data from "auth.providers".
"""
input authProviders_order_by {
id: order_by
userProviders_aggregate: authUserProviders_aggregate_order_by
}
"""
primary key columns input for table: auth.providers
"""
input authProviders_pk_columns_input {
id: String!
}
"""
select columns of table "auth.providers"
"""
enum authProviders_select_column {
"""
column name
"""
id
}
"""
input type for updating data in table "auth.providers"
"""
input authProviders_set_input {
id: String
}
"""
Streaming cursor of the table "authProviders"
"""
input authProviders_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: authProviders_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input authProviders_stream_cursor_value_input {
id: String
}
"""
update columns of table "auth.providers"
"""
enum authProviders_update_column {
"""
column name
"""
id
}
input authProviders_updates {
"""
sets the columns of the filtered rows to the given values
"""
_set: authProviders_set_input
"""
filter the rows which have to be updated
"""
where: authProviders_bool_exp!
}
"""
columns and relationships of "auth.refresh_token_types"
"""
type authRefreshTokenTypes {
comment: String
"""
An array relationship
"""
refreshTokens(
"""
distinct select on columns
"""
distinct_on: [authRefreshTokens_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authRefreshTokens_order_by!]
"""
filter the rows returned
"""
where: authRefreshTokens_bool_exp
): [authRefreshTokens!]!
"""
An aggregate relationship
"""
refreshTokens_aggregate(
"""
distinct select on columns
"""
distinct_on: [authRefreshTokens_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authRefreshTokens_order_by!]
"""
filter the rows returned
"""
where: authRefreshTokens_bool_exp
): authRefreshTokens_aggregate!
value: String!
}
"""
aggregated selection of "auth.refresh_token_types"
"""
type authRefreshTokenTypes_aggregate {
aggregate: authRefreshTokenTypes_aggregate_fields
nodes: [authRefreshTokenTypes!]!
}
"""
aggregate fields of "auth.refresh_token_types"
"""
type authRefreshTokenTypes_aggregate_fields {
count(
columns: [authRefreshTokenTypes_select_column!]
distinct: Boolean
): Int!
max: authRefreshTokenTypes_max_fields
min: authRefreshTokenTypes_min_fields
}
"""
Boolean expression to filter rows from the table "auth.refresh_token_types". All fields are combined with a logical 'AND'.
"""
input authRefreshTokenTypes_bool_exp {
_and: [authRefreshTokenTypes_bool_exp!]
_not: authRefreshTokenTypes_bool_exp
_or: [authRefreshTokenTypes_bool_exp!]
comment: String_comparison_exp
refreshTokens: authRefreshTokens_bool_exp
refreshTokens_aggregate: authRefreshTokens_aggregate_bool_exp
value: String_comparison_exp
}
"""
unique or primary key constraints on table "auth.refresh_token_types"
"""
enum authRefreshTokenTypes_constraint {
"""
unique or primary key constraint on columns "value"
"""
refresh_token_types_pkey
}
enum authRefreshTokenTypes_enum {
"""
Personal access token
"""
pat
"""
Regular refresh token
"""
regular
}
"""
Boolean expression to compare columns of type "authRefreshTokenTypes_enum". All fields are combined with logical 'AND'.
"""
input authRefreshTokenTypes_enum_comparison_exp {
_eq: authRefreshTokenTypes_enum
_in: [authRefreshTokenTypes_enum!]
_is_null: Boolean
_neq: authRefreshTokenTypes_enum
_nin: [authRefreshTokenTypes_enum!]
}
"""
input type for inserting data into table "auth.refresh_token_types"
"""
input authRefreshTokenTypes_insert_input {
comment: String
refreshTokens: authRefreshTokens_arr_rel_insert_input
value: String
}
"""
aggregate max on columns
"""
type authRefreshTokenTypes_max_fields {
comment: String
value: String
}
"""
aggregate min on columns
"""
type authRefreshTokenTypes_min_fields {
comment: String
value: String
}
"""
response of any mutation on the table "auth.refresh_token_types"
"""
type authRefreshTokenTypes_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [authRefreshTokenTypes!]!
}
"""
on_conflict condition type for table "auth.refresh_token_types"
"""
input authRefreshTokenTypes_on_conflict {
constraint: authRefreshTokenTypes_constraint!
update_columns: [authRefreshTokenTypes_update_column!]! = []
where: authRefreshTokenTypes_bool_exp
}
"""
Ordering options when selecting data from "auth.refresh_token_types".
"""
input authRefreshTokenTypes_order_by {
comment: order_by
refreshTokens_aggregate: authRefreshTokens_aggregate_order_by
value: order_by
}
"""
primary key columns input for table: auth.refresh_token_types
"""
input authRefreshTokenTypes_pk_columns_input {
value: String!
}
"""
select columns of table "auth.refresh_token_types"
"""
enum authRefreshTokenTypes_select_column {
"""
column name
"""
comment
"""
column name
"""
value
}
"""
input type for updating data in table "auth.refresh_token_types"
"""
input authRefreshTokenTypes_set_input {
comment: String
value: String
}
"""
Streaming cursor of the table "authRefreshTokenTypes"
"""
input authRefreshTokenTypes_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: authRefreshTokenTypes_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input authRefreshTokenTypes_stream_cursor_value_input {
comment: String
value: String
}
"""
update columns of table "auth.refresh_token_types"
"""
enum authRefreshTokenTypes_update_column {
"""
column name
"""
comment
"""
column name
"""
value
}
input authRefreshTokenTypes_updates {
"""
sets the columns of the filtered rows to the given values
"""
_set: authRefreshTokenTypes_set_input
"""
filter the rows which have to be updated
"""
where: authRefreshTokenTypes_bool_exp!
}
"""
User refresh tokens. Hasura auth uses them to rotate new access tokens as long as the refresh token is not expired. Don't modify its structure as Hasura Auth relies on it to function properly.
"""
type authRefreshTokens {
createdAt: timestamptz!
expiresAt: timestamptz!
id: uuid!
metadata(
"""
JSON select path
"""
path: String
): jsonb
refreshTokenHash: String
type: authRefreshTokenTypes_enum!
"""
An object relationship
"""
user: users!
userId: uuid!
}
"""
aggregated selection of "auth.refresh_tokens"
"""
type authRefreshTokens_aggregate {
aggregate: authRefreshTokens_aggregate_fields
nodes: [authRefreshTokens!]!
}
input authRefreshTokens_aggregate_bool_exp {
count: authRefreshTokens_aggregate_bool_exp_count
}
input authRefreshTokens_aggregate_bool_exp_count {
arguments: [authRefreshTokens_select_column!]
distinct: Boolean
filter: authRefreshTokens_bool_exp
predicate: Int_comparison_exp!
}
"""
aggregate fields of "auth.refresh_tokens"
"""
type authRefreshTokens_aggregate_fields {
count(columns: [authRefreshTokens_select_column!], distinct: Boolean): Int!
max: authRefreshTokens_max_fields
min: authRefreshTokens_min_fields
}
"""
order by aggregate values of table "auth.refresh_tokens"
"""
input authRefreshTokens_aggregate_order_by {
count: order_by
max: authRefreshTokens_max_order_by
min: authRefreshTokens_min_order_by
}
"""
append existing jsonb value of filtered columns with new jsonb value
"""
input authRefreshTokens_append_input {
metadata: jsonb
}
"""
input type for inserting array relation for remote table "auth.refresh_tokens"
"""
input authRefreshTokens_arr_rel_insert_input {
data: [authRefreshTokens_insert_input!]!
"""
upsert condition
"""
on_conflict: authRefreshTokens_on_conflict
}
"""
Boolean expression to filter rows from the table "auth.refresh_tokens". All fields are combined with a logical 'AND'.
"""
input authRefreshTokens_bool_exp {
_and: [authRefreshTokens_bool_exp!]
_not: authRefreshTokens_bool_exp
_or: [authRefreshTokens_bool_exp!]
createdAt: timestamptz_comparison_exp
expiresAt: timestamptz_comparison_exp
id: uuid_comparison_exp
metadata: jsonb_comparison_exp
refreshTokenHash: String_comparison_exp
type: authRefreshTokenTypes_enum_comparison_exp
user: users_bool_exp
userId: uuid_comparison_exp
}
"""
unique or primary key constraints on table "auth.refresh_tokens"
"""
enum authRefreshTokens_constraint {
"""
unique or primary key constraint on columns "id"
"""
refresh_tokens_pkey
}
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
input authRefreshTokens_delete_at_path_input {
metadata: [String!]
}
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
input authRefreshTokens_delete_elem_input {
metadata: Int
}
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
input authRefreshTokens_delete_key_input {
metadata: String
}
"""
input type for inserting data into table "auth.refresh_tokens"
"""
input authRefreshTokens_insert_input {
createdAt: timestamptz
expiresAt: timestamptz
id: uuid
metadata: jsonb
refreshTokenHash: String
type: authRefreshTokenTypes_enum
user: users_obj_rel_insert_input
userId: uuid
}
"""
aggregate max on columns
"""
type authRefreshTokens_max_fields {
createdAt: timestamptz
expiresAt: timestamptz
id: uuid
refreshTokenHash: String
userId: uuid
}
"""
order by max() on columns of table "auth.refresh_tokens"
"""
input authRefreshTokens_max_order_by {
createdAt: order_by
expiresAt: order_by
id: order_by
refreshTokenHash: order_by
userId: order_by
}
"""
aggregate min on columns
"""
type authRefreshTokens_min_fields {
createdAt: timestamptz
expiresAt: timestamptz
id: uuid
refreshTokenHash: String
userId: uuid
}
"""
order by min() on columns of table "auth.refresh_tokens"
"""
input authRefreshTokens_min_order_by {
createdAt: order_by
expiresAt: order_by
id: order_by
refreshTokenHash: order_by
userId: order_by
}
"""
response of any mutation on the table "auth.refresh_tokens"
"""
type authRefreshTokens_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [authRefreshTokens!]!
}
"""
on_conflict condition type for table "auth.refresh_tokens"
"""
input authRefreshTokens_on_conflict {
constraint: authRefreshTokens_constraint!
update_columns: [authRefreshTokens_update_column!]! = []
where: authRefreshTokens_bool_exp
}
"""
Ordering options when selecting data from "auth.refresh_tokens".
"""
input authRefreshTokens_order_by {
createdAt: order_by
expiresAt: order_by
id: order_by
metadata: order_by
refreshTokenHash: order_by
type: order_by
user: users_order_by
userId: order_by
}
"""
primary key columns input for table: auth.refresh_tokens
"""
input authRefreshTokens_pk_columns_input {
id: uuid!
}
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
input authRefreshTokens_prepend_input {
metadata: jsonb
}
"""
select columns of table "auth.refresh_tokens"
"""
enum authRefreshTokens_select_column {
"""
column name
"""
createdAt
"""
column name
"""
expiresAt
"""
column name
"""
id
"""
column name
"""
metadata
"""
column name
"""
refreshTokenHash
"""
column name
"""
type
"""
column name
"""
userId
}
"""
input type for updating data in table "auth.refresh_tokens"
"""
input authRefreshTokens_set_input {
createdAt: timestamptz
expiresAt: timestamptz
id: uuid
metadata: jsonb
refreshTokenHash: String
type: authRefreshTokenTypes_enum
userId: uuid
}
"""
Streaming cursor of the table "authRefreshTokens"
"""
input authRefreshTokens_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: authRefreshTokens_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input authRefreshTokens_stream_cursor_value_input {
createdAt: timestamptz
expiresAt: timestamptz
id: uuid
metadata: jsonb
refreshTokenHash: String
type: authRefreshTokenTypes_enum
userId: uuid
}
"""
update columns of table "auth.refresh_tokens"
"""
enum authRefreshTokens_update_column {
"""
column name
"""
createdAt
"""
column name
"""
expiresAt
"""
column name
"""
id
"""
column name
"""
metadata
"""
column name
"""
refreshTokenHash
"""
column name
"""
type
"""
column name
"""
userId
}
input authRefreshTokens_updates {
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: authRefreshTokens_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: authRefreshTokens_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: authRefreshTokens_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: authRefreshTokens_delete_key_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: authRefreshTokens_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: authRefreshTokens_set_input
"""
filter the rows which have to be updated
"""
where: authRefreshTokens_bool_exp!
}
"""
Persistent Hasura roles for users. Don't modify its structure as Hasura Auth relies on it to function properly.
"""
type authRoles {
role: String!
"""
An array relationship
"""
userRoles(
"""
distinct select on columns
"""
distinct_on: [authUserRoles_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserRoles_order_by!]
"""
filter the rows returned
"""
where: authUserRoles_bool_exp
): [authUserRoles!]!
"""
An aggregate relationship
"""
userRoles_aggregate(
"""
distinct select on columns
"""
distinct_on: [authUserRoles_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserRoles_order_by!]
"""
filter the rows returned
"""
where: authUserRoles_bool_exp
): authUserRoles_aggregate!
"""
An array relationship
"""
usersByDefaultRole(
"""
distinct select on columns
"""
distinct_on: [users_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [users_order_by!]
"""
filter the rows returned
"""
where: users_bool_exp
): [users!]!
"""
An aggregate relationship
"""
usersByDefaultRole_aggregate(
"""
distinct select on columns
"""
distinct_on: [users_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [users_order_by!]
"""
filter the rows returned
"""
where: users_bool_exp
): users_aggregate!
}
"""
aggregated selection of "auth.roles"
"""
type authRoles_aggregate {
aggregate: authRoles_aggregate_fields
nodes: [authRoles!]!
}
"""
aggregate fields of "auth.roles"
"""
type authRoles_aggregate_fields {
count(columns: [authRoles_select_column!], distinct: Boolean): Int!
max: authRoles_max_fields
min: authRoles_min_fields
}
"""
Boolean expression to filter rows from the table "auth.roles". All fields are combined with a logical 'AND'.
"""
input authRoles_bool_exp {
_and: [authRoles_bool_exp!]
_not: authRoles_bool_exp
_or: [authRoles_bool_exp!]
role: String_comparison_exp
userRoles: authUserRoles_bool_exp
userRoles_aggregate: authUserRoles_aggregate_bool_exp
usersByDefaultRole: users_bool_exp
usersByDefaultRole_aggregate: users_aggregate_bool_exp
}
"""
unique or primary key constraints on table "auth.roles"
"""
enum authRoles_constraint {
"""
unique or primary key constraint on columns "role"
"""
roles_pkey
}
"""
input type for inserting data into table "auth.roles"
"""
input authRoles_insert_input {
role: String
userRoles: authUserRoles_arr_rel_insert_input
usersByDefaultRole: users_arr_rel_insert_input
}
"""
aggregate max on columns
"""
type authRoles_max_fields {
role: String
}
"""
aggregate min on columns
"""
type authRoles_min_fields {
role: String
}
"""
response of any mutation on the table "auth.roles"
"""
type authRoles_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [authRoles!]!
}
"""
input type for inserting object relation for remote table "auth.roles"
"""
input authRoles_obj_rel_insert_input {
data: authRoles_insert_input!
"""
upsert condition
"""
on_conflict: authRoles_on_conflict
}
"""
on_conflict condition type for table "auth.roles"
"""
input authRoles_on_conflict {
constraint: authRoles_constraint!
update_columns: [authRoles_update_column!]! = []
where: authRoles_bool_exp
}
"""
Ordering options when selecting data from "auth.roles".
"""
input authRoles_order_by {
role: order_by
userRoles_aggregate: authUserRoles_aggregate_order_by
usersByDefaultRole_aggregate: users_aggregate_order_by
}
"""
primary key columns input for table: auth.roles
"""
input authRoles_pk_columns_input {
role: String!
}
"""
select columns of table "auth.roles"
"""
enum authRoles_select_column {
"""
column name
"""
role
}
"""
input type for updating data in table "auth.roles"
"""
input authRoles_set_input {
role: String
}
"""
Streaming cursor of the table "authRoles"
"""
input authRoles_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: authRoles_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input authRoles_stream_cursor_value_input {
role: String
}
"""
update columns of table "auth.roles"
"""
enum authRoles_update_column {
"""
column name
"""
role
}
input authRoles_updates {
"""
sets the columns of the filtered rows to the given values
"""
_set: authRoles_set_input
"""
filter the rows which have to be updated
"""
where: authRoles_bool_exp!
}
"""
Active providers for a given user. Don't modify its structure as Hasura Auth relies on it to function properly.
"""
type authUserProviders {
accessToken: String!
createdAt: timestamptz!
id: uuid!
"""
An object relationship
"""
provider: authProviders!
providerId: String!
providerUserId: String!
refreshToken: String
updatedAt: timestamptz!
"""
An object relationship
"""
user: users!
userId: uuid!
}
"""
aggregated selection of "auth.user_providers"
"""
type authUserProviders_aggregate {
aggregate: authUserProviders_aggregate_fields
nodes: [authUserProviders!]!
}
input authUserProviders_aggregate_bool_exp {
count: authUserProviders_aggregate_bool_exp_count
}
input authUserProviders_aggregate_bool_exp_count {
arguments: [authUserProviders_select_column!]
distinct: Boolean
filter: authUserProviders_bool_exp
predicate: Int_comparison_exp!
}
"""
aggregate fields of "auth.user_providers"
"""
type authUserProviders_aggregate_fields {
count(columns: [authUserProviders_select_column!], distinct: Boolean): Int!
max: authUserProviders_max_fields
min: authUserProviders_min_fields
}
"""
order by aggregate values of table "auth.user_providers"
"""
input authUserProviders_aggregate_order_by {
count: order_by
max: authUserProviders_max_order_by
min: authUserProviders_min_order_by
}
"""
input type for inserting array relation for remote table "auth.user_providers"
"""
input authUserProviders_arr_rel_insert_input {
data: [authUserProviders_insert_input!]!
"""
upsert condition
"""
on_conflict: authUserProviders_on_conflict
}
"""
Boolean expression to filter rows from the table "auth.user_providers". All fields are combined with a logical 'AND'.
"""
input authUserProviders_bool_exp {
_and: [authUserProviders_bool_exp!]
_not: authUserProviders_bool_exp
_or: [authUserProviders_bool_exp!]
accessToken: String_comparison_exp
createdAt: timestamptz_comparison_exp
id: uuid_comparison_exp
provider: authProviders_bool_exp
providerId: String_comparison_exp
providerUserId: String_comparison_exp
refreshToken: String_comparison_exp
updatedAt: timestamptz_comparison_exp
user: users_bool_exp
userId: uuid_comparison_exp
}
"""
unique or primary key constraints on table "auth.user_providers"
"""
enum authUserProviders_constraint {
"""
unique or primary key constraint on columns "id"
"""
user_providers_pkey
"""
unique or primary key constraint on columns "provider_user_id", "provider_id"
"""
user_providers_provider_id_provider_user_id_key
}
"""
input type for inserting data into table "auth.user_providers"
"""
input authUserProviders_insert_input {
accessToken: String
createdAt: timestamptz
id: uuid
provider: authProviders_obj_rel_insert_input
providerId: String
providerUserId: String
refreshToken: String
updatedAt: timestamptz
user: users_obj_rel_insert_input
userId: uuid
}
"""
aggregate max on columns
"""
type authUserProviders_max_fields {
accessToken: String
createdAt: timestamptz
id: uuid
providerId: String
providerUserId: String
refreshToken: String
updatedAt: timestamptz
userId: uuid
}
"""
order by max() on columns of table "auth.user_providers"
"""
input authUserProviders_max_order_by {
accessToken: order_by
createdAt: order_by
id: order_by
providerId: order_by
providerUserId: order_by
refreshToken: order_by
updatedAt: order_by
userId: order_by
}
"""
aggregate min on columns
"""
type authUserProviders_min_fields {
accessToken: String
createdAt: timestamptz
id: uuid
providerId: String
providerUserId: String
refreshToken: String
updatedAt: timestamptz
userId: uuid
}
"""
order by min() on columns of table "auth.user_providers"
"""
input authUserProviders_min_order_by {
accessToken: order_by
createdAt: order_by
id: order_by
providerId: order_by
providerUserId: order_by
refreshToken: order_by
updatedAt: order_by
userId: order_by
}
"""
response of any mutation on the table "auth.user_providers"
"""
type authUserProviders_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [authUserProviders!]!
}
"""
on_conflict condition type for table "auth.user_providers"
"""
input authUserProviders_on_conflict {
constraint: authUserProviders_constraint!
update_columns: [authUserProviders_update_column!]! = []
where: authUserProviders_bool_exp
}
"""
Ordering options when selecting data from "auth.user_providers".
"""
input authUserProviders_order_by {
accessToken: order_by
createdAt: order_by
id: order_by
provider: authProviders_order_by
providerId: order_by
providerUserId: order_by
refreshToken: order_by
updatedAt: order_by
user: users_order_by
userId: order_by
}
"""
primary key columns input for table: auth.user_providers
"""
input authUserProviders_pk_columns_input {
id: uuid!
}
"""
select columns of table "auth.user_providers"
"""
enum authUserProviders_select_column {
"""
column name
"""
accessToken
"""
column name
"""
createdAt
"""
column name
"""
id
"""
column name
"""
providerId
"""
column name
"""
providerUserId
"""
column name
"""
refreshToken
"""
column name
"""
updatedAt
"""
column name
"""
userId
}
"""
input type for updating data in table "auth.user_providers"
"""
input authUserProviders_set_input {
accessToken: String
createdAt: timestamptz
id: uuid
providerId: String
providerUserId: String
refreshToken: String
updatedAt: timestamptz
userId: uuid
}
"""
Streaming cursor of the table "authUserProviders"
"""
input authUserProviders_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: authUserProviders_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input authUserProviders_stream_cursor_value_input {
accessToken: String
createdAt: timestamptz
id: uuid
providerId: String
providerUserId: String
refreshToken: String
updatedAt: timestamptz
userId: uuid
}
"""
update columns of table "auth.user_providers"
"""
enum authUserProviders_update_column {
"""
column name
"""
accessToken
"""
column name
"""
createdAt
"""
column name
"""
id
"""
column name
"""
providerId
"""
column name
"""
providerUserId
"""
column name
"""
refreshToken
"""
column name
"""
updatedAt
"""
column name
"""
userId
}
# Batch-update payload for table "auth.user_providers": applies `_set` to every
# row matching `where`. (Hasura emits no description for *_updates input types.)
input authUserProviders_updates {
"""
sets the columns of the filtered rows to the given values
"""
_set: authUserProviders_set_input
"""
filter the rows which have to be updated
"""
where: authUserProviders_bool_exp!
}
"""
Roles of users. Don't modify its structure as Hasura Auth relies on it to function properly.
"""
type authUserRoles {
createdAt: timestamptz!
id: uuid!
role: String!
"""
An object relationship
"""
roleByRole: authRoles!
"""
An object relationship
"""
user: users!
userId: uuid!
}
"""
aggregated selection of "auth.user_roles"
"""
type authUserRoles_aggregate {
aggregate: authUserRoles_aggregate_fields
nodes: [authUserRoles!]!
}
input authUserRoles_aggregate_bool_exp {
count: authUserRoles_aggregate_bool_exp_count
}
input authUserRoles_aggregate_bool_exp_count {
arguments: [authUserRoles_select_column!]
distinct: Boolean
filter: authUserRoles_bool_exp
predicate: Int_comparison_exp!
}
"""
aggregate fields of "auth.user_roles"
"""
type authUserRoles_aggregate_fields {
count(columns: [authUserRoles_select_column!], distinct: Boolean): Int!
max: authUserRoles_max_fields
min: authUserRoles_min_fields
}
"""
order by aggregate values of table "auth.user_roles"
"""
input authUserRoles_aggregate_order_by {
count: order_by
max: authUserRoles_max_order_by
min: authUserRoles_min_order_by
}
"""
input type for inserting array relation for remote table "auth.user_roles"
"""
input authUserRoles_arr_rel_insert_input {
data: [authUserRoles_insert_input!]!
"""
upsert condition
"""
on_conflict: authUserRoles_on_conflict
}
"""
Boolean expression to filter rows from the table "auth.user_roles". All fields are combined with a logical 'AND'.
"""
input authUserRoles_bool_exp {
_and: [authUserRoles_bool_exp!]
_not: authUserRoles_bool_exp
_or: [authUserRoles_bool_exp!]
createdAt: timestamptz_comparison_exp
id: uuid_comparison_exp
role: String_comparison_exp
roleByRole: authRoles_bool_exp
user: users_bool_exp
userId: uuid_comparison_exp
}
"""
unique or primary key constraints on table "auth.user_roles"
"""
enum authUserRoles_constraint {
"""
unique or primary key constraint on columns "id"
"""
user_roles_pkey
"""
unique or primary key constraint on columns "user_id", "role"
"""
user_roles_user_id_role_key
}
"""
input type for inserting data into table "auth.user_roles"
"""
input authUserRoles_insert_input {
createdAt: timestamptz
id: uuid
role: String
roleByRole: authRoles_obj_rel_insert_input
user: users_obj_rel_insert_input
userId: uuid
}
"""
aggregate max on columns
"""
type authUserRoles_max_fields {
createdAt: timestamptz
id: uuid
role: String
userId: uuid
}
"""
order by max() on columns of table "auth.user_roles"
"""
input authUserRoles_max_order_by {
createdAt: order_by
id: order_by
role: order_by
userId: order_by
}
"""
aggregate min on columns
"""
type authUserRoles_min_fields {
createdAt: timestamptz
id: uuid
role: String
userId: uuid
}
"""
order by min() on columns of table "auth.user_roles"
"""
input authUserRoles_min_order_by {
createdAt: order_by
id: order_by
role: order_by
userId: order_by
}
"""
response of any mutation on the table "auth.user_roles"
"""
type authUserRoles_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [authUserRoles!]!
}
"""
on_conflict condition type for table "auth.user_roles"
"""
input authUserRoles_on_conflict {
constraint: authUserRoles_constraint!
update_columns: [authUserRoles_update_column!]! = []
where: authUserRoles_bool_exp
}
"""
Ordering options when selecting data from "auth.user_roles".
"""
input authUserRoles_order_by {
createdAt: order_by
id: order_by
role: order_by
roleByRole: authRoles_order_by
user: users_order_by
userId: order_by
}
"""
primary key columns input for table: auth.user_roles
"""
input authUserRoles_pk_columns_input {
id: uuid!
}
"""
select columns of table "auth.user_roles"
"""
enum authUserRoles_select_column {
"""
column name
"""
createdAt
"""
column name
"""
id
"""
column name
"""
role
"""
column name
"""
userId
}
"""
input type for updating data in table "auth.user_roles"
"""
input authUserRoles_set_input {
createdAt: timestamptz
id: uuid
role: String
userId: uuid
}
"""
Streaming cursor of the table "authUserRoles"
"""
input authUserRoles_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: authUserRoles_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input authUserRoles_stream_cursor_value_input {
createdAt: timestamptz
id: uuid
role: String
userId: uuid
}
"""
update columns of table "auth.user_roles"
"""
enum authUserRoles_update_column {
"""
column name
"""
createdAt
"""
column name
"""
id
"""
column name
"""
role
"""
column name
"""
userId
}
# Batch-update payload for table "auth.user_roles": applies `_set` to every row
# matching `where`. (Hasura emits no description for *_updates input types.)
input authUserRoles_updates {
"""
sets the columns of the filtered rows to the given values
"""
_set: authUserRoles_set_input
"""
filter the rows which have to be updated
"""
where: authUserRoles_bool_exp!
}
"""
User webauthn security keys. Don't modify its structure as Hasura Auth relies on it to function properly.
"""
type authUserSecurityKeys {
counter: bigint!
credentialId: String!
credentialPublicKey: bytea
id: uuid!
nickname: String
transports: String!
"""
An object relationship
"""
user: users!
userId: uuid!
}
"""
aggregated selection of "auth.user_security_keys"
"""
type authUserSecurityKeys_aggregate {
aggregate: authUserSecurityKeys_aggregate_fields
nodes: [authUserSecurityKeys!]!
}
input authUserSecurityKeys_aggregate_bool_exp {
count: authUserSecurityKeys_aggregate_bool_exp_count
}
input authUserSecurityKeys_aggregate_bool_exp_count {
arguments: [authUserSecurityKeys_select_column!]
distinct: Boolean
filter: authUserSecurityKeys_bool_exp
predicate: Int_comparison_exp!
}
"""
aggregate fields of "auth.user_security_keys"
"""
type authUserSecurityKeys_aggregate_fields {
avg: authUserSecurityKeys_avg_fields
count(columns: [authUserSecurityKeys_select_column!], distinct: Boolean): Int!
max: authUserSecurityKeys_max_fields
min: authUserSecurityKeys_min_fields
stddev: authUserSecurityKeys_stddev_fields
stddev_pop: authUserSecurityKeys_stddev_pop_fields
stddev_samp: authUserSecurityKeys_stddev_samp_fields
sum: authUserSecurityKeys_sum_fields
var_pop: authUserSecurityKeys_var_pop_fields
var_samp: authUserSecurityKeys_var_samp_fields
variance: authUserSecurityKeys_variance_fields
}
"""
order by aggregate values of table "auth.user_security_keys"
"""
input authUserSecurityKeys_aggregate_order_by {
avg: authUserSecurityKeys_avg_order_by
count: order_by
max: authUserSecurityKeys_max_order_by
min: authUserSecurityKeys_min_order_by
stddev: authUserSecurityKeys_stddev_order_by
stddev_pop: authUserSecurityKeys_stddev_pop_order_by
stddev_samp: authUserSecurityKeys_stddev_samp_order_by
sum: authUserSecurityKeys_sum_order_by
var_pop: authUserSecurityKeys_var_pop_order_by
var_samp: authUserSecurityKeys_var_samp_order_by
variance: authUserSecurityKeys_variance_order_by
}
"""
input type for inserting array relation for remote table "auth.user_security_keys"
"""
input authUserSecurityKeys_arr_rel_insert_input {
data: [authUserSecurityKeys_insert_input!]!
"""
upsert condition
"""
on_conflict: authUserSecurityKeys_on_conflict
}
"""
aggregate avg on columns
"""
type authUserSecurityKeys_avg_fields {
counter: Float
}
"""
order by avg() on columns of table "auth.user_security_keys"
"""
input authUserSecurityKeys_avg_order_by {
counter: order_by
}
"""
Boolean expression to filter rows from the table "auth.user_security_keys". All fields are combined with a logical 'AND'.
"""
input authUserSecurityKeys_bool_exp {
_and: [authUserSecurityKeys_bool_exp!]
_not: authUserSecurityKeys_bool_exp
_or: [authUserSecurityKeys_bool_exp!]
counter: bigint_comparison_exp
credentialId: String_comparison_exp
credentialPublicKey: bytea_comparison_exp
id: uuid_comparison_exp
nickname: String_comparison_exp
transports: String_comparison_exp
user: users_bool_exp
userId: uuid_comparison_exp
}
"""
unique or primary key constraints on table "auth.user_security_keys"
"""
enum authUserSecurityKeys_constraint {
"""
unique or primary key constraint on columns "credential_id"
"""
user_security_key_credential_id_key
"""
unique or primary key constraint on columns "id"
"""
user_security_keys_pkey
}
"""
input type for incrementing numeric columns in table "auth.user_security_keys"
"""
input authUserSecurityKeys_inc_input {
counter: bigint
}
"""
input type for inserting data into table "auth.user_security_keys"
"""
input authUserSecurityKeys_insert_input {
counter: bigint
credentialId: String
credentialPublicKey: bytea
id: uuid
nickname: String
transports: String
user: users_obj_rel_insert_input
userId: uuid
}
"""
aggregate max on columns
"""
type authUserSecurityKeys_max_fields {
counter: bigint
credentialId: String
id: uuid
nickname: String
transports: String
userId: uuid
}
"""
order by max() on columns of table "auth.user_security_keys"
"""
input authUserSecurityKeys_max_order_by {
counter: order_by
credentialId: order_by
id: order_by
nickname: order_by
transports: order_by
userId: order_by
}
"""
aggregate min on columns
"""
type authUserSecurityKeys_min_fields {
counter: bigint
credentialId: String
id: uuid
nickname: String
transports: String
userId: uuid
}
"""
order by min() on columns of table "auth.user_security_keys"
"""
input authUserSecurityKeys_min_order_by {
counter: order_by
credentialId: order_by
id: order_by
nickname: order_by
transports: order_by
userId: order_by
}
"""
response of any mutation on the table "auth.user_security_keys"
"""
type authUserSecurityKeys_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [authUserSecurityKeys!]!
}
"""
on_conflict condition type for table "auth.user_security_keys"
"""
input authUserSecurityKeys_on_conflict {
constraint: authUserSecurityKeys_constraint!
update_columns: [authUserSecurityKeys_update_column!]! = []
where: authUserSecurityKeys_bool_exp
}
"""
Ordering options when selecting data from "auth.user_security_keys".
"""
input authUserSecurityKeys_order_by {
counter: order_by
credentialId: order_by
credentialPublicKey: order_by
id: order_by
nickname: order_by
transports: order_by
user: users_order_by
userId: order_by
}
"""
primary key columns input for table: auth.user_security_keys
"""
input authUserSecurityKeys_pk_columns_input {
id: uuid!
}
"""
select columns of table "auth.user_security_keys"
"""
enum authUserSecurityKeys_select_column {
"""
column name
"""
counter
"""
column name
"""
credentialId
"""
column name
"""
credentialPublicKey
"""
column name
"""
id
"""
column name
"""
nickname
"""
column name
"""
transports
"""
column name
"""
userId
}
"""
input type for updating data in table "auth.user_security_keys"
"""
input authUserSecurityKeys_set_input {
counter: bigint
credentialId: String
credentialPublicKey: bytea
id: uuid
nickname: String
transports: String
userId: uuid
}
"""
aggregate stddev on columns
"""
type authUserSecurityKeys_stddev_fields {
counter: Float
}
"""
order by stddev() on columns of table "auth.user_security_keys"
"""
input authUserSecurityKeys_stddev_order_by {
counter: order_by
}
"""
aggregate stddev_pop on columns
"""
type authUserSecurityKeys_stddev_pop_fields {
counter: Float
}
"""
order by stddev_pop() on columns of table "auth.user_security_keys"
"""
input authUserSecurityKeys_stddev_pop_order_by {
counter: order_by
}
"""
aggregate stddev_samp on columns
"""
type authUserSecurityKeys_stddev_samp_fields {
counter: Float
}
"""
order by stddev_samp() on columns of table "auth.user_security_keys"
"""
input authUserSecurityKeys_stddev_samp_order_by {
counter: order_by
}
"""
Streaming cursor of the table "authUserSecurityKeys"
"""
input authUserSecurityKeys_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: authUserSecurityKeys_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input authUserSecurityKeys_stream_cursor_value_input {
counter: bigint
credentialId: String
credentialPublicKey: bytea
id: uuid
nickname: String
transports: String
userId: uuid
}
"""
aggregate sum on columns
"""
type authUserSecurityKeys_sum_fields {
counter: bigint
}
"""
order by sum() on columns of table "auth.user_security_keys"
"""
input authUserSecurityKeys_sum_order_by {
counter: order_by
}
"""
update columns of table "auth.user_security_keys"
"""
enum authUserSecurityKeys_update_column {
"""
column name
"""
counter
"""
column name
"""
credentialId
"""
column name
"""
credentialPublicKey
"""
column name
"""
id
"""
column name
"""
nickname
"""
column name
"""
transports
"""
column name
"""
userId
}
# Batch-update payload for table "auth.user_security_keys". Unlike the other
# *_updates inputs in this schema it also carries `_inc`, because the table has
# a numeric column (`counter: bigint`) that can be incremented in place.
input authUserSecurityKeys_updates {
"""
increments the numeric columns with given value of the filtered values
"""
_inc: authUserSecurityKeys_inc_input
"""
sets the columns of the filtered rows to the given values
"""
_set: authUserSecurityKeys_set_input
"""
filter the rows which have to be updated
"""
where: authUserSecurityKeys_bool_exp!
}
"""
aggregate var_pop on columns
"""
type authUserSecurityKeys_var_pop_fields {
counter: Float
}
"""
order by var_pop() on columns of table "auth.user_security_keys"
"""
input authUserSecurityKeys_var_pop_order_by {
counter: order_by
}
"""
aggregate var_samp on columns
"""
type authUserSecurityKeys_var_samp_fields {
counter: Float
}
"""
order by var_samp() on columns of table "auth.user_security_keys"
"""
input authUserSecurityKeys_var_samp_order_by {
counter: order_by
}
"""
aggregate variance on columns
"""
type authUserSecurityKeys_variance_fields {
counter: Float
}
"""
order by variance() on columns of table "auth.user_security_keys"
"""
input authUserSecurityKeys_variance_order_by {
counter: order_by
}
# Custom scalar mapped to the Postgres "bigint" (64-bit integer) column type;
# used in this schema for authUserSecurityKeys.counter.
scalar bigint
"""
Boolean expression to compare columns of type "bigint". All fields are combined with logical 'AND'.
"""
input bigint_comparison_exp {
_eq: bigint
_gt: bigint
_gte: bigint
_in: [bigint!]
_is_null: Boolean
_lt: bigint
_lte: bigint
_neq: bigint
_nin: [bigint!]
}
"""
columns and relationships of "storage.buckets"
"""
type buckets {
cacheControl: String
createdAt: timestamptz!
downloadExpiration: Int!
"""
An array relationship
"""
files(
"""
distinct select on columns
"""
distinct_on: [files_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [files_order_by!]
"""
filter the rows returned
"""
where: files_bool_exp
): [files!]!
"""
An aggregate relationship
"""
files_aggregate(
"""
distinct select on columns
"""
distinct_on: [files_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [files_order_by!]
"""
filter the rows returned
"""
where: files_bool_exp
): files_aggregate!
id: String!
maxUploadFileSize: Int!
minUploadFileSize: Int!
presignedUrlsEnabled: Boolean!
updatedAt: timestamptz!
}
"""
aggregated selection of "storage.buckets"
"""
type buckets_aggregate {
aggregate: buckets_aggregate_fields
nodes: [buckets!]!
}
"""
aggregate fields of "storage.buckets"
"""
type buckets_aggregate_fields {
avg: buckets_avg_fields
count(columns: [buckets_select_column!], distinct: Boolean): Int!
max: buckets_max_fields
min: buckets_min_fields
stddev: buckets_stddev_fields
stddev_pop: buckets_stddev_pop_fields
stddev_samp: buckets_stddev_samp_fields
sum: buckets_sum_fields
var_pop: buckets_var_pop_fields
var_samp: buckets_var_samp_fields
variance: buckets_variance_fields
}
"""
aggregate avg on columns
"""
type buckets_avg_fields {
downloadExpiration: Float
maxUploadFileSize: Float
minUploadFileSize: Float
}
"""
Boolean expression to filter rows from the table "storage.buckets". All fields are combined with a logical 'AND'.
"""
input buckets_bool_exp {
_and: [buckets_bool_exp!]
_not: buckets_bool_exp
_or: [buckets_bool_exp!]
cacheControl: String_comparison_exp
createdAt: timestamptz_comparison_exp
downloadExpiration: Int_comparison_exp
files: files_bool_exp
files_aggregate: files_aggregate_bool_exp
id: String_comparison_exp
maxUploadFileSize: Int_comparison_exp
minUploadFileSize: Int_comparison_exp
presignedUrlsEnabled: Boolean_comparison_exp
updatedAt: timestamptz_comparison_exp
}
"""
unique or primary key constraints on table "storage.buckets"
"""
enum buckets_constraint {
"""
unique or primary key constraint on columns "id"
"""
buckets_pkey
}
"""
input type for incrementing numeric columns in table "storage.buckets"
"""
input buckets_inc_input {
downloadExpiration: Int
maxUploadFileSize: Int
minUploadFileSize: Int
}
"""
input type for inserting data into table "storage.buckets"
"""
input buckets_insert_input {
cacheControl: String
createdAt: timestamptz
downloadExpiration: Int
files: files_arr_rel_insert_input
id: String
maxUploadFileSize: Int
minUploadFileSize: Int
presignedUrlsEnabled: Boolean
updatedAt: timestamptz
}
"""
aggregate max on columns
"""
type buckets_max_fields {
cacheControl: String
createdAt: timestamptz
downloadExpiration: Int
id: String
maxUploadFileSize: Int
minUploadFileSize: Int
updatedAt: timestamptz
}
"""
aggregate min on columns
"""
type buckets_min_fields {
cacheControl: String
createdAt: timestamptz
downloadExpiration: Int
id: String
maxUploadFileSize: Int
minUploadFileSize: Int
updatedAt: timestamptz
}
"""
response of any mutation on the table "storage.buckets"
"""
type buckets_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [buckets!]!
}
"""
input type for inserting object relation for remote table "storage.buckets"
"""
input buckets_obj_rel_insert_input {
data: buckets_insert_input!
"""
upsert condition
"""
on_conflict: buckets_on_conflict
}
"""
on_conflict condition type for table "storage.buckets"
"""
input buckets_on_conflict {
constraint: buckets_constraint!
update_columns: [buckets_update_column!]! = []
where: buckets_bool_exp
}
"""
Ordering options when selecting data from "storage.buckets".
"""
input buckets_order_by {
cacheControl: order_by
createdAt: order_by
downloadExpiration: order_by
files_aggregate: files_aggregate_order_by
id: order_by
maxUploadFileSize: order_by
minUploadFileSize: order_by
presignedUrlsEnabled: order_by
updatedAt: order_by
}
"""
primary key columns input for table: storage.buckets
"""
input buckets_pk_columns_input {
id: String!
}
"""
select columns of table "storage.buckets"
"""
enum buckets_select_column {
"""
column name
"""
cacheControl
"""
column name
"""
createdAt
"""
column name
"""
downloadExpiration
"""
column name
"""
id
"""
column name
"""
maxUploadFileSize
"""
column name
"""
minUploadFileSize
"""
column name
"""
presignedUrlsEnabled
"""
column name
"""
updatedAt
}
"""
input type for updating data in table "storage.buckets"
"""
input buckets_set_input {
cacheControl: String
createdAt: timestamptz
downloadExpiration: Int
id: String
maxUploadFileSize: Int
minUploadFileSize: Int
presignedUrlsEnabled: Boolean
updatedAt: timestamptz
}
"""
aggregate stddev on columns
"""
type buckets_stddev_fields {
downloadExpiration: Float
maxUploadFileSize: Float
minUploadFileSize: Float
}
"""
aggregate stddev_pop on columns
"""
type buckets_stddev_pop_fields {
downloadExpiration: Float
maxUploadFileSize: Float
minUploadFileSize: Float
}
"""
aggregate stddev_samp on columns
"""
type buckets_stddev_samp_fields {
downloadExpiration: Float
maxUploadFileSize: Float
minUploadFileSize: Float
}
"""
Streaming cursor of the table "buckets"
"""
input buckets_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: buckets_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input buckets_stream_cursor_value_input {
cacheControl: String
createdAt: timestamptz
downloadExpiration: Int
id: String
maxUploadFileSize: Int
minUploadFileSize: Int
presignedUrlsEnabled: Boolean
updatedAt: timestamptz
}
"""
aggregate sum on columns
"""
type buckets_sum_fields {
downloadExpiration: Int
maxUploadFileSize: Int
minUploadFileSize: Int
}
"""
update columns of table "storage.buckets"
"""
enum buckets_update_column {
"""
column name
"""
cacheControl
"""
column name
"""
createdAt
"""
column name
"""
downloadExpiration
"""
column name
"""
id
"""
column name
"""
maxUploadFileSize
"""
column name
"""
minUploadFileSize
"""
column name
"""
presignedUrlsEnabled
"""
column name
"""
updatedAt
}
# Batch-update payload for table "storage.buckets". Carries `_inc` because the
# table has numeric columns (downloadExpiration, maxUploadFileSize,
# minUploadFileSize) that can be incremented in place.
input buckets_updates {
"""
increments the numeric columns with given value of the filtered values
"""
_inc: buckets_inc_input
"""
sets the columns of the filtered rows to the given values
"""
_set: buckets_set_input
"""
filter the rows which have to be updated
"""
where: buckets_bool_exp!
}
"""
aggregate var_pop on columns
"""
type buckets_var_pop_fields {
downloadExpiration: Float
maxUploadFileSize: Float
minUploadFileSize: Float
}
"""
aggregate var_samp on columns
"""
type buckets_var_samp_fields {
downloadExpiration: Float
maxUploadFileSize: Float
minUploadFileSize: Float
}
"""
aggregate variance on columns
"""
type buckets_variance_fields {
downloadExpiration: Float
maxUploadFileSize: Float
minUploadFileSize: Float
}
# Custom scalar mapped to the Postgres "bytea" (binary data) column type; used
# in this schema for authUserSecurityKeys.credentialPublicKey.
scalar bytea
"""
Boolean expression to compare columns of type "bytea". All fields are combined with logical 'AND'.
"""
input bytea_comparison_exp {
_eq: bytea
_gt: bytea
_gte: bytea
_in: [bytea!]
_is_null: Boolean
_lt: bytea
_lte: bytea
_neq: bytea
_nin: [bytea!]
}
# Custom scalar mapped to the Postgres "citext" extension type (case-insensitive
# text). Its comparison input mirrors String_comparison_exp, including the full
# set of LIKE / POSIX-regex / SIMILAR TO operators.
scalar citext
"""
Boolean expression to compare columns of type "citext". All fields are combined with logical 'AND'.
"""
input citext_comparison_exp {
_eq: citext
_gt: citext
_gte: citext
"""
does the column match the given case-insensitive pattern
"""
_ilike: citext
_in: [citext!]
"""
does the column match the given POSIX regular expression, case insensitive
"""
_iregex: citext
_is_null: Boolean
"""
does the column match the given pattern
"""
_like: citext
_lt: citext
_lte: citext
_neq: citext
"""
does the column NOT match the given case-insensitive pattern
"""
_nilike: citext
_nin: [citext!]
"""
does the column NOT match the given POSIX regular expression, case insensitive
"""
_niregex: citext
"""
does the column NOT match the given pattern
"""
_nlike: citext
"""
does the column NOT match the given POSIX regular expression, case sensitive
"""
_nregex: citext
"""
does the column NOT match the given SQL regular expression
"""
_nsimilar: citext
"""
does the column match the given POSIX regular expression, case sensitive
"""
_regex: citext
"""
does the column match the given SQL regular expression
"""
_similar: citext
}
"""
columns and relationships of "comments"
"""
type comments {
comment: String!
createdAt: timestamptz!
id: uuid!
"""
An object relationship
"""
ninjaTurtle: ninjaTurtles!
ninjaTurtleId: uuid!
updatedAt: timestamptz!
"""
An object relationship
"""
user: users!
userId: uuid!
}
"""
aggregated selection of "comments"
"""
type comments_aggregate {
aggregate: comments_aggregate_fields
nodes: [comments!]!
}
input comments_aggregate_bool_exp {
count: comments_aggregate_bool_exp_count
}
input comments_aggregate_bool_exp_count {
arguments: [comments_select_column!]
distinct: Boolean
filter: comments_bool_exp
predicate: Int_comparison_exp!
}
"""
aggregate fields of "comments"
"""
type comments_aggregate_fields {
count(columns: [comments_select_column!], distinct: Boolean): Int!
max: comments_max_fields
min: comments_min_fields
}
"""
order by aggregate values of table "comments"
"""
input comments_aggregate_order_by {
count: order_by
max: comments_max_order_by
min: comments_min_order_by
}
"""
input type for inserting array relation for remote table "comments"
"""
input comments_arr_rel_insert_input {
data: [comments_insert_input!]!
"""
upsert condition
"""
on_conflict: comments_on_conflict
}
"""
Boolean expression to filter rows from the table "comments". All fields are combined with a logical 'AND'.
"""
input comments_bool_exp {
_and: [comments_bool_exp!]
_not: comments_bool_exp
_or: [comments_bool_exp!]
comment: String_comparison_exp
createdAt: timestamptz_comparison_exp
id: uuid_comparison_exp
ninjaTurtle: ninjaTurtles_bool_exp
ninjaTurtleId: uuid_comparison_exp
updatedAt: timestamptz_comparison_exp
user: users_bool_exp
userId: uuid_comparison_exp
}
"""
unique or primary key constraints on table "comments"
"""
enum comments_constraint {
"""
unique or primary key constraint on columns "id"
"""
comments_pkey
}
"""
input type for inserting data into table "comments"
"""
input comments_insert_input {
comment: String
createdAt: timestamptz
id: uuid
ninjaTurtle: ninjaTurtles_obj_rel_insert_input
ninjaTurtleId: uuid
updatedAt: timestamptz
user: users_obj_rel_insert_input
userId: uuid
}
"""
aggregate max on columns
"""
type comments_max_fields {
comment: String
createdAt: timestamptz
id: uuid
ninjaTurtleId: uuid
updatedAt: timestamptz
userId: uuid
}
"""
order by max() on columns of table "comments"
"""
input comments_max_order_by {
comment: order_by
createdAt: order_by
id: order_by
ninjaTurtleId: order_by
updatedAt: order_by
userId: order_by
}
"""
aggregate min on columns
"""
type comments_min_fields {
comment: String
createdAt: timestamptz
id: uuid
ninjaTurtleId: uuid
updatedAt: timestamptz
userId: uuid
}
"""
order by min() on columns of table "comments"
"""
input comments_min_order_by {
comment: order_by
createdAt: order_by
id: order_by
ninjaTurtleId: order_by
updatedAt: order_by
userId: order_by
}
"""
response of any mutation on the table "comments"
"""
type comments_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [comments!]!
}
"""
on_conflict condition type for table "comments"
"""
input comments_on_conflict {
constraint: comments_constraint!
update_columns: [comments_update_column!]! = []
where: comments_bool_exp
}
"""
Ordering options when selecting data from "comments".
"""
input comments_order_by {
comment: order_by
createdAt: order_by
id: order_by
ninjaTurtle: ninjaTurtles_order_by
ninjaTurtleId: order_by
updatedAt: order_by
user: users_order_by
userId: order_by
}
"""
primary key columns input for table: comments
"""
input comments_pk_columns_input {
id: uuid!
}
"""
select columns of table "comments"
"""
enum comments_select_column {
"""
column name
"""
comment
"""
column name
"""
createdAt
"""
column name
"""
id
"""
column name
"""
ninjaTurtleId
"""
column name
"""
updatedAt
"""
column name
"""
userId
}
"""
input type for updating data in table "comments"
"""
input comments_set_input {
comment: String
createdAt: timestamptz
id: uuid
ninjaTurtleId: uuid
updatedAt: timestamptz
userId: uuid
}
"""
Streaming cursor of the table "comments"
"""
input comments_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: comments_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input comments_stream_cursor_value_input {
comment: String
createdAt: timestamptz
id: uuid
ninjaTurtleId: uuid
updatedAt: timestamptz
userId: uuid
}
"""
update columns of table "comments"
"""
enum comments_update_column {
"""
column name
"""
comment
"""
column name
"""
createdAt
"""
column name
"""
id
"""
column name
"""
ninjaTurtleId
"""
column name
"""
updatedAt
"""
column name
"""
userId
}
input comments_updates {
"""
sets the columns of the filtered rows to the given values
"""
_set: comments_set_input
"""
filter the rows which have to be updated
"""
where: comments_bool_exp!
}
"""
ordering argument of a cursor
"""
enum cursor_ordering {
"""
ascending ordering of the cursor
"""
ASC
"""
descending ordering of the cursor
"""
DESC
}
"""
columns and relationships of "storage.files"
"""
type files {
"""
An object relationship
"""
bucket: buckets!
bucketId: String!
createdAt: timestamptz!
etag: String
id: uuid!
isUploaded: Boolean
metadata(
"""
JSON select path
"""
path: String
): jsonb
mimeType: String
name: String
size: Int
updatedAt: timestamptz!
uploadedByUserId: uuid
}
"""
aggregated selection of "storage.files"
"""
type files_aggregate {
aggregate: files_aggregate_fields
nodes: [files!]!
}
input files_aggregate_bool_exp {
bool_and: files_aggregate_bool_exp_bool_and
bool_or: files_aggregate_bool_exp_bool_or
count: files_aggregate_bool_exp_count
}
input files_aggregate_bool_exp_bool_and {
arguments: files_select_column_files_aggregate_bool_exp_bool_and_arguments_columns!
distinct: Boolean
filter: files_bool_exp
predicate: Boolean_comparison_exp!
}
input files_aggregate_bool_exp_bool_or {
arguments: files_select_column_files_aggregate_bool_exp_bool_or_arguments_columns!
distinct: Boolean
filter: files_bool_exp
predicate: Boolean_comparison_exp!
}
input files_aggregate_bool_exp_count {
arguments: [files_select_column!]
distinct: Boolean
filter: files_bool_exp
predicate: Int_comparison_exp!
}
"""
aggregate fields of "storage.files"
"""
type files_aggregate_fields {
avg: files_avg_fields
count(columns: [files_select_column!], distinct: Boolean): Int!
max: files_max_fields
min: files_min_fields
stddev: files_stddev_fields
stddev_pop: files_stddev_pop_fields
stddev_samp: files_stddev_samp_fields
sum: files_sum_fields
var_pop: files_var_pop_fields
var_samp: files_var_samp_fields
variance: files_variance_fields
}
"""
order by aggregate values of table "storage.files"
"""
input files_aggregate_order_by {
avg: files_avg_order_by
count: order_by
max: files_max_order_by
min: files_min_order_by
stddev: files_stddev_order_by
stddev_pop: files_stddev_pop_order_by
stddev_samp: files_stddev_samp_order_by
sum: files_sum_order_by
var_pop: files_var_pop_order_by
var_samp: files_var_samp_order_by
variance: files_variance_order_by
}
"""
append existing jsonb value of filtered columns with new jsonb value
"""
input files_append_input {
metadata: jsonb
}
"""
input type for inserting array relation for remote table "storage.files"
"""
input files_arr_rel_insert_input {
data: [files_insert_input!]!
"""
upsert condition
"""
on_conflict: files_on_conflict
}
"""
aggregate avg on columns
"""
type files_avg_fields {
size: Float
}
"""
order by avg() on columns of table "storage.files"
"""
input files_avg_order_by {
size: order_by
}
"""
Boolean expression to filter rows from the table "storage.files". All fields are combined with a logical 'AND'.
"""
input files_bool_exp {
_and: [files_bool_exp!]
_not: files_bool_exp
_or: [files_bool_exp!]
bucket: buckets_bool_exp
bucketId: String_comparison_exp
createdAt: timestamptz_comparison_exp
etag: String_comparison_exp
id: uuid_comparison_exp
isUploaded: Boolean_comparison_exp
metadata: jsonb_comparison_exp
mimeType: String_comparison_exp
name: String_comparison_exp
size: Int_comparison_exp
updatedAt: timestamptz_comparison_exp
uploadedByUserId: uuid_comparison_exp
}
"""
unique or primary key constraints on table "storage.files"
"""
enum files_constraint {
"""
unique or primary key constraint on columns "id"
"""
files_pkey
}
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
input files_delete_at_path_input {
metadata: [String!]
}
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
input files_delete_elem_input {
metadata: Int
}
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
input files_delete_key_input {
metadata: String
}
"""
input type for incrementing numeric columns in table "storage.files"
"""
input files_inc_input {
size: Int
}
"""
input type for inserting data into table "storage.files"
"""
input files_insert_input {
bucket: buckets_obj_rel_insert_input
bucketId: String
createdAt: timestamptz
etag: String
id: uuid
isUploaded: Boolean
metadata: jsonb
mimeType: String
name: String
size: Int
updatedAt: timestamptz
uploadedByUserId: uuid
}
"""
aggregate max on columns
"""
type files_max_fields {
bucketId: String
createdAt: timestamptz
etag: String
id: uuid
mimeType: String
name: String
size: Int
updatedAt: timestamptz
uploadedByUserId: uuid
}
"""
order by max() on columns of table "storage.files"
"""
input files_max_order_by {
bucketId: order_by
createdAt: order_by
etag: order_by
id: order_by
mimeType: order_by
name: order_by
size: order_by
updatedAt: order_by
uploadedByUserId: order_by
}
"""
aggregate min on columns
"""
type files_min_fields {
bucketId: String
createdAt: timestamptz
etag: String
id: uuid
mimeType: String
name: String
size: Int
updatedAt: timestamptz
uploadedByUserId: uuid
}
"""
order by min() on columns of table "storage.files"
"""
input files_min_order_by {
bucketId: order_by
createdAt: order_by
etag: order_by
id: order_by
mimeType: order_by
name: order_by
size: order_by
updatedAt: order_by
uploadedByUserId: order_by
}
"""
response of any mutation on the table "storage.files"
"""
type files_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [files!]!
}
"""
input type for inserting object relation for remote table "storage.files"
"""
input files_obj_rel_insert_input {
data: files_insert_input!
"""
upsert condition
"""
on_conflict: files_on_conflict
}
"""
on_conflict condition type for table "storage.files"
"""
input files_on_conflict {
constraint: files_constraint!
update_columns: [files_update_column!]! = []
where: files_bool_exp
}
"""
Ordering options when selecting data from "storage.files".
"""
input files_order_by {
bucket: buckets_order_by
bucketId: order_by
createdAt: order_by
etag: order_by
id: order_by
isUploaded: order_by
metadata: order_by
mimeType: order_by
name: order_by
size: order_by
updatedAt: order_by
uploadedByUserId: order_by
}
"""
primary key columns input for table: storage.files
"""
input files_pk_columns_input {
id: uuid!
}
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
input files_prepend_input {
metadata: jsonb
}
"""
select columns of table "storage.files"
"""
enum files_select_column {
"""
column name
"""
bucketId
"""
column name
"""
createdAt
"""
column name
"""
etag
"""
column name
"""
id
"""
column name
"""
isUploaded
"""
column name
"""
metadata
"""
column name
"""
mimeType
"""
column name
"""
name
"""
column name
"""
size
"""
column name
"""
updatedAt
"""
column name
"""
uploadedByUserId
}
"""
select "files_aggregate_bool_exp_bool_and_arguments_columns" columns of table "storage.files"
"""
enum files_select_column_files_aggregate_bool_exp_bool_and_arguments_columns {
"""
column name
"""
isUploaded
}
"""
select "files_aggregate_bool_exp_bool_or_arguments_columns" columns of table "storage.files"
"""
enum files_select_column_files_aggregate_bool_exp_bool_or_arguments_columns {
"""
column name
"""
isUploaded
}
"""
input type for updating data in table "storage.files"
"""
input files_set_input {
bucketId: String
createdAt: timestamptz
etag: String
id: uuid
isUploaded: Boolean
metadata: jsonb
mimeType: String
name: String
size: Int
updatedAt: timestamptz
uploadedByUserId: uuid
}
"""
aggregate stddev on columns
"""
type files_stddev_fields {
size: Float
}
"""
order by stddev() on columns of table "storage.files"
"""
input files_stddev_order_by {
size: order_by
}
"""
aggregate stddev_pop on columns
"""
type files_stddev_pop_fields {
size: Float
}
"""
order by stddev_pop() on columns of table "storage.files"
"""
input files_stddev_pop_order_by {
size: order_by
}
"""
aggregate stddev_samp on columns
"""
type files_stddev_samp_fields {
size: Float
}
"""
order by stddev_samp() on columns of table "storage.files"
"""
input files_stddev_samp_order_by {
size: order_by
}
"""
Streaming cursor of the table "files"
"""
input files_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: files_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input files_stream_cursor_value_input {
bucketId: String
createdAt: timestamptz
etag: String
id: uuid
isUploaded: Boolean
metadata: jsonb
mimeType: String
name: String
size: Int
updatedAt: timestamptz
uploadedByUserId: uuid
}
"""
aggregate sum on columns
"""
type files_sum_fields {
size: Int
}
"""
order by sum() on columns of table "storage.files"
"""
input files_sum_order_by {
size: order_by
}
"""
update columns of table "storage.files"
"""
enum files_update_column {
"""
column name
"""
bucketId
"""
column name
"""
createdAt
"""
column name
"""
etag
"""
column name
"""
id
"""
column name
"""
isUploaded
"""
column name
"""
metadata
"""
column name
"""
mimeType
"""
column name
"""
name
"""
column name
"""
size
"""
column name
"""
updatedAt
"""
column name
"""
uploadedByUserId
}
input files_updates {
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: files_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: files_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: files_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: files_delete_key_input
"""
increments the numeric columns with given value of the filtered values
"""
_inc: files_inc_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: files_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: files_set_input
"""
filter the rows which have to be updated
"""
where: files_bool_exp!
}
"""
aggregate var_pop on columns
"""
type files_var_pop_fields {
size: Float
}
"""
order by var_pop() on columns of table "storage.files"
"""
input files_var_pop_order_by {
size: order_by
}
"""
aggregate var_samp on columns
"""
type files_var_samp_fields {
size: Float
}
"""
order by var_samp() on columns of table "storage.files"
"""
input files_var_samp_order_by {
size: order_by
}
"""
aggregate variance on columns
"""
type files_variance_fields {
size: Float
}
"""
order by variance() on columns of table "storage.files"
"""
input files_variance_order_by {
size: order_by
}
# --- Postgres jsonb scalar and its comparison operators ---------------------
scalar jsonb
input jsonb_cast_exp {
String: String_comparison_exp
}
"""
Boolean expression to compare columns of type "jsonb". All fields are combined with logical 'AND'.
"""
input jsonb_comparison_exp {
_cast: jsonb_cast_exp
"""
is the column contained in the given json value
"""
_contained_in: jsonb
"""
does the column contain the given json value at the top level
"""
_contains: jsonb
_eq: jsonb
_gt: jsonb
_gte: jsonb
"""
does the string exist as a top-level key in the column
"""
_has_key: String
"""
do all of these strings exist as top-level keys in the column
"""
_has_keys_all: [String!]
"""
do any of these strings exist as top-level keys in the column
"""
_has_keys_any: [String!]
_in: [jsonb!]
_is_null: Boolean
_lt: jsonb
_lte: jsonb
_neq: jsonb
_nin: [jsonb!]
}
"""
columns and relationships of "movies"
"""
type movies {
created_at: timestamptz
director: String
genre: String
id: uuid!
rating: numeric
release_year: Int
title: String!
updated_at: timestamptz
}
"""
aggregated selection of "movies"
"""
type movies_aggregate {
aggregate: movies_aggregate_fields
nodes: [movies!]!
}
"""
aggregate fields of "movies"
"""
type movies_aggregate_fields {
avg: movies_avg_fields
count(columns: [movies_select_column!], distinct: Boolean): Int!
max: movies_max_fields
min: movies_min_fields
stddev: movies_stddev_fields
stddev_pop: movies_stddev_pop_fields
stddev_samp: movies_stddev_samp_fields
sum: movies_sum_fields
var_pop: movies_var_pop_fields
var_samp: movies_var_samp_fields
variance: movies_variance_fields
}
"""
aggregate avg on columns
"""
type movies_avg_fields {
rating: Float
release_year: Float
}
"""
Boolean expression to filter rows from the table "movies". All fields are combined with a logical 'AND'.
"""
input movies_bool_exp {
_and: [movies_bool_exp!]
_not: movies_bool_exp
_or: [movies_bool_exp!]
created_at: timestamptz_comparison_exp
director: String_comparison_exp
genre: String_comparison_exp
id: uuid_comparison_exp
rating: numeric_comparison_exp
release_year: Int_comparison_exp
title: String_comparison_exp
updated_at: timestamptz_comparison_exp
}
"""
unique or primary key constraints on table "movies"
"""
enum movies_constraint {
"""
unique or primary key constraint on columns "id"
"""
movies_pkey
}
"""
input type for incrementing numeric columns in table "movies"
"""
input movies_inc_input {
rating: numeric
release_year: Int
}
"""
input type for inserting data into table "movies"
"""
input movies_insert_input {
created_at: timestamptz
director: String
genre: String
id: uuid
rating: numeric
release_year: Int
title: String
updated_at: timestamptz
}
"""
aggregate max on columns
"""
type movies_max_fields {
created_at: timestamptz
director: String
genre: String
id: uuid
rating: numeric
release_year: Int
title: String
updated_at: timestamptz
}
"""
aggregate min on columns
"""
type movies_min_fields {
created_at: timestamptz
director: String
genre: String
id: uuid
rating: numeric
release_year: Int
title: String
updated_at: timestamptz
}
"""
response of any mutation on the table "movies"
"""
type movies_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [movies!]!
}
"""
on_conflict condition type for table "movies"
"""
input movies_on_conflict {
constraint: movies_constraint!
update_columns: [movies_update_column!]! = []
where: movies_bool_exp
}
"""
Ordering options when selecting data from "movies".
"""
input movies_order_by {
created_at: order_by
director: order_by
genre: order_by
id: order_by
rating: order_by
release_year: order_by
title: order_by
updated_at: order_by
}
"""
primary key columns input for table: movies
"""
input movies_pk_columns_input {
id: uuid!
}
"""
select columns of table "movies"
"""
enum movies_select_column {
"""
column name
"""
created_at
"""
column name
"""
director
"""
column name
"""
genre
"""
column name
"""
id
"""
column name
"""
rating
"""
column name
"""
release_year
"""
column name
"""
title
"""
column name
"""
updated_at
}
"""
input type for updating data in table "movies"
"""
input movies_set_input {
created_at: timestamptz
director: String
genre: String
id: uuid
rating: numeric
release_year: Int
title: String
updated_at: timestamptz
}
"""
aggregate stddev on columns
"""
type movies_stddev_fields {
rating: Float
release_year: Float
}
"""
aggregate stddev_pop on columns
"""
type movies_stddev_pop_fields {
rating: Float
release_year: Float
}
"""
aggregate stddev_samp on columns
"""
type movies_stddev_samp_fields {
rating: Float
release_year: Float
}
"""
Streaming cursor of the table "movies"
"""
input movies_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: movies_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input movies_stream_cursor_value_input {
created_at: timestamptz
director: String
genre: String
id: uuid
rating: numeric
release_year: Int
title: String
updated_at: timestamptz
}
"""
aggregate sum on columns
"""
type movies_sum_fields {
rating: numeric
release_year: Int
}
"""
update columns of table "movies"
"""
enum movies_update_column {
"""
column name
"""
created_at
"""
column name
"""
director
"""
column name
"""
genre
"""
column name
"""
id
"""
column name
"""
rating
"""
column name
"""
release_year
"""
column name
"""
title
"""
column name
"""
updated_at
}
input movies_updates {
"""
increments the numeric columns with given value of the filtered values
"""
_inc: movies_inc_input
"""
sets the columns of the filtered rows to the given values
"""
_set: movies_set_input
"""
filter the rows which have to be updated
"""
where: movies_bool_exp!
}
"""
aggregate var_pop on columns
"""
type movies_var_pop_fields {
rating: Float
release_year: Float
}
"""
aggregate var_samp on columns
"""
type movies_var_samp_fields {
rating: Float
release_year: Float
}
"""
aggregate variance on columns
"""
type movies_variance_fields {
rating: Float
release_year: Float
}
"""
mutation root
"""
type mutation_root {
"""
delete single row from the table: "attachments"
"""
deleteAttachment(file_id: uuid!, taskID: uuid!): attachments
"""
delete data from the table: "attachments"
"""
deleteAttachments(
"""
filter the rows which have to be deleted
"""
where: attachments_bool_exp!
): attachments_mutation_response
"""
delete single row from the table: "auth.providers"
"""
deleteAuthProvider(id: String!): authProviders
"""
delete single row from the table: "auth.provider_requests"
"""
deleteAuthProviderRequest(id: uuid!): authProviderRequests
"""
delete data from the table: "auth.provider_requests"
"""
deleteAuthProviderRequests(
"""
filter the rows which have to be deleted
"""
where: authProviderRequests_bool_exp!
): authProviderRequests_mutation_response
"""
delete data from the table: "auth.providers"
"""
deleteAuthProviders(
"""
filter the rows which have to be deleted
"""
where: authProviders_bool_exp!
): authProviders_mutation_response
"""
delete single row from the table: "auth.refresh_tokens"
"""
deleteAuthRefreshToken(id: uuid!): authRefreshTokens
"""
delete single row from the table: "auth.refresh_token_types"
"""
deleteAuthRefreshTokenType(value: String!): authRefreshTokenTypes
"""
delete data from the table: "auth.refresh_token_types"
"""
deleteAuthRefreshTokenTypes(
"""
filter the rows which have to be deleted
"""
where: authRefreshTokenTypes_bool_exp!
): authRefreshTokenTypes_mutation_response
"""
delete data from the table: "auth.refresh_tokens"
"""
deleteAuthRefreshTokens(
"""
filter the rows which have to be deleted
"""
where: authRefreshTokens_bool_exp!
): authRefreshTokens_mutation_response
"""
delete single row from the table: "auth.roles"
"""
deleteAuthRole(role: String!): authRoles
"""
delete data from the table: "auth.roles"
"""
deleteAuthRoles(
"""
filter the rows which have to be deleted
"""
where: authRoles_bool_exp!
): authRoles_mutation_response
"""
delete single row from the table: "auth.user_providers"
"""
deleteAuthUserProvider(id: uuid!): authUserProviders
"""
delete data from the table: "auth.user_providers"
"""
deleteAuthUserProviders(
"""
filter the rows which have to be deleted
"""
where: authUserProviders_bool_exp!
): authUserProviders_mutation_response
"""
delete single row from the table: "auth.user_roles"
"""
deleteAuthUserRole(id: uuid!): authUserRoles
"""
delete data from the table: "auth.user_roles"
"""
deleteAuthUserRoles(
"""
filter the rows which have to be deleted
"""
where: authUserRoles_bool_exp!
): authUserRoles_mutation_response
"""
delete single row from the table: "auth.user_security_keys"
"""
deleteAuthUserSecurityKey(id: uuid!): authUserSecurityKeys
"""
delete data from the table: "auth.user_security_keys"
"""
deleteAuthUserSecurityKeys(
"""
filter the rows which have to be deleted
"""
where: authUserSecurityKeys_bool_exp!
): authUserSecurityKeys_mutation_response
"""
delete single row from the table: "storage.buckets"
"""
deleteBucket(id: String!): buckets
"""
delete data from the table: "storage.buckets"
"""
deleteBuckets(
"""
filter the rows which have to be deleted
"""
where: buckets_bool_exp!
): buckets_mutation_response
"""
delete single row from the table: "comments"
"""
deleteComment(id: uuid!): comments
"""
delete data from the table: "comments"
"""
deleteComments(
"""
filter the rows which have to be deleted
"""
where: comments_bool_exp!
): comments_mutation_response
"""
delete single row from the table: "storage.files"
"""
deleteFile(id: uuid!): files
"""
delete data from the table: "storage.files"
"""
deleteFiles(
"""
filter the rows which have to be deleted
"""
where: files_bool_exp!
): files_mutation_response
"""
delete single row from the table: "ninja_turtles"
"""
deleteNinjaTurtle(id: uuid!): ninjaTurtles
"""
delete data from the table: "ninja_turtles"
"""
deleteNinjaTurtles(
"""
filter the rows which have to be deleted
"""
where: ninjaTurtles_bool_exp!
): ninjaTurtles_mutation_response
"""
delete single row from the table: "tasks"
"""
deleteTask(id: uuid!): tasks
"""
delete data from the table: "tasks"
"""
deleteTasks(
"""
filter the rows which have to be deleted
"""
where: tasks_bool_exp!
): tasks_mutation_response
"""
delete single row from the table: "auth.users"
"""
deleteUser(id: uuid!): users
"""
delete data from the table: "auth.users"
"""
deleteUsers(
"""
filter the rows which have to be deleted
"""
where: users_bool_exp!
): users_mutation_response
"""
delete single row from the table: "storage.virus"
"""
deleteVirus(id: uuid!): virus
"""
delete data from the table: "storage.virus"
"""
deleteViruses(
"""
filter the rows which have to be deleted
"""
where: virus_bool_exp!
): virus_mutation_response
"""
delete data from the table: "movies"
"""
delete_movies(
"""
filter the rows which have to be deleted
"""
where: movies_bool_exp!
): movies_mutation_response
"""
delete single row from the table: "movies"
"""
delete_movies_by_pk(id: uuid!): movies
"""
insert a single row into the table: "attachments"
"""
insertAttachment(
"""
the row to be inserted
"""
object: attachments_insert_input!
"""
upsert condition
"""
on_conflict: attachments_on_conflict
): attachments
"""
insert data into the table: "attachments"
"""
insertAttachments(
"""
the rows to be inserted
"""
objects: [attachments_insert_input!]!
"""
upsert condition
"""
on_conflict: attachments_on_conflict
): attachments_mutation_response
"""
insert a single row into the table: "auth.providers"
"""
insertAuthProvider(
"""
the row to be inserted
"""
object: authProviders_insert_input!
"""
upsert condition
"""
on_conflict: authProviders_on_conflict
): authProviders
"""
insert a single row into the table: "auth.provider_requests"
"""
insertAuthProviderRequest(
"""
the row to be inserted
"""
object: authProviderRequests_insert_input!
"""
upsert condition
"""
on_conflict: authProviderRequests_on_conflict
): authProviderRequests
"""
insert data into the table: "auth.provider_requests"
"""
insertAuthProviderRequests(
"""
the rows to be inserted
"""
objects: [authProviderRequests_insert_input!]!
"""
upsert condition
"""
on_conflict: authProviderRequests_on_conflict
): authProviderRequests_mutation_response
"""
insert data into the table: "auth.providers"
"""
insertAuthProviders(
"""
the rows to be inserted
"""
objects: [authProviders_insert_input!]!
"""
upsert condition
"""
on_conflict: authProviders_on_conflict
): authProviders_mutation_response
"""
insert a single row into the table: "auth.refresh_tokens"
"""
insertAuthRefreshToken(
"""
the row to be inserted
"""
object: authRefreshTokens_insert_input!
"""
upsert condition
"""
on_conflict: authRefreshTokens_on_conflict
): authRefreshTokens
"""
insert a single row into the table: "auth.refresh_token_types"
"""
insertAuthRefreshTokenType(
"""
the row to be inserted
"""
object: authRefreshTokenTypes_insert_input!
"""
upsert condition
"""
on_conflict: authRefreshTokenTypes_on_conflict
): authRefreshTokenTypes
"""
insert data into the table: "auth.refresh_token_types"
"""
insertAuthRefreshTokenTypes(
"""
the rows to be inserted
"""
objects: [authRefreshTokenTypes_insert_input!]!
"""
upsert condition
"""
on_conflict: authRefreshTokenTypes_on_conflict
): authRefreshTokenTypes_mutation_response
"""
insert data into the table: "auth.refresh_tokens"
"""
insertAuthRefreshTokens(
"""
the rows to be inserted
"""
objects: [authRefreshTokens_insert_input!]!
"""
upsert condition
"""
on_conflict: authRefreshTokens_on_conflict
): authRefreshTokens_mutation_response
"""
insert a single row into the table: "auth.roles"
"""
insertAuthRole(
"""
the row to be inserted
"""
object: authRoles_insert_input!
"""
upsert condition
"""
on_conflict: authRoles_on_conflict
): authRoles
"""
insert data into the table: "auth.roles"
"""
insertAuthRoles(
"""
the rows to be inserted
"""
objects: [authRoles_insert_input!]!
"""
upsert condition
"""
on_conflict: authRoles_on_conflict
): authRoles_mutation_response
"""
insert a single row into the table: "auth.user_providers"
"""
insertAuthUserProvider(
"""
the row to be inserted
"""
object: authUserProviders_insert_input!
"""
upsert condition
"""
on_conflict: authUserProviders_on_conflict
): authUserProviders
"""
insert data into the table: "auth.user_providers"
"""
insertAuthUserProviders(
"""
the rows to be inserted
"""
objects: [authUserProviders_insert_input!]!
"""
upsert condition
"""
on_conflict: authUserProviders_on_conflict
): authUserProviders_mutation_response
"""
insert a single row into the table: "auth.user_roles"
"""
insertAuthUserRole(
"""
the row to be inserted
"""
object: authUserRoles_insert_input!
"""
upsert condition
"""
on_conflict: authUserRoles_on_conflict
): authUserRoles
"""
insert data into the table: "auth.user_roles"
"""
insertAuthUserRoles(
"""
the rows to be inserted
"""
objects: [authUserRoles_insert_input!]!
"""
upsert condition
"""
on_conflict: authUserRoles_on_conflict
): authUserRoles_mutation_response
"""
insert a single row into the table: "auth.user_security_keys"
"""
insertAuthUserSecurityKey(
"""
the row to be inserted
"""
object: authUserSecurityKeys_insert_input!
"""
upsert condition
"""
on_conflict: authUserSecurityKeys_on_conflict
): authUserSecurityKeys
"""
insert data into the table: "auth.user_security_keys"
"""
insertAuthUserSecurityKeys(
"""
the rows to be inserted
"""
objects: [authUserSecurityKeys_insert_input!]!
"""
upsert condition
"""
on_conflict: authUserSecurityKeys_on_conflict
): authUserSecurityKeys_mutation_response
"""
insert a single row into the table: "storage.buckets"
"""
insertBucket(
"""
the row to be inserted
"""
object: buckets_insert_input!
"""
upsert condition
"""
on_conflict: buckets_on_conflict
): buckets
"""
insert data into the table: "storage.buckets"
"""
insertBuckets(
"""
the rows to be inserted
"""
objects: [buckets_insert_input!]!
"""
upsert condition
"""
on_conflict: buckets_on_conflict
): buckets_mutation_response
"""
insert a single row into the table: "comments"
"""
insertComment(
"""
the row to be inserted
"""
object: comments_insert_input!
"""
upsert condition
"""
on_conflict: comments_on_conflict
): comments
"""
insert data into the table: "comments"
"""
insertComments(
"""
the rows to be inserted
"""
objects: [comments_insert_input!]!
"""
upsert condition
"""
on_conflict: comments_on_conflict
): comments_mutation_response
"""
insert a single row into the table: "storage.files"
"""
insertFile(
"""
the row to be inserted
"""
object: files_insert_input!
"""
upsert condition
"""
on_conflict: files_on_conflict
): files
"""
insert data into the table: "storage.files"
"""
insertFiles(
"""
the rows to be inserted
"""
objects: [files_insert_input!]!
"""
upsert condition
"""
on_conflict: files_on_conflict
): files_mutation_response
"""
insert a single row into the table: "ninja_turtles"
"""
insertNinjaTurtle(
"""
the row to be inserted
"""
object: ninjaTurtles_insert_input!
"""
upsert condition
"""
on_conflict: ninjaTurtles_on_conflict
): ninjaTurtles
"""
insert data into the table: "ninja_turtles"
"""
insertNinjaTurtles(
"""
the rows to be inserted
"""
objects: [ninjaTurtles_insert_input!]!
"""
upsert condition
"""
on_conflict: ninjaTurtles_on_conflict
): ninjaTurtles_mutation_response
"""
insert a single row into the table: "tasks"
"""
insertTask(
"""
the row to be inserted
"""
object: tasks_insert_input!
"""
upsert condition
"""
on_conflict: tasks_on_conflict
): tasks
"""
insert data into the table: "tasks"
"""
insertTasks(
"""
the rows to be inserted
"""
objects: [tasks_insert_input!]!
"""
upsert condition
"""
on_conflict: tasks_on_conflict
): tasks_mutation_response
"""
insert a single row into the table: "auth.users"
"""
insertUser(
"""
the row to be inserted
"""
object: users_insert_input!
"""
upsert condition
"""
on_conflict: users_on_conflict
): users
"""
insert data into the table: "auth.users"
"""
insertUsers(
"""
the rows to be inserted
"""
objects: [users_insert_input!]!
"""
upsert condition
"""
on_conflict: users_on_conflict
): users_mutation_response
"""
insert a single row into the table: "storage.virus"
"""
insertVirus(
"""
the row to be inserted
"""
object: virus_insert_input!
"""
upsert condition
"""
on_conflict: virus_on_conflict
): virus
"""
insert data into the table: "storage.virus"
"""
insertViruses(
"""
the rows to be inserted
"""
objects: [virus_insert_input!]!
"""
upsert condition
"""
on_conflict: virus_on_conflict
): virus_mutation_response
"""
insert data into the table: "movies"
"""
insert_movies(
"""
the rows to be inserted
"""
objects: [movies_insert_input!]!
"""
upsert condition
"""
on_conflict: movies_on_conflict
): movies_mutation_response
"""
insert a single row into the table: "movies"
"""
insert_movies_one(
"""
the row to be inserted
"""
object: movies_insert_input!
"""
upsert condition
"""
on_conflict: movies_on_conflict
): movies
"""
update single row of the table: "attachments"
"""
updateAttachment(
"""
sets the columns of the filtered rows to the given values
"""
_set: attachments_set_input
pk_columns: attachments_pk_columns_input!
): attachments
"""
update data of the table: "attachments"
"""
updateAttachments(
"""
sets the columns of the filtered rows to the given values
"""
_set: attachments_set_input
"""
filter the rows which have to be updated
"""
where: attachments_bool_exp!
): attachments_mutation_response
"""
update multiples rows of table: "attachments"
"""
updateAttachmentsMany(
"""
updates to execute, in order
"""
updates: [attachments_updates!]!
): [attachments_mutation_response]
"""
update single row of the table: "auth.providers"
"""
updateAuthProvider(
"""
sets the columns of the filtered rows to the given values
"""
_set: authProviders_set_input
pk_columns: authProviders_pk_columns_input!
): authProviders
"""
update single row of the table: "auth.provider_requests"
"""
updateAuthProviderRequest(
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: authProviderRequests_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: authProviderRequests_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: authProviderRequests_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: authProviderRequests_delete_key_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: authProviderRequests_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: authProviderRequests_set_input
pk_columns: authProviderRequests_pk_columns_input!
): authProviderRequests
"""
update data of the table: "auth.provider_requests"
"""
updateAuthProviderRequests(
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: authProviderRequests_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: authProviderRequests_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: authProviderRequests_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: authProviderRequests_delete_key_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: authProviderRequests_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: authProviderRequests_set_input
"""
filter the rows which have to be updated
"""
where: authProviderRequests_bool_exp!
): authProviderRequests_mutation_response
"""
update data of the table: "auth.providers"
"""
updateAuthProviders(
"""
sets the columns of the filtered rows to the given values
"""
_set: authProviders_set_input
"""
filter the rows which have to be updated
"""
where: authProviders_bool_exp!
): authProviders_mutation_response
"""
update single row of the table: "auth.refresh_tokens"
"""
updateAuthRefreshToken(
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: authRefreshTokens_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: authRefreshTokens_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: authRefreshTokens_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: authRefreshTokens_delete_key_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: authRefreshTokens_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: authRefreshTokens_set_input
pk_columns: authRefreshTokens_pk_columns_input!
): authRefreshTokens
"""
update single row of the table: "auth.refresh_token_types"
"""
updateAuthRefreshTokenType(
"""
sets the columns of the filtered rows to the given values
"""
_set: authRefreshTokenTypes_set_input
pk_columns: authRefreshTokenTypes_pk_columns_input!
): authRefreshTokenTypes
"""
update data of the table: "auth.refresh_token_types"
"""
updateAuthRefreshTokenTypes(
"""
sets the columns of the filtered rows to the given values
"""
_set: authRefreshTokenTypes_set_input
"""
filter the rows which have to be updated
"""
where: authRefreshTokenTypes_bool_exp!
): authRefreshTokenTypes_mutation_response
"""
update data of the table: "auth.refresh_tokens"
"""
updateAuthRefreshTokens(
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: authRefreshTokens_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: authRefreshTokens_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: authRefreshTokens_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: authRefreshTokens_delete_key_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: authRefreshTokens_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: authRefreshTokens_set_input
"""
filter the rows which have to be updated
"""
where: authRefreshTokens_bool_exp!
): authRefreshTokens_mutation_response
"""
update single row of the table: "auth.roles"
"""
updateAuthRole(
"""
sets the columns of the filtered rows to the given values
"""
_set: authRoles_set_input
pk_columns: authRoles_pk_columns_input!
): authRoles
"""
update data of the table: "auth.roles"
"""
updateAuthRoles(
"""
sets the columns of the filtered rows to the given values
"""
_set: authRoles_set_input
"""
filter the rows which have to be updated
"""
where: authRoles_bool_exp!
): authRoles_mutation_response
"""
update single row of the table: "auth.user_providers"
"""
updateAuthUserProvider(
"""
sets the columns of the filtered rows to the given values
"""
_set: authUserProviders_set_input
pk_columns: authUserProviders_pk_columns_input!
): authUserProviders
"""
update data of the table: "auth.user_providers"
"""
updateAuthUserProviders(
"""
sets the columns of the filtered rows to the given values
"""
_set: authUserProviders_set_input
"""
filter the rows which have to be updated
"""
where: authUserProviders_bool_exp!
): authUserProviders_mutation_response
"""
update single row of the table: "auth.user_roles"
"""
updateAuthUserRole(
"""
sets the columns of the filtered rows to the given values
"""
_set: authUserRoles_set_input
pk_columns: authUserRoles_pk_columns_input!
): authUserRoles
"""
update data of the table: "auth.user_roles"
"""
updateAuthUserRoles(
"""
sets the columns of the filtered rows to the given values
"""
_set: authUserRoles_set_input
"""
filter the rows which have to be updated
"""
where: authUserRoles_bool_exp!
): authUserRoles_mutation_response
"""
update single row of the table: "auth.user_security_keys"
"""
updateAuthUserSecurityKey(
"""
increments the numeric columns with given value of the filtered values
"""
_inc: authUserSecurityKeys_inc_input
"""
sets the columns of the filtered rows to the given values
"""
_set: authUserSecurityKeys_set_input
pk_columns: authUserSecurityKeys_pk_columns_input!
): authUserSecurityKeys
"""
update data of the table: "auth.user_security_keys"
"""
updateAuthUserSecurityKeys(
"""
increments the numeric columns with given value of the filtered values
"""
_inc: authUserSecurityKeys_inc_input
"""
sets the columns of the filtered rows to the given values
"""
_set: authUserSecurityKeys_set_input
"""
filter the rows which have to be updated
"""
where: authUserSecurityKeys_bool_exp!
): authUserSecurityKeys_mutation_response
"""
update single row of the table: "storage.buckets"
"""
updateBucket(
"""
increments the numeric columns with given value of the filtered values
"""
_inc: buckets_inc_input
"""
sets the columns of the filtered rows to the given values
"""
_set: buckets_set_input
pk_columns: buckets_pk_columns_input!
): buckets
"""
update data of the table: "storage.buckets"
"""
updateBuckets(
"""
increments the numeric columns with given value of the filtered values
"""
_inc: buckets_inc_input
"""
sets the columns of the filtered rows to the given values
"""
_set: buckets_set_input
"""
filter the rows which have to be updated
"""
where: buckets_bool_exp!
): buckets_mutation_response
"""
update single row of the table: "comments"
"""
updateComment(
"""
sets the columns of the filtered rows to the given values
"""
_set: comments_set_input
pk_columns: comments_pk_columns_input!
): comments
"""
update data of the table: "comments"
"""
updateComments(
"""
sets the columns of the filtered rows to the given values
"""
_set: comments_set_input
"""
filter the rows which have to be updated
"""
where: comments_bool_exp!
): comments_mutation_response
"""
update multiples rows of table: "comments"
"""
updateCommentsMany(
"""
updates to execute, in order
"""
updates: [comments_updates!]!
): [comments_mutation_response]
"""
update single row of the table: "storage.files"
"""
updateFile(
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: files_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: files_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: files_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: files_delete_key_input
"""
increments the numeric columns with given value of the filtered values
"""
_inc: files_inc_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: files_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: files_set_input
pk_columns: files_pk_columns_input!
): files
"""
update data of the table: "storage.files"
"""
updateFiles(
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: files_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: files_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: files_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: files_delete_key_input
"""
increments the numeric columns with given value of the filtered values
"""
_inc: files_inc_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: files_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: files_set_input
"""
filter the rows which have to be updated
"""
where: files_bool_exp!
): files_mutation_response
"""
update single row of the table: "ninja_turtles"
"""
updateNinjaTurtle(
"""
sets the columns of the filtered rows to the given values
"""
_set: ninjaTurtles_set_input
pk_columns: ninjaTurtles_pk_columns_input!
): ninjaTurtles
"""
update data of the table: "ninja_turtles"
"""
updateNinjaTurtles(
"""
sets the columns of the filtered rows to the given values
"""
_set: ninjaTurtles_set_input
"""
filter the rows which have to be updated
"""
where: ninjaTurtles_bool_exp!
): ninjaTurtles_mutation_response
"""
update multiples rows of table: "ninja_turtles"
"""
updateNinjaTurtlesMany(
"""
updates to execute, in order
"""
updates: [ninjaTurtles_updates!]!
): [ninjaTurtles_mutation_response]
"""
update single row of the table: "tasks"
"""
updateTask(
"""
sets the columns of the filtered rows to the given values
"""
_set: tasks_set_input
pk_columns: tasks_pk_columns_input!
): tasks
"""
update data of the table: "tasks"
"""
updateTasks(
"""
sets the columns of the filtered rows to the given values
"""
_set: tasks_set_input
"""
filter the rows which have to be updated
"""
where: tasks_bool_exp!
): tasks_mutation_response
"""
update multiples rows of table: "tasks"
"""
updateTasksMany(
"""
updates to execute, in order
"""
updates: [tasks_updates!]!
): [tasks_mutation_response]
"""
update single row of the table: "auth.users"
"""
updateUser(
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: users_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: users_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: users_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: users_delete_key_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: users_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: users_set_input
pk_columns: users_pk_columns_input!
): users
"""
update data of the table: "auth.users"
"""
updateUsers(
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: users_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: users_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: users_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: users_delete_key_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: users_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: users_set_input
"""
filter the rows which have to be updated
"""
where: users_bool_exp!
): users_mutation_response
"""
update single row of the table: "storage.virus"
"""
updateVirus(
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: virus_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: virus_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: virus_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: virus_delete_key_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: virus_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: virus_set_input
pk_columns: virus_pk_columns_input!
): virus
"""
update data of the table: "storage.virus"
"""
updateViruses(
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: virus_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: virus_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: virus_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: virus_delete_key_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: virus_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: virus_set_input
"""
filter the rows which have to be updated
"""
where: virus_bool_exp!
): virus_mutation_response
"""
update multiples rows of table: "auth.provider_requests"
"""
update_authProviderRequests_many(
"""
updates to execute, in order
"""
updates: [authProviderRequests_updates!]!
): [authProviderRequests_mutation_response]
"""
update multiples rows of table: "auth.providers"
"""
update_authProviders_many(
"""
updates to execute, in order
"""
updates: [authProviders_updates!]!
): [authProviders_mutation_response]
"""
update multiples rows of table: "auth.refresh_token_types"
"""
update_authRefreshTokenTypes_many(
"""
updates to execute, in order
"""
updates: [authRefreshTokenTypes_updates!]!
): [authRefreshTokenTypes_mutation_response]
"""
update multiples rows of table: "auth.refresh_tokens"
"""
update_authRefreshTokens_many(
"""
updates to execute, in order
"""
updates: [authRefreshTokens_updates!]!
): [authRefreshTokens_mutation_response]
"""
update multiples rows of table: "auth.roles"
"""
update_authRoles_many(
"""
updates to execute, in order
"""
updates: [authRoles_updates!]!
): [authRoles_mutation_response]
"""
update multiples rows of table: "auth.user_providers"
"""
update_authUserProviders_many(
"""
updates to execute, in order
"""
updates: [authUserProviders_updates!]!
): [authUserProviders_mutation_response]
"""
update multiples rows of table: "auth.user_roles"
"""
update_authUserRoles_many(
"""
updates to execute, in order
"""
updates: [authUserRoles_updates!]!
): [authUserRoles_mutation_response]
"""
update multiples rows of table: "auth.user_security_keys"
"""
update_authUserSecurityKeys_many(
"""
updates to execute, in order
"""
updates: [authUserSecurityKeys_updates!]!
): [authUserSecurityKeys_mutation_response]
"""
update multiples rows of table: "storage.buckets"
"""
update_buckets_many(
"""
updates to execute, in order
"""
updates: [buckets_updates!]!
): [buckets_mutation_response]
"""
update multiples rows of table: "storage.files"
"""
update_files_many(
"""
updates to execute, in order
"""
updates: [files_updates!]!
): [files_mutation_response]
"""
update data of the table: "movies"
"""
update_movies(
"""
increments the numeric columns with given value of the filtered values
"""
_inc: movies_inc_input
"""
sets the columns of the filtered rows to the given values
"""
_set: movies_set_input
"""
filter the rows which have to be updated
"""
where: movies_bool_exp!
): movies_mutation_response
"""
update single row of the table: "movies"
"""
update_movies_by_pk(
"""
increments the numeric columns with given value of the filtered values
"""
_inc: movies_inc_input
"""
sets the columns of the filtered rows to the given values
"""
_set: movies_set_input
pk_columns: movies_pk_columns_input!
): movies
"""
update multiples rows of table: "movies"
"""
update_movies_many(
"""
updates to execute, in order
"""
updates: [movies_updates!]!
): [movies_mutation_response]
"""
update multiples rows of table: "auth.users"
"""
update_users_many(
"""
updates to execute, in order
"""
updates: [users_updates!]!
): [users_mutation_response]
"""
update multiples rows of table: "storage.virus"
"""
update_virus_many(
"""
updates to execute, in order
"""
updates: [virus_updates!]!
): [virus_mutation_response]
}
"""
columns and relationships of "ninja_turtles"
"""
type ninjaTurtles {
"""
An array relationship
"""
comments(
"""
distinct select on columns
"""
distinct_on: [comments_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [comments_order_by!]
"""
filter the rows returned
"""
where: comments_bool_exp
): [comments!]!
"""
An aggregate relationship
"""
comments_aggregate(
"""
distinct select on columns
"""
distinct_on: [comments_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [comments_order_by!]
"""
filter the rows returned
"""
where: comments_bool_exp
): comments_aggregate!
createdAt: timestamptz!
description: String
id: uuid!
name: String!
updatedAt: timestamptz!
}
"""
aggregated selection of "ninja_turtles"
"""
type ninjaTurtles_aggregate {
aggregate: ninjaTurtles_aggregate_fields
nodes: [ninjaTurtles!]!
}
"""
aggregate fields of "ninja_turtles"
"""
type ninjaTurtles_aggregate_fields {
count(columns: [ninjaTurtles_select_column!], distinct: Boolean): Int!
max: ninjaTurtles_max_fields
min: ninjaTurtles_min_fields
}
"""
Boolean expression to filter rows from the table "ninja_turtles". All fields are combined with a logical 'AND'.
"""
input ninjaTurtles_bool_exp {
_and: [ninjaTurtles_bool_exp!]
_not: ninjaTurtles_bool_exp
_or: [ninjaTurtles_bool_exp!]
comments: comments_bool_exp
comments_aggregate: comments_aggregate_bool_exp
createdAt: timestamptz_comparison_exp
description: String_comparison_exp
id: uuid_comparison_exp
name: String_comparison_exp
updatedAt: timestamptz_comparison_exp
}
"""
unique or primary key constraints on table "ninja_turtles"
"""
enum ninjaTurtles_constraint {
"""
unique or primary key constraint on columns "id"
"""
ninja_turtles_pkey
}
"""
input type for inserting data into table "ninja_turtles"
"""
input ninjaTurtles_insert_input {
comments: comments_arr_rel_insert_input
createdAt: timestamptz
description: String
id: uuid
name: String
updatedAt: timestamptz
}
"""
aggregate max on columns
"""
type ninjaTurtles_max_fields {
createdAt: timestamptz
description: String
id: uuid
name: String
updatedAt: timestamptz
}
"""
aggregate min on columns
"""
type ninjaTurtles_min_fields {
createdAt: timestamptz
description: String
id: uuid
name: String
updatedAt: timestamptz
}
"""
response of any mutation on the table "ninja_turtles"
"""
type ninjaTurtles_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [ninjaTurtles!]!
}
"""
input type for inserting object relation for remote table "ninja_turtles"
"""
input ninjaTurtles_obj_rel_insert_input {
data: ninjaTurtles_insert_input!
"""
upsert condition
"""
on_conflict: ninjaTurtles_on_conflict
}
"""
on_conflict condition type for table "ninja_turtles"
"""
input ninjaTurtles_on_conflict {
constraint: ninjaTurtles_constraint!
update_columns: [ninjaTurtles_update_column!]! = []
where: ninjaTurtles_bool_exp
}
"""
Ordering options when selecting data from "ninja_turtles".
"""
input ninjaTurtles_order_by {
comments_aggregate: comments_aggregate_order_by
createdAt: order_by
description: order_by
id: order_by
name: order_by
updatedAt: order_by
}
"""
primary key columns input for table: ninja_turtles
"""
input ninjaTurtles_pk_columns_input {
id: uuid!
}
"""
select columns of table "ninja_turtles"
"""
enum ninjaTurtles_select_column {
"""
column name
"""
createdAt
"""
column name
"""
description
"""
column name
"""
id
"""
column name
"""
name
"""
column name
"""
updatedAt
}
"""
input type for updating data in table "ninja_turtles"
"""
input ninjaTurtles_set_input {
createdAt: timestamptz
description: String
id: uuid
name: String
updatedAt: timestamptz
}
"""
Streaming cursor of the table "ninjaTurtles"
"""
input ninjaTurtles_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: ninjaTurtles_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input ninjaTurtles_stream_cursor_value_input {
createdAt: timestamptz
description: String
id: uuid
name: String
updatedAt: timestamptz
}
"""
update columns of table "ninja_turtles"
"""
enum ninjaTurtles_update_column {
"""
column name
"""
createdAt
"""
column name
"""
description
"""
column name
"""
id
"""
column name
"""
name
"""
column name
"""
updatedAt
}
# One element of the batched updateNinjaTurtlesMany mutation: a _set payload
# applied to the rows matched by "where".
input ninjaTurtles_updates {
  """
  sets the columns of the filtered rows to the given values
  """
  _set: ninjaTurtles_set_input
  """
  filter the rows which have to be updated
  """
  where: ninjaTurtles_bool_exp!
}
# Custom scalar — presumably maps to the Postgres "numeric" column type;
# serialization format is server-defined (confirm against the backend).
scalar numeric
"""
Boolean expression to compare columns of type "numeric". All fields are combined with logical 'AND'.
"""
input numeric_comparison_exp {
_eq: numeric
_gt: numeric
_gte: numeric
_in: [numeric!]
_is_null: Boolean
_lt: numeric
_lte: numeric
_neq: numeric
_nin: [numeric!]
}
"""
column ordering options
"""
enum order_by {
"""
in ascending order, nulls last
"""
asc
"""
in ascending order, nulls first
"""
asc_nulls_first
"""
in ascending order, nulls last
"""
asc_nulls_last
"""
in descending order, nulls first
"""
desc
"""
in descending order, nulls first
"""
desc_nulls_first
"""
in descending order, nulls last
"""
desc_nulls_last
}
type query_root {
"""
fetch data from the table: "attachments" using primary key columns
"""
attachment(file_id: uuid!, taskID: uuid!): attachments
"""
fetch data from the table: "attachments"
"""
attachments(
"""
distinct select on columns
"""
distinct_on: [attachments_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [attachments_order_by!]
"""
filter the rows returned
"""
where: attachments_bool_exp
): [attachments!]!
"""
fetch aggregated fields from the table: "attachments"
"""
attachmentsAggregate(
"""
distinct select on columns
"""
distinct_on: [attachments_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [attachments_order_by!]
"""
filter the rows returned
"""
where: attachments_bool_exp
): attachments_aggregate!
"""
fetch data from the table: "auth.providers" using primary key columns
"""
authProvider(id: String!): authProviders
"""
fetch data from the table: "auth.provider_requests" using primary key columns
"""
authProviderRequest(id: uuid!): authProviderRequests
"""
fetch data from the table: "auth.provider_requests"
"""
authProviderRequests(
"""
distinct select on columns
"""
distinct_on: [authProviderRequests_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authProviderRequests_order_by!]
"""
filter the rows returned
"""
where: authProviderRequests_bool_exp
): [authProviderRequests!]!
"""
fetch aggregated fields from the table: "auth.provider_requests"
"""
authProviderRequestsAggregate(
"""
distinct select on columns
"""
distinct_on: [authProviderRequests_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authProviderRequests_order_by!]
"""
filter the rows returned
"""
where: authProviderRequests_bool_exp
): authProviderRequests_aggregate!
"""
fetch data from the table: "auth.providers"
"""
authProviders(
"""
distinct select on columns
"""
distinct_on: [authProviders_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authProviders_order_by!]
"""
filter the rows returned
"""
where: authProviders_bool_exp
): [authProviders!]!
"""
fetch aggregated fields from the table: "auth.providers"
"""
authProvidersAggregate(
"""
distinct select on columns
"""
distinct_on: [authProviders_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authProviders_order_by!]
"""
filter the rows returned
"""
where: authProviders_bool_exp
): authProviders_aggregate!
"""
fetch data from the table: "auth.refresh_tokens" using primary key columns
"""
authRefreshToken(id: uuid!): authRefreshTokens
"""
fetch data from the table: "auth.refresh_token_types" using primary key columns
"""
authRefreshTokenType(value: String!): authRefreshTokenTypes
"""
fetch data from the table: "auth.refresh_token_types"
"""
authRefreshTokenTypes(
"""
distinct select on columns
"""
distinct_on: [authRefreshTokenTypes_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authRefreshTokenTypes_order_by!]
"""
filter the rows returned
"""
where: authRefreshTokenTypes_bool_exp
): [authRefreshTokenTypes!]!
"""
fetch aggregated fields from the table: "auth.refresh_token_types"
"""
authRefreshTokenTypesAggregate(
"""
distinct select on columns
"""
distinct_on: [authRefreshTokenTypes_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authRefreshTokenTypes_order_by!]
"""
filter the rows returned
"""
where: authRefreshTokenTypes_bool_exp
): authRefreshTokenTypes_aggregate!
"""
fetch data from the table: "auth.refresh_tokens"
"""
authRefreshTokens(
"""
distinct select on columns
"""
distinct_on: [authRefreshTokens_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authRefreshTokens_order_by!]
"""
filter the rows returned
"""
where: authRefreshTokens_bool_exp
): [authRefreshTokens!]!
"""
fetch aggregated fields from the table: "auth.refresh_tokens"
"""
authRefreshTokensAggregate(
"""
distinct select on columns
"""
distinct_on: [authRefreshTokens_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authRefreshTokens_order_by!]
"""
filter the rows returned
"""
where: authRefreshTokens_bool_exp
): authRefreshTokens_aggregate!
"""
fetch data from the table: "auth.roles" using primary key columns
"""
authRole(role: String!): authRoles
"""
fetch data from the table: "auth.roles"
"""
authRoles(
"""
distinct select on columns
"""
distinct_on: [authRoles_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authRoles_order_by!]
"""
filter the rows returned
"""
where: authRoles_bool_exp
): [authRoles!]!
"""
fetch aggregated fields from the table: "auth.roles"
"""
authRolesAggregate(
"""
distinct select on columns
"""
distinct_on: [authRoles_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authRoles_order_by!]
"""
filter the rows returned
"""
where: authRoles_bool_exp
): authRoles_aggregate!
"""
fetch data from the table: "auth.user_providers" using primary key columns
"""
authUserProvider(id: uuid!): authUserProviders
"""
fetch data from the table: "auth.user_providers"
"""
authUserProviders(
"""
distinct select on columns
"""
distinct_on: [authUserProviders_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserProviders_order_by!]
"""
filter the rows returned
"""
where: authUserProviders_bool_exp
): [authUserProviders!]!
"""
fetch aggregated fields from the table: "auth.user_providers"
"""
authUserProvidersAggregate(
"""
distinct select on columns
"""
distinct_on: [authUserProviders_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserProviders_order_by!]
"""
filter the rows returned
"""
where: authUserProviders_bool_exp
): authUserProviders_aggregate!
"""
fetch data from the table: "auth.user_roles" using primary key columns
"""
authUserRole(id: uuid!): authUserRoles
"""
fetch data from the table: "auth.user_roles"
"""
authUserRoles(
"""
distinct select on columns
"""
distinct_on: [authUserRoles_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserRoles_order_by!]
"""
filter the rows returned
"""
where: authUserRoles_bool_exp
): [authUserRoles!]!
"""
fetch aggregated fields from the table: "auth.user_roles"
"""
authUserRolesAggregate(
"""
distinct select on columns
"""
distinct_on: [authUserRoles_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserRoles_order_by!]
"""
filter the rows returned
"""
where: authUserRoles_bool_exp
): authUserRoles_aggregate!
"""
fetch data from the table: "auth.user_security_keys" using primary key columns
"""
authUserSecurityKey(id: uuid!): authUserSecurityKeys
"""
fetch data from the table: "auth.user_security_keys"
"""
authUserSecurityKeys(
"""
distinct select on columns
"""
distinct_on: [authUserSecurityKeys_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserSecurityKeys_order_by!]
"""
filter the rows returned
"""
where: authUserSecurityKeys_bool_exp
): [authUserSecurityKeys!]!
"""
fetch aggregated fields from the table: "auth.user_security_keys"
"""
authUserSecurityKeysAggregate(
"""
distinct select on columns
"""
distinct_on: [authUserSecurityKeys_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserSecurityKeys_order_by!]
"""
filter the rows returned
"""
where: authUserSecurityKeys_bool_exp
): authUserSecurityKeys_aggregate!
"""
fetch data from the table: "storage.buckets" using primary key columns
"""
bucket(id: String!): buckets
"""
fetch data from the table: "storage.buckets"
"""
buckets(
"""
distinct select on columns
"""
distinct_on: [buckets_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [buckets_order_by!]
"""
filter the rows returned
"""
where: buckets_bool_exp
): [buckets!]!
"""
fetch aggregated fields from the table: "storage.buckets"
"""
bucketsAggregate(
"""
distinct select on columns
"""
distinct_on: [buckets_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [buckets_order_by!]
"""
filter the rows returned
"""
where: buckets_bool_exp
): buckets_aggregate!
"""
fetch data from the table: "comments" using primary key columns
"""
comment(id: uuid!): comments
"""
An array relationship
"""
comments(
"""
distinct select on columns
"""
distinct_on: [comments_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [comments_order_by!]
"""
filter the rows returned
"""
where: comments_bool_exp
): [comments!]!
"""
fetch aggregated fields from the table: "comments"
"""
commentsAggregate(
"""
distinct select on columns
"""
distinct_on: [comments_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [comments_order_by!]
"""
filter the rows returned
"""
where: comments_bool_exp
): comments_aggregate!
"""
fetch data from the table: "storage.files" using primary key columns
"""
file(id: uuid!): files
"""
An array relationship
"""
files(
"""
distinct select on columns
"""
distinct_on: [files_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [files_order_by!]
"""
filter the rows returned
"""
where: files_bool_exp
): [files!]!
"""
fetch aggregated fields from the table: "storage.files"
"""
filesAggregate(
"""
distinct select on columns
"""
distinct_on: [files_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [files_order_by!]
"""
filter the rows returned
"""
where: files_bool_exp
): files_aggregate!
"""
fetch data from the table: "movies"
"""
movies(
"""
distinct select on columns
"""
distinct_on: [movies_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [movies_order_by!]
"""
filter the rows returned
"""
where: movies_bool_exp
): [movies!]!
"""
fetch aggregated fields from the table: "movies"
"""
movies_aggregate(
"""
distinct select on columns
"""
distinct_on: [movies_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [movies_order_by!]
"""
filter the rows returned
"""
where: movies_bool_exp
): movies_aggregate!
"""
fetch data from the table: "movies" using primary key columns
"""
movies_by_pk(id: uuid!): movies
"""
fetch data from the table: "ninja_turtles" using primary key columns
"""
ninjaTurtle(id: uuid!): ninjaTurtles
"""
fetch data from the table: "ninja_turtles"
"""
ninjaTurtles(
"""
distinct select on columns
"""
distinct_on: [ninjaTurtles_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [ninjaTurtles_order_by!]
"""
filter the rows returned
"""
where: ninjaTurtles_bool_exp
): [ninjaTurtles!]!
"""
fetch aggregated fields from the table: "ninja_turtles"
"""
ninjaTurtlesAggregate(
"""
distinct select on columns
"""
distinct_on: [ninjaTurtles_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [ninjaTurtles_order_by!]
"""
filter the rows returned
"""
where: ninjaTurtles_bool_exp
): ninjaTurtles_aggregate!
"""
fetch data from the table: "tasks" using primary key columns
"""
task(id: uuid!): tasks
"""
fetch data from the table: "tasks"
"""
tasks(
"""
distinct select on columns
"""
distinct_on: [tasks_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [tasks_order_by!]
"""
filter the rows returned
"""
where: tasks_bool_exp
): [tasks!]!
"""
fetch aggregated fields from the table: "tasks"
"""
tasksAggregate(
"""
distinct select on columns
"""
distinct_on: [tasks_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [tasks_order_by!]
"""
filter the rows returned
"""
where: tasks_bool_exp
): tasks_aggregate!
"""
fetch data from the table: "auth.users" using primary key columns
"""
user(id: uuid!): users
"""
fetch data from the table: "auth.users"
"""
users(
"""
distinct select on columns
"""
distinct_on: [users_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [users_order_by!]
"""
filter the rows returned
"""
where: users_bool_exp
): [users!]!
"""
fetch aggregated fields from the table: "auth.users"
"""
usersAggregate(
"""
distinct select on columns
"""
distinct_on: [users_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [users_order_by!]
"""
filter the rows returned
"""
where: users_bool_exp
): users_aggregate!
"""
fetch data from the table: "storage.virus" using primary key columns
"""
virus(id: uuid!): virus
"""
fetch data from the table: "storage.virus"
"""
viruses(
"""
distinct select on columns
"""
distinct_on: [virus_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [virus_order_by!]
"""
filter the rows returned
"""
where: virus_bool_exp
): [virus!]!
"""
fetch aggregated fields from the table: "storage.virus"
"""
virusesAggregate(
"""
distinct select on columns
"""
distinct_on: [virus_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [virus_order_by!]
"""
filter the rows returned
"""
where: virus_bool_exp
): virus_aggregate!
}
# Hasura-generated subscription root. Each tracked table exposes up to four
# live fields: row select, aggregate, primary-key lookup, and a cursor-based
# stream (`*_stream`/`*Stream`). NOTE(review): root-field naming mixes
# camelCase (attachmentsStream) and snake_case (authProviders_stream) —
# presumably per-table custom root names in the Hasura metadata; do not
# hand-edit, regenerate from metadata instead.
type subscription_root {
  # --- attachments (join table between tasks and storage.files) ---
  """
  fetch data from the table: "attachments" using primary key columns
  """
  attachment(file_id: uuid!, taskID: uuid!): attachments
  """
  fetch data from the table: "attachments"
  """
  attachments(
    """
    distinct select on columns
    """
    distinct_on: [attachments_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [attachments_order_by!]
    """
    filter the rows returned
    """
    where: attachments_bool_exp
  ): [attachments!]!
  """
  fetch aggregated fields from the table: "attachments"
  """
  attachmentsAggregate(
    """
    distinct select on columns
    """
    distinct_on: [attachments_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [attachments_order_by!]
    """
    filter the rows returned
    """
    where: attachments_bool_exp
  ): attachments_aggregate!
  """
  fetch data from the table in a streaming manner: "attachments"
  """
  attachmentsStream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [attachments_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: attachments_bool_exp
  ): [attachments!]!
  # --- auth.* tables (Nhost Auth schema) ---
  """
  fetch data from the table: "auth.providers" using primary key columns
  """
  authProvider(id: String!): authProviders
  """
  fetch data from the table: "auth.provider_requests" using primary key columns
  """
  authProviderRequest(id: uuid!): authProviderRequests
  """
  fetch data from the table: "auth.provider_requests"
  """
  authProviderRequests(
    """
    distinct select on columns
    """
    distinct_on: [authProviderRequests_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authProviderRequests_order_by!]
    """
    filter the rows returned
    """
    where: authProviderRequests_bool_exp
  ): [authProviderRequests!]!
  """
  fetch aggregated fields from the table: "auth.provider_requests"
  """
  authProviderRequestsAggregate(
    """
    distinct select on columns
    """
    distinct_on: [authProviderRequests_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authProviderRequests_order_by!]
    """
    filter the rows returned
    """
    where: authProviderRequests_bool_exp
  ): authProviderRequests_aggregate!
  """
  fetch data from the table in a streaming manner: "auth.provider_requests"
  """
  authProviderRequests_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [authProviderRequests_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: authProviderRequests_bool_exp
  ): [authProviderRequests!]!
  """
  fetch data from the table: "auth.providers"
  """
  authProviders(
    """
    distinct select on columns
    """
    distinct_on: [authProviders_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authProviders_order_by!]
    """
    filter the rows returned
    """
    where: authProviders_bool_exp
  ): [authProviders!]!
  """
  fetch aggregated fields from the table: "auth.providers"
  """
  authProvidersAggregate(
    """
    distinct select on columns
    """
    distinct_on: [authProviders_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authProviders_order_by!]
    """
    filter the rows returned
    """
    where: authProviders_bool_exp
  ): authProviders_aggregate!
  """
  fetch data from the table in a streaming manner: "auth.providers"
  """
  authProviders_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [authProviders_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: authProviders_bool_exp
  ): [authProviders!]!
  """
  fetch data from the table: "auth.refresh_tokens" using primary key columns
  """
  authRefreshToken(id: uuid!): authRefreshTokens
  """
  fetch data from the table: "auth.refresh_token_types" using primary key columns
  """
  authRefreshTokenType(value: String!): authRefreshTokenTypes
  """
  fetch data from the table: "auth.refresh_token_types"
  """
  authRefreshTokenTypes(
    """
    distinct select on columns
    """
    distinct_on: [authRefreshTokenTypes_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authRefreshTokenTypes_order_by!]
    """
    filter the rows returned
    """
    where: authRefreshTokenTypes_bool_exp
  ): [authRefreshTokenTypes!]!
  """
  fetch aggregated fields from the table: "auth.refresh_token_types"
  """
  authRefreshTokenTypesAggregate(
    """
    distinct select on columns
    """
    distinct_on: [authRefreshTokenTypes_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authRefreshTokenTypes_order_by!]
    """
    filter the rows returned
    """
    where: authRefreshTokenTypes_bool_exp
  ): authRefreshTokenTypes_aggregate!
  """
  fetch data from the table in a streaming manner: "auth.refresh_token_types"
  """
  authRefreshTokenTypes_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [authRefreshTokenTypes_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: authRefreshTokenTypes_bool_exp
  ): [authRefreshTokenTypes!]!
  """
  fetch data from the table: "auth.refresh_tokens"
  """
  authRefreshTokens(
    """
    distinct select on columns
    """
    distinct_on: [authRefreshTokens_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authRefreshTokens_order_by!]
    """
    filter the rows returned
    """
    where: authRefreshTokens_bool_exp
  ): [authRefreshTokens!]!
  """
  fetch aggregated fields from the table: "auth.refresh_tokens"
  """
  authRefreshTokensAggregate(
    """
    distinct select on columns
    """
    distinct_on: [authRefreshTokens_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authRefreshTokens_order_by!]
    """
    filter the rows returned
    """
    where: authRefreshTokens_bool_exp
  ): authRefreshTokens_aggregate!
  """
  fetch data from the table in a streaming manner: "auth.refresh_tokens"
  """
  authRefreshTokens_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [authRefreshTokens_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: authRefreshTokens_bool_exp
  ): [authRefreshTokens!]!
  """
  fetch data from the table: "auth.roles" using primary key columns
  """
  authRole(role: String!): authRoles
  """
  fetch data from the table: "auth.roles"
  """
  authRoles(
    """
    distinct select on columns
    """
    distinct_on: [authRoles_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authRoles_order_by!]
    """
    filter the rows returned
    """
    where: authRoles_bool_exp
  ): [authRoles!]!
  """
  fetch aggregated fields from the table: "auth.roles"
  """
  authRolesAggregate(
    """
    distinct select on columns
    """
    distinct_on: [authRoles_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authRoles_order_by!]
    """
    filter the rows returned
    """
    where: authRoles_bool_exp
  ): authRoles_aggregate!
  """
  fetch data from the table in a streaming manner: "auth.roles"
  """
  authRoles_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [authRoles_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: authRoles_bool_exp
  ): [authRoles!]!
  """
  fetch data from the table: "auth.user_providers" using primary key columns
  """
  authUserProvider(id: uuid!): authUserProviders
  """
  fetch data from the table: "auth.user_providers"
  """
  authUserProviders(
    """
    distinct select on columns
    """
    distinct_on: [authUserProviders_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authUserProviders_order_by!]
    """
    filter the rows returned
    """
    where: authUserProviders_bool_exp
  ): [authUserProviders!]!
  """
  fetch aggregated fields from the table: "auth.user_providers"
  """
  authUserProvidersAggregate(
    """
    distinct select on columns
    """
    distinct_on: [authUserProviders_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authUserProviders_order_by!]
    """
    filter the rows returned
    """
    where: authUserProviders_bool_exp
  ): authUserProviders_aggregate!
  """
  fetch data from the table in a streaming manner: "auth.user_providers"
  """
  authUserProviders_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [authUserProviders_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: authUserProviders_bool_exp
  ): [authUserProviders!]!
  """
  fetch data from the table: "auth.user_roles" using primary key columns
  """
  authUserRole(id: uuid!): authUserRoles
  """
  fetch data from the table: "auth.user_roles"
  """
  authUserRoles(
    """
    distinct select on columns
    """
    distinct_on: [authUserRoles_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authUserRoles_order_by!]
    """
    filter the rows returned
    """
    where: authUserRoles_bool_exp
  ): [authUserRoles!]!
  """
  fetch aggregated fields from the table: "auth.user_roles"
  """
  authUserRolesAggregate(
    """
    distinct select on columns
    """
    distinct_on: [authUserRoles_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authUserRoles_order_by!]
    """
    filter the rows returned
    """
    where: authUserRoles_bool_exp
  ): authUserRoles_aggregate!
  """
  fetch data from the table in a streaming manner: "auth.user_roles"
  """
  authUserRoles_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [authUserRoles_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: authUserRoles_bool_exp
  ): [authUserRoles!]!
  """
  fetch data from the table: "auth.user_security_keys" using primary key columns
  """
  authUserSecurityKey(id: uuid!): authUserSecurityKeys
  """
  fetch data from the table: "auth.user_security_keys"
  """
  authUserSecurityKeys(
    """
    distinct select on columns
    """
    distinct_on: [authUserSecurityKeys_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authUserSecurityKeys_order_by!]
    """
    filter the rows returned
    """
    where: authUserSecurityKeys_bool_exp
  ): [authUserSecurityKeys!]!
  """
  fetch aggregated fields from the table: "auth.user_security_keys"
  """
  authUserSecurityKeysAggregate(
    """
    distinct select on columns
    """
    distinct_on: [authUserSecurityKeys_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [authUserSecurityKeys_order_by!]
    """
    filter the rows returned
    """
    where: authUserSecurityKeys_bool_exp
  ): authUserSecurityKeys_aggregate!
  """
  fetch data from the table in a streaming manner: "auth.user_security_keys"
  """
  authUserSecurityKeys_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [authUserSecurityKeys_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: authUserSecurityKeys_bool_exp
  ): [authUserSecurityKeys!]!
  # --- storage.* tables (Nhost Storage schema) ---
  """
  fetch data from the table: "storage.buckets" using primary key columns
  """
  bucket(id: String!): buckets
  """
  fetch data from the table: "storage.buckets"
  """
  buckets(
    """
    distinct select on columns
    """
    distinct_on: [buckets_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [buckets_order_by!]
    """
    filter the rows returned
    """
    where: buckets_bool_exp
  ): [buckets!]!
  """
  fetch aggregated fields from the table: "storage.buckets"
  """
  bucketsAggregate(
    """
    distinct select on columns
    """
    distinct_on: [buckets_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [buckets_order_by!]
    """
    filter the rows returned
    """
    where: buckets_bool_exp
  ): buckets_aggregate!
  """
  fetch data from the table in a streaming manner: "storage.buckets"
  """
  buckets_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [buckets_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: buckets_bool_exp
  ): [buckets!]!
  # --- application tables ---
  """
  fetch data from the table: "comments" using primary key columns
  """
  comment(id: uuid!): comments
  """
  An array relationship
  """
  comments(
    """
    distinct select on columns
    """
    distinct_on: [comments_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [comments_order_by!]
    """
    filter the rows returned
    """
    where: comments_bool_exp
  ): [comments!]!
  """
  fetch aggregated fields from the table: "comments"
  """
  commentsAggregate(
    """
    distinct select on columns
    """
    distinct_on: [comments_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [comments_order_by!]
    """
    filter the rows returned
    """
    where: comments_bool_exp
  ): comments_aggregate!
  """
  fetch data from the table in a streaming manner: "comments"
  """
  commentsStream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [comments_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: comments_bool_exp
  ): [comments!]!
  """
  fetch data from the table: "storage.files" using primary key columns
  """
  file(id: uuid!): files
  """
  An array relationship
  """
  files(
    """
    distinct select on columns
    """
    distinct_on: [files_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [files_order_by!]
    """
    filter the rows returned
    """
    where: files_bool_exp
  ): [files!]!
  """
  fetch aggregated fields from the table: "storage.files"
  """
  filesAggregate(
    """
    distinct select on columns
    """
    distinct_on: [files_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [files_order_by!]
    """
    filter the rows returned
    """
    where: files_bool_exp
  ): files_aggregate!
  """
  fetch data from the table in a streaming manner: "storage.files"
  """
  files_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [files_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: files_bool_exp
  ): [files!]!
  """
  fetch data from the table: "movies"
  """
  movies(
    """
    distinct select on columns
    """
    distinct_on: [movies_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [movies_order_by!]
    """
    filter the rows returned
    """
    where: movies_bool_exp
  ): [movies!]!
  """
  fetch aggregated fields from the table: "movies"
  """
  movies_aggregate(
    """
    distinct select on columns
    """
    distinct_on: [movies_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [movies_order_by!]
    """
    filter the rows returned
    """
    where: movies_bool_exp
  ): movies_aggregate!
  """
  fetch data from the table: "movies" using primary key columns
  """
  movies_by_pk(id: uuid!): movies
  """
  fetch data from the table in a streaming manner: "movies"
  """
  movies_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [movies_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: movies_bool_exp
  ): [movies!]!
  """
  fetch data from the table: "ninja_turtles" using primary key columns
  """
  ninjaTurtle(id: uuid!): ninjaTurtles
  """
  fetch data from the table: "ninja_turtles"
  """
  ninjaTurtles(
    """
    distinct select on columns
    """
    distinct_on: [ninjaTurtles_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [ninjaTurtles_order_by!]
    """
    filter the rows returned
    """
    where: ninjaTurtles_bool_exp
  ): [ninjaTurtles!]!
  """
  fetch aggregated fields from the table: "ninja_turtles"
  """
  ninjaTurtlesAggregate(
    """
    distinct select on columns
    """
    distinct_on: [ninjaTurtles_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [ninjaTurtles_order_by!]
    """
    filter the rows returned
    """
    where: ninjaTurtles_bool_exp
  ): ninjaTurtles_aggregate!
  """
  fetch data from the table in a streaming manner: "ninja_turtles"
  """
  ninjaTurtlesStream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [ninjaTurtles_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: ninjaTurtles_bool_exp
  ): [ninjaTurtles!]!
  """
  fetch data from the table: "tasks" using primary key columns
  """
  task(id: uuid!): tasks
  """
  fetch data from the table: "tasks"
  """
  tasks(
    """
    distinct select on columns
    """
    distinct_on: [tasks_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [tasks_order_by!]
    """
    filter the rows returned
    """
    where: tasks_bool_exp
  ): [tasks!]!
  """
  fetch aggregated fields from the table: "tasks"
  """
  tasksAggregate(
    """
    distinct select on columns
    """
    distinct_on: [tasks_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [tasks_order_by!]
    """
    filter the rows returned
    """
    where: tasks_bool_exp
  ): tasks_aggregate!
  """
  fetch data from the table in a streaming manner: "tasks"
  """
  tasksStream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [tasks_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: tasks_bool_exp
  ): [tasks!]!
  """
  fetch data from the table: "auth.users" using primary key columns
  """
  user(id: uuid!): users
  """
  fetch data from the table: "auth.users"
  """
  users(
    """
    distinct select on columns
    """
    distinct_on: [users_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [users_order_by!]
    """
    filter the rows returned
    """
    where: users_bool_exp
  ): [users!]!
  """
  fetch aggregated fields from the table: "auth.users"
  """
  usersAggregate(
    """
    distinct select on columns
    """
    distinct_on: [users_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [users_order_by!]
    """
    filter the rows returned
    """
    where: users_bool_exp
  ): users_aggregate!
  """
  fetch data from the table in a streaming manner: "auth.users"
  """
  users_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [users_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: users_bool_exp
  ): [users!]!
  """
  fetch data from the table: "storage.virus" using primary key columns
  """
  virus(id: uuid!): virus
  """
  fetch data from the table in a streaming manner: "storage.virus"
  """
  virus_stream(
    """
    maximum number of rows returned in a single batch
    """
    batch_size: Int!
    """
    cursor to stream the results returned by the query
    """
    cursor: [virus_stream_cursor_input]!
    """
    filter the rows returned
    """
    where: virus_bool_exp
  ): [virus!]!
  """
  fetch data from the table: "storage.virus"
  """
  viruses(
    """
    distinct select on columns
    """
    distinct_on: [virus_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [virus_order_by!]
    """
    filter the rows returned
    """
    where: virus_bool_exp
  ): [virus!]!
  """
  fetch aggregated fields from the table: "storage.virus"
  """
  virusesAggregate(
    """
    distinct select on columns
    """
    distinct_on: [virus_select_column!]
    """
    limit the number of rows returned
    """
    limit: Int
    """
    skip the first n rows. Use only with order_by
    """
    offset: Int
    """
    sort the rows by one or more columns
    """
    order_by: [virus_order_by!]
    """
    filter the rows returned
    """
    where: virus_bool_exp
  ): virus_aggregate!
}
"""
columns and relationships of "tasks"
"""
# Object type mirroring the public "tasks" table: four data columns plus
# id/createdAt/updatedAt bookkeeping. All columns are non-nullable.
type tasks {
  completed: Boolean!
  createdAt: timestamptz!
  description: String!
  id: uuid!
  title: String!
  updatedAt: timestamptz!
  # NOTE(review): presumably a foreign key to auth.users(id) — confirm; no
  # object relationship to `users` is exposed on this type.
  userID: uuid!
}
"""
aggregated selection of "tasks"
"""
# Wrapper returned by tasksAggregate: computed aggregates plus the matching rows.
type tasks_aggregate {
  # Aggregate computations (count/max/min) over the selected row set.
  aggregate: tasks_aggregate_fields
  # The rows the aggregates were computed over.
  nodes: [tasks!]!
}
"""
aggregate fields of "tasks"
"""
type tasks_aggregate_fields {
count(columns: [tasks_select_column!], distinct: Boolean): Int!
max: tasks_max_fields
min: tasks_min_fields
}
"""
Boolean expression to filter rows from the table "tasks". All fields are combined with a logical 'AND'.
"""
input tasks_bool_exp {
_and: [tasks_bool_exp!]
_not: tasks_bool_exp
_or: [tasks_bool_exp!]
completed: Boolean_comparison_exp
createdAt: timestamptz_comparison_exp
description: String_comparison_exp
id: uuid_comparison_exp
title: String_comparison_exp
updatedAt: timestamptz_comparison_exp
userID: uuid_comparison_exp
}
"""
unique or primary key constraints on table "tasks"
"""
enum tasks_constraint {
"""
unique or primary key constraint on columns "id"
"""
tasks_pkey
}
"""
input type for inserting data into table "tasks"
"""
input tasks_insert_input {
completed: Boolean
createdAt: timestamptz
description: String
id: uuid
title: String
updatedAt: timestamptz
userID: uuid
}
"""
aggregate max on columns
"""
type tasks_max_fields {
createdAt: timestamptz
description: String
id: uuid
title: String
updatedAt: timestamptz
userID: uuid
}
"""
aggregate min on columns
"""
type tasks_min_fields {
createdAt: timestamptz
description: String
id: uuid
title: String
updatedAt: timestamptz
userID: uuid
}
"""
response of any mutation on the table "tasks"
"""
type tasks_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [tasks!]!
}
"""
input type for inserting object relation for remote table "tasks"
"""
input tasks_obj_rel_insert_input {
data: tasks_insert_input!
"""
upsert condition
"""
on_conflict: tasks_on_conflict
}
"""
on_conflict condition type for table "tasks"
"""
input tasks_on_conflict {
constraint: tasks_constraint!
update_columns: [tasks_update_column!]! = []
where: tasks_bool_exp
}
"""
Ordering options when selecting data from "tasks".
"""
input tasks_order_by {
completed: order_by
createdAt: order_by
description: order_by
id: order_by
title: order_by
updatedAt: order_by
userID: order_by
}
"""
primary key columns input for table: tasks
"""
input tasks_pk_columns_input {
id: uuid!
}
"""
select columns of table "tasks"
"""
enum tasks_select_column {
"""
column name
"""
completed
"""
column name
"""
createdAt
"""
column name
"""
description
"""
column name
"""
id
"""
column name
"""
title
"""
column name
"""
updatedAt
"""
column name
"""
userID
}
"""
input type for updating data in table "tasks"
"""
input tasks_set_input {
completed: Boolean
createdAt: timestamptz
description: String
id: uuid
title: String
updatedAt: timestamptz
userID: uuid
}
"""
Streaming cursor of the table "tasks"
"""
input tasks_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: tasks_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input tasks_stream_cursor_value_input {
completed: Boolean
createdAt: timestamptz
description: String
id: uuid
title: String
updatedAt: timestamptz
userID: uuid
}
"""
update columns of table "tasks"
"""
enum tasks_update_column {
"""
column name
"""
completed
"""
column name
"""
createdAt
"""
column name
"""
description
"""
column name
"""
id
"""
column name
"""
title
"""
column name
"""
updatedAt
"""
column name
"""
userID
}
input tasks_updates {
"""
sets the columns of the filtered rows to the given values
"""
_set: tasks_set_input
"""
filter the rows which have to be updated
"""
where: tasks_bool_exp!
}
# Postgres `timestamp with time zone`. NOTE(review): Hasura serializes this as
# an ISO-8601 string over the wire — confirm against the connected Hasura
# version before relying on a specific format in clients.
scalar timestamptz
"""
Boolean expression to compare columns of type "timestamptz". All fields are combined with logical 'AND'.
"""
input timestamptz_comparison_exp {
  _eq: timestamptz
  _gt: timestamptz
  _gte: timestamptz
  _in: [timestamptz!]
  _is_null: Boolean
  _lt: timestamptz
  _lte: timestamptz
  _neq: timestamptz
  _nin: [timestamptz!]
}
"""
User account information. Don't modify its structure as Hasura Auth relies on it to function properly.
"""
type users {
activeMfaType: String
avatarUrl: String!
createdAt: timestamptz!
currentChallenge: String
defaultRole: String!
"""
An object relationship
"""
defaultRoleByRole: authRoles!
disabled: Boolean!
displayName: String!
email: citext
emailVerified: Boolean!
id: uuid!
isAnonymous: Boolean!
lastSeen: timestamptz
locale: String!
metadata(
"""
JSON select path
"""
path: String
): jsonb
newEmail: citext
otpHash: String
otpHashExpiresAt: timestamptz!
otpMethodLastUsed: String
passwordHash: String
phoneNumber: String
phoneNumberVerified: Boolean!
"""
An array relationship
"""
refreshTokens(
"""
distinct select on columns
"""
distinct_on: [authRefreshTokens_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authRefreshTokens_order_by!]
"""
filter the rows returned
"""
where: authRefreshTokens_bool_exp
): [authRefreshTokens!]!
"""
An aggregate relationship
"""
refreshTokens_aggregate(
"""
distinct select on columns
"""
distinct_on: [authRefreshTokens_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authRefreshTokens_order_by!]
"""
filter the rows returned
"""
where: authRefreshTokens_bool_exp
): authRefreshTokens_aggregate!
"""
An array relationship
"""
roles(
"""
distinct select on columns
"""
distinct_on: [authUserRoles_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserRoles_order_by!]
"""
filter the rows returned
"""
where: authUserRoles_bool_exp
): [authUserRoles!]!
"""
An aggregate relationship
"""
roles_aggregate(
"""
distinct select on columns
"""
distinct_on: [authUserRoles_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserRoles_order_by!]
"""
filter the rows returned
"""
where: authUserRoles_bool_exp
): authUserRoles_aggregate!
"""
An array relationship
"""
securityKeys(
"""
distinct select on columns
"""
distinct_on: [authUserSecurityKeys_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserSecurityKeys_order_by!]
"""
filter the rows returned
"""
where: authUserSecurityKeys_bool_exp
): [authUserSecurityKeys!]!
"""
An aggregate relationship
"""
securityKeys_aggregate(
"""
distinct select on columns
"""
distinct_on: [authUserSecurityKeys_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserSecurityKeys_order_by!]
"""
filter the rows returned
"""
where: authUserSecurityKeys_bool_exp
): authUserSecurityKeys_aggregate!
ticket: String
ticketExpiresAt: timestamptz!
totpSecret: String
updatedAt: timestamptz!
"""
An array relationship
"""
userProviders(
"""
distinct select on columns
"""
distinct_on: [authUserProviders_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserProviders_order_by!]
"""
filter the rows returned
"""
where: authUserProviders_bool_exp
): [authUserProviders!]!
"""
An aggregate relationship
"""
userProviders_aggregate(
"""
distinct select on columns
"""
distinct_on: [authUserProviders_select_column!]
"""
limit the number of rows returned
"""
limit: Int
"""
skip the first n rows. Use only with order_by
"""
offset: Int
"""
sort the rows by one or more columns
"""
order_by: [authUserProviders_order_by!]
"""
filter the rows returned
"""
where: authUserProviders_bool_exp
): authUserProviders_aggregate!
}
"""
aggregated selection of "auth.users"
"""
type users_aggregate {
aggregate: users_aggregate_fields
nodes: [users!]!
}
input users_aggregate_bool_exp {
bool_and: users_aggregate_bool_exp_bool_and
bool_or: users_aggregate_bool_exp_bool_or
count: users_aggregate_bool_exp_count
}
input users_aggregate_bool_exp_bool_and {
arguments: users_select_column_users_aggregate_bool_exp_bool_and_arguments_columns!
distinct: Boolean
filter: users_bool_exp
predicate: Boolean_comparison_exp!
}
input users_aggregate_bool_exp_bool_or {
arguments: users_select_column_users_aggregate_bool_exp_bool_or_arguments_columns!
distinct: Boolean
filter: users_bool_exp
predicate: Boolean_comparison_exp!
}
input users_aggregate_bool_exp_count {
arguments: [users_select_column!]
distinct: Boolean
filter: users_bool_exp
predicate: Int_comparison_exp!
}
"""
aggregate fields of "auth.users"
"""
type users_aggregate_fields {
count(columns: [users_select_column!], distinct: Boolean): Int!
max: users_max_fields
min: users_min_fields
}
"""
order by aggregate values of table "auth.users"
"""
input users_aggregate_order_by {
count: order_by
max: users_max_order_by
min: users_min_order_by
}
"""
append existing jsonb value of filtered columns with new jsonb value
"""
input users_append_input {
metadata: jsonb
}
"""
input type for inserting array relation for remote table "auth.users"
"""
input users_arr_rel_insert_input {
data: [users_insert_input!]!
"""
upsert condition
"""
on_conflict: users_on_conflict
}
"""
Boolean expression to filter rows from the table "auth.users". All fields are combined with a logical 'AND'.
"""
input users_bool_exp {
_and: [users_bool_exp!]
_not: users_bool_exp
_or: [users_bool_exp!]
activeMfaType: String_comparison_exp
avatarUrl: String_comparison_exp
createdAt: timestamptz_comparison_exp
currentChallenge: String_comparison_exp
defaultRole: String_comparison_exp
defaultRoleByRole: authRoles_bool_exp
disabled: Boolean_comparison_exp
displayName: String_comparison_exp
email: citext_comparison_exp
emailVerified: Boolean_comparison_exp
id: uuid_comparison_exp
isAnonymous: Boolean_comparison_exp
lastSeen: timestamptz_comparison_exp
locale: String_comparison_exp
metadata: jsonb_comparison_exp
newEmail: citext_comparison_exp
otpHash: String_comparison_exp
otpHashExpiresAt: timestamptz_comparison_exp
otpMethodLastUsed: String_comparison_exp
passwordHash: String_comparison_exp
phoneNumber: String_comparison_exp
phoneNumberVerified: Boolean_comparison_exp
refreshTokens: authRefreshTokens_bool_exp
refreshTokens_aggregate: authRefreshTokens_aggregate_bool_exp
roles: authUserRoles_bool_exp
roles_aggregate: authUserRoles_aggregate_bool_exp
securityKeys: authUserSecurityKeys_bool_exp
securityKeys_aggregate: authUserSecurityKeys_aggregate_bool_exp
ticket: String_comparison_exp
ticketExpiresAt: timestamptz_comparison_exp
totpSecret: String_comparison_exp
updatedAt: timestamptz_comparison_exp
userProviders: authUserProviders_bool_exp
userProviders_aggregate: authUserProviders_aggregate_bool_exp
}
"""
unique or primary key constraints on table "auth.users"
"""
enum users_constraint {
"""
unique or primary key constraint on columns "email"
"""
users_email_key
"""
unique or primary key constraint on columns "phone_number"
"""
users_phone_number_key
"""
unique or primary key constraint on columns "id"
"""
users_pkey
}
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
input users_delete_at_path_input {
metadata: [String!]
}
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
input users_delete_elem_input {
metadata: Int
}
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
input users_delete_key_input {
metadata: String
}
"""
input type for inserting data into table "auth.users"
"""
input users_insert_input {
activeMfaType: String
avatarUrl: String
createdAt: timestamptz
currentChallenge: String
defaultRole: String
defaultRoleByRole: authRoles_obj_rel_insert_input
disabled: Boolean
displayName: String
email: citext
emailVerified: Boolean
id: uuid
isAnonymous: Boolean
lastSeen: timestamptz
locale: String
metadata: jsonb
newEmail: citext
otpHash: String
otpHashExpiresAt: timestamptz
otpMethodLastUsed: String
passwordHash: String
phoneNumber: String
phoneNumberVerified: Boolean
refreshTokens: authRefreshTokens_arr_rel_insert_input
roles: authUserRoles_arr_rel_insert_input
securityKeys: authUserSecurityKeys_arr_rel_insert_input
ticket: String
ticketExpiresAt: timestamptz
totpSecret: String
updatedAt: timestamptz
userProviders: authUserProviders_arr_rel_insert_input
}
"""
aggregate max on columns
"""
type users_max_fields {
activeMfaType: String
avatarUrl: String
createdAt: timestamptz
currentChallenge: String
defaultRole: String
displayName: String
email: citext
id: uuid
lastSeen: timestamptz
locale: String
newEmail: citext
otpHash: String
otpHashExpiresAt: timestamptz
otpMethodLastUsed: String
passwordHash: String
phoneNumber: String
ticket: String
ticketExpiresAt: timestamptz
totpSecret: String
updatedAt: timestamptz
}
"""
order by max() on columns of table "auth.users"
"""
input users_max_order_by {
activeMfaType: order_by
avatarUrl: order_by
createdAt: order_by
currentChallenge: order_by
defaultRole: order_by
displayName: order_by
email: order_by
id: order_by
lastSeen: order_by
locale: order_by
newEmail: order_by
otpHash: order_by
otpHashExpiresAt: order_by
otpMethodLastUsed: order_by
passwordHash: order_by
phoneNumber: order_by
ticket: order_by
ticketExpiresAt: order_by
totpSecret: order_by
updatedAt: order_by
}
"""
aggregate min on columns
"""
type users_min_fields {
activeMfaType: String
avatarUrl: String
createdAt: timestamptz
currentChallenge: String
defaultRole: String
displayName: String
email: citext
id: uuid
lastSeen: timestamptz
locale: String
newEmail: citext
otpHash: String
otpHashExpiresAt: timestamptz
otpMethodLastUsed: String
passwordHash: String
phoneNumber: String
ticket: String
ticketExpiresAt: timestamptz
totpSecret: String
updatedAt: timestamptz
}
"""
order by min() on columns of table "auth.users"
"""
input users_min_order_by {
activeMfaType: order_by
avatarUrl: order_by
createdAt: order_by
currentChallenge: order_by
defaultRole: order_by
displayName: order_by
email: order_by
id: order_by
lastSeen: order_by
locale: order_by
newEmail: order_by
otpHash: order_by
otpHashExpiresAt: order_by
otpMethodLastUsed: order_by
passwordHash: order_by
phoneNumber: order_by
ticket: order_by
ticketExpiresAt: order_by
totpSecret: order_by
updatedAt: order_by
}
"""
response of any mutation on the table "auth.users"
"""
type users_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [users!]!
}
"""
input type for inserting object relation for remote table "auth.users"
"""
input users_obj_rel_insert_input {
data: users_insert_input!
"""
upsert condition
"""
on_conflict: users_on_conflict
}
"""
on_conflict condition type for table "auth.users"
"""
input users_on_conflict {
constraint: users_constraint!
update_columns: [users_update_column!]! = []
where: users_bool_exp
}
"""
Ordering options when selecting data from "auth.users".
"""
input users_order_by {
activeMfaType: order_by
avatarUrl: order_by
createdAt: order_by
currentChallenge: order_by
defaultRole: order_by
defaultRoleByRole: authRoles_order_by
disabled: order_by
displayName: order_by
email: order_by
emailVerified: order_by
id: order_by
isAnonymous: order_by
lastSeen: order_by
locale: order_by
metadata: order_by
newEmail: order_by
otpHash: order_by
otpHashExpiresAt: order_by
otpMethodLastUsed: order_by
passwordHash: order_by
phoneNumber: order_by
phoneNumberVerified: order_by
refreshTokens_aggregate: authRefreshTokens_aggregate_order_by
roles_aggregate: authUserRoles_aggregate_order_by
securityKeys_aggregate: authUserSecurityKeys_aggregate_order_by
ticket: order_by
ticketExpiresAt: order_by
totpSecret: order_by
updatedAt: order_by
userProviders_aggregate: authUserProviders_aggregate_order_by
}
"""
primary key columns input for table: auth.users
"""
input users_pk_columns_input {
id: uuid!
}
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
input users_prepend_input {
metadata: jsonb
}
"""
select columns of table "auth.users"
"""
enum users_select_column {
"""
column name
"""
activeMfaType
"""
column name
"""
avatarUrl
"""
column name
"""
createdAt
"""
column name
"""
currentChallenge
"""
column name
"""
defaultRole
"""
column name
"""
disabled
"""
column name
"""
displayName
"""
column name
"""
email
"""
column name
"""
emailVerified
"""
column name
"""
id
"""
column name
"""
isAnonymous
"""
column name
"""
lastSeen
"""
column name
"""
locale
"""
column name
"""
metadata
"""
column name
"""
newEmail
"""
column name
"""
otpHash
"""
column name
"""
otpHashExpiresAt
"""
column name
"""
otpMethodLastUsed
"""
column name
"""
passwordHash
"""
column name
"""
phoneNumber
"""
column name
"""
phoneNumberVerified
"""
column name
"""
ticket
"""
column name
"""
ticketExpiresAt
"""
column name
"""
totpSecret
"""
column name
"""
updatedAt
}
"""
select "users_aggregate_bool_exp_bool_and_arguments_columns" columns of table "auth.users"
"""
enum users_select_column_users_aggregate_bool_exp_bool_and_arguments_columns {
"""
column name
"""
disabled
"""
column name
"""
emailVerified
"""
column name
"""
isAnonymous
"""
column name
"""
phoneNumberVerified
}
"""
select "users_aggregate_bool_exp_bool_or_arguments_columns" columns of table "auth.users"
"""
enum users_select_column_users_aggregate_bool_exp_bool_or_arguments_columns {
"""
column name
"""
disabled
"""
column name
"""
emailVerified
"""
column name
"""
isAnonymous
"""
column name
"""
phoneNumberVerified
}
"""
input type for updating data in table "auth.users"
"""
input users_set_input {
activeMfaType: String
avatarUrl: String
createdAt: timestamptz
currentChallenge: String
defaultRole: String
disabled: Boolean
displayName: String
email: citext
emailVerified: Boolean
id: uuid
isAnonymous: Boolean
lastSeen: timestamptz
locale: String
metadata: jsonb
newEmail: citext
otpHash: String
otpHashExpiresAt: timestamptz
otpMethodLastUsed: String
passwordHash: String
phoneNumber: String
phoneNumberVerified: Boolean
ticket: String
ticketExpiresAt: timestamptz
totpSecret: String
updatedAt: timestamptz
}
"""
Streaming cursor of the table "users"
"""
input users_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: users_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input users_stream_cursor_value_input {
activeMfaType: String
avatarUrl: String
createdAt: timestamptz
currentChallenge: String
defaultRole: String
disabled: Boolean
displayName: String
email: citext
emailVerified: Boolean
id: uuid
isAnonymous: Boolean
lastSeen: timestamptz
locale: String
metadata: jsonb
newEmail: citext
otpHash: String
otpHashExpiresAt: timestamptz
otpMethodLastUsed: String
passwordHash: String
phoneNumber: String
phoneNumberVerified: Boolean
ticket: String
ticketExpiresAt: timestamptz
totpSecret: String
updatedAt: timestamptz
}
"""
update columns of table "auth.users"
"""
enum users_update_column {
"""
column name
"""
activeMfaType
"""
column name
"""
avatarUrl
"""
column name
"""
createdAt
"""
column name
"""
currentChallenge
"""
column name
"""
defaultRole
"""
column name
"""
disabled
"""
column name
"""
displayName
"""
column name
"""
email
"""
column name
"""
emailVerified
"""
column name
"""
id
"""
column name
"""
isAnonymous
"""
column name
"""
lastSeen
"""
column name
"""
locale
"""
column name
"""
metadata
"""
column name
"""
newEmail
"""
column name
"""
otpHash
"""
column name
"""
otpHashExpiresAt
"""
column name
"""
otpMethodLastUsed
"""
column name
"""
passwordHash
"""
column name
"""
phoneNumber
"""
column name
"""
phoneNumberVerified
"""
column name
"""
ticket
"""
column name
"""
ticketExpiresAt
"""
column name
"""
totpSecret
"""
column name
"""
updatedAt
}
input users_updates {
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: users_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: users_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: users_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: users_delete_key_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: users_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: users_set_input
"""
filter the rows which have to be updated
"""
where: users_bool_exp!
}
# Postgres `uuid`. NOTE(review): serialized by Hasura as a string (e.g.
# "xxxxxxxx-xxxx-...") — confirm clients treat it as opaque text.
scalar uuid
"""
Boolean expression to compare columns of type "uuid". All fields are combined with logical 'AND'.
"""
input uuid_comparison_exp {
  _eq: uuid
  _gt: uuid
  _gte: uuid
  _in: [uuid!]
  _is_null: Boolean
  _lt: uuid
  _lte: uuid
  _neq: uuid
  _nin: [uuid!]
}
"""
columns and relationships of "storage.virus"
"""
type virus {
createdAt: timestamptz!
"""
An object relationship
"""
file: files!
fileId: uuid!
filename: String!
id: uuid!
updatedAt: timestamptz!
userSession(
"""
JSON select path
"""
path: String
): jsonb!
virus: String!
}
"""
aggregated selection of "storage.virus"
"""
type virus_aggregate {
aggregate: virus_aggregate_fields
nodes: [virus!]!
}
"""
aggregate fields of "storage.virus"
"""
type virus_aggregate_fields {
count(columns: [virus_select_column!], distinct: Boolean): Int!
max: virus_max_fields
min: virus_min_fields
}
"""
append existing jsonb value of filtered columns with new jsonb value
"""
input virus_append_input {
userSession: jsonb
}
"""
Boolean expression to filter rows from the table "storage.virus". All fields are combined with a logical 'AND'.
"""
input virus_bool_exp {
_and: [virus_bool_exp!]
_not: virus_bool_exp
_or: [virus_bool_exp!]
createdAt: timestamptz_comparison_exp
file: files_bool_exp
fileId: uuid_comparison_exp
filename: String_comparison_exp
id: uuid_comparison_exp
updatedAt: timestamptz_comparison_exp
userSession: jsonb_comparison_exp
virus: String_comparison_exp
}
"""
unique or primary key constraints on table "storage.virus"
"""
enum virus_constraint {
"""
unique or primary key constraint on columns "id"
"""
virus_pkey
}
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
input virus_delete_at_path_input {
userSession: [String!]
}
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
input virus_delete_elem_input {
userSession: Int
}
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
input virus_delete_key_input {
userSession: String
}
"""
input type for inserting data into table "storage.virus"
"""
input virus_insert_input {
createdAt: timestamptz
file: files_obj_rel_insert_input
fileId: uuid
filename: String
id: uuid
updatedAt: timestamptz
userSession: jsonb
virus: String
}
"""
aggregate max on columns
"""
type virus_max_fields {
createdAt: timestamptz
fileId: uuid
filename: String
id: uuid
updatedAt: timestamptz
virus: String
}
"""
aggregate min on columns
"""
type virus_min_fields {
createdAt: timestamptz
fileId: uuid
filename: String
id: uuid
updatedAt: timestamptz
virus: String
}
"""
response of any mutation on the table "storage.virus"
"""
type virus_mutation_response {
"""
number of rows affected by the mutation
"""
affected_rows: Int!
"""
data from the rows affected by the mutation
"""
returning: [virus!]!
}
"""
on_conflict condition type for table "storage.virus"
"""
input virus_on_conflict {
constraint: virus_constraint!
update_columns: [virus_update_column!]! = []
where: virus_bool_exp
}
"""
Ordering options when selecting data from "storage.virus".
"""
input virus_order_by {
createdAt: order_by
file: files_order_by
fileId: order_by
filename: order_by
id: order_by
updatedAt: order_by
userSession: order_by
virus: order_by
}
"""
primary key columns input for table: storage.virus
"""
input virus_pk_columns_input {
id: uuid!
}
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
input virus_prepend_input {
userSession: jsonb
}
"""
select columns of table "storage.virus"
"""
enum virus_select_column {
"""
column name
"""
createdAt
"""
column name
"""
fileId
"""
column name
"""
filename
"""
column name
"""
id
"""
column name
"""
updatedAt
"""
column name
"""
userSession
"""
column name
"""
virus
}
"""
input type for updating data in table "storage.virus"
"""
input virus_set_input {
createdAt: timestamptz
fileId: uuid
filename: String
id: uuid
updatedAt: timestamptz
userSession: jsonb
virus: String
}
"""
Streaming cursor of the table "virus"
"""
input virus_stream_cursor_input {
"""
Stream column input with initial value
"""
initial_value: virus_stream_cursor_value_input!
"""
cursor ordering
"""
ordering: cursor_ordering
}
"""
Initial value of the column from where the streaming should start
"""
input virus_stream_cursor_value_input {
createdAt: timestamptz
fileId: uuid
filename: String
id: uuid
updatedAt: timestamptz
userSession: jsonb
virus: String
}
"""
update columns of table "storage.virus"
"""
enum virus_update_column {
"""
column name
"""
createdAt
"""
column name
"""
fileId
"""
column name
"""
filename
"""
column name
"""
id
"""
column name
"""
updatedAt
"""
column name
"""
userSession
"""
column name
"""
virus
}
input virus_updates {
"""
append existing jsonb value of filtered columns with new jsonb value
"""
_append: virus_append_input
"""
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
"""
_delete_at_path: virus_delete_at_path_input
"""
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
"""
_delete_elem: virus_delete_elem_input
"""
delete key/value pair or string element. key/value pairs are matched based on their key value
"""
_delete_key: virus_delete_key_input
"""
prepend existing jsonb value of filtered columns with new jsonb value
"""
_prepend: virus_prepend_input
"""
sets the columns of the filtered rows to the given values
"""
_set: virus_set_input
"""
filter the rows which have to be updated
"""
where: virus_bool_exp!
}