Documentation ¶
Index ¶
- Constants
- Variables
- func WithHeaders(header http.Header) clientv2.RequestInterceptor
- type BooleanComparisonExp
- type BucketMetadataFragment
- func (t *BucketMetadataFragment) GetCacheControl() *string
- func (t *BucketMetadataFragment) GetCreatedAt() string
- func (t *BucketMetadataFragment) GetDownloadExpiration() int64
- func (t *BucketMetadataFragment) GetID() string
- func (t *BucketMetadataFragment) GetMaxUploadFileSize() int64
- func (t *BucketMetadataFragment) GetMinUploadFileSize() int64
- func (t *BucketMetadataFragment) GetPresignedUrlsEnabled() bool
- func (t *BucketMetadataFragment) GetUpdatedAt() string
- func (md *BucketMetadataFragment) ToControllerType() controller.BucketMetadata
- type Buckets
- type BucketsAggregate
- type BucketsAggregateFields
- type BucketsAvgFields
- type BucketsBoolExp
- type BucketsConstraint
- type BucketsIncInput
- type BucketsInsertInput
- type BucketsMaxFields
- type BucketsMinFields
- type BucketsMutationResponse
- type BucketsObjRelInsertInput
- type BucketsOnConflict
- type BucketsOrderBy
- type BucketsPkColumnsInput
- type BucketsSelectColumn
- type BucketsSetInput
- type BucketsStddevFields
- type BucketsStddevPopFields
- type BucketsStddevSampFields
- type BucketsStreamCursorInput
- type BucketsStreamCursorValueInput
- type BucketsSumFields
- type BucketsUpdateColumn
- type BucketsUpdates
- type BucketsVarPopFields
- type BucketsVarSampFields
- type BucketsVarianceFields
- type Client
- func (c *Client) DeleteFile(ctx context.Context, id string, interceptors ...clientv2.RequestInterceptor) (*DeleteFile, error)
- func (c *Client) GetBucket(ctx context.Context, id string, interceptors ...clientv2.RequestInterceptor) (*GetBucket, error)
- func (c *Client) GetFile(ctx context.Context, id string, interceptors ...clientv2.RequestInterceptor) (*GetFile, error)
- func (c *Client) InsertFile(ctx context.Context, object FilesInsertInput, ...) (*InsertFile, error)
- func (c *Client) InsertVirus(ctx context.Context, object VirusInsertInput, ...) (*InsertVirus, error)
- func (c *Client) ListFilesSummary(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*ListFilesSummary, error)
- func (c *Client) UpdateFile(ctx context.Context, id string, set FilesSetInput, ...) (*UpdateFile, error)
- type CursorOrdering
- type DeleteFile
- type DeleteFile_DeleteFile
- type FileMetadataFragment
- func (t *FileMetadataFragment) GetBucketID() string
- func (t *FileMetadataFragment) GetCreatedAt() string
- func (t *FileMetadataFragment) GetEtag() *string
- func (t *FileMetadataFragment) GetID() string
- func (t *FileMetadataFragment) GetIsUploaded() *bool
- func (t *FileMetadataFragment) GetMetadata() map[string]interface{}
- func (t *FileMetadataFragment) GetMimeType() *string
- func (t *FileMetadataFragment) GetName() *string
- func (t *FileMetadataFragment) GetSize() *int64
- func (t *FileMetadataFragment) GetUpdatedAt() string
- func (t *FileMetadataFragment) GetUploadedByUserID() *string
- func (md *FileMetadataFragment) ToControllerType() controller.FileMetadata
- type FileMetadataSummaryFragment
- func (t *FileMetadataSummaryFragment) GetBucketID() string
- func (t *FileMetadataSummaryFragment) GetID() string
- func (t *FileMetadataSummaryFragment) GetIsUploaded() *bool
- func (t *FileMetadataSummaryFragment) GetName() *string
- func (md *FileMetadataSummaryFragment) ToControllerType() controller.FileSummary
- type Files
- type FilesAggregate
- type FilesAggregateBoolExp
- type FilesAggregateBoolExpBoolAnd
- type FilesAggregateBoolExpBoolOr
- type FilesAggregateBoolExpCount
- type FilesAggregateFields
- type FilesAggregateOrderBy
- type FilesAppendInput
- type FilesArrRelInsertInput
- type FilesAvgFields
- type FilesAvgOrderBy
- type FilesBoolExp
- type FilesConstraint
- type FilesDeleteAtPathInput
- type FilesDeleteElemInput
- type FilesDeleteKeyInput
- type FilesIncInput
- type FilesInsertInput
- type FilesMaxFields
- type FilesMaxOrderBy
- type FilesMinFields
- type FilesMinOrderBy
- type FilesMutationResponse
- type FilesObjRelInsertInput
- type FilesOnConflict
- type FilesOrderBy
- type FilesPkColumnsInput
- type FilesPrependInput
- type FilesSelectColumn
- type FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns
- func (e FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns) IsValid() bool
- func (e FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns) MarshalGQL(w io.Writer)
- func (e FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns) String() string
- func (e *FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns) UnmarshalGQL(v interface{}) error
- type FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns
- func (e FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns) IsValid() bool
- func (e FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns) MarshalGQL(w io.Writer)
- func (e FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns) String() string
- func (e *FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns) UnmarshalGQL(v interface{}) error
- type FilesSetInput
- type FilesStddevFields
- type FilesStddevOrderBy
- type FilesStddevPopFields
- type FilesStddevPopOrderBy
- type FilesStddevSampFields
- type FilesStddevSampOrderBy
- type FilesStreamCursorInput
- type FilesStreamCursorValueInput
- type FilesSumFields
- type FilesSumOrderBy
- type FilesUpdateColumn
- type FilesUpdates
- type FilesVarPopFields
- type FilesVarPopOrderBy
- type FilesVarSampFields
- type FilesVarSampOrderBy
- type FilesVarianceFields
- type FilesVarianceOrderBy
- type GetBucket
- type GetFile
- type Hasura
- func (h *Hasura) DeleteFileByID(ctx context.Context, fileID string, headers http.Header) *controller.APIError
- func (h *Hasura) GetBucketByID(ctx context.Context, bucketID string, headers http.Header) (controller.BucketMetadata, *controller.APIError)
- func (h *Hasura) GetFileByID(ctx context.Context, fileID string, headers http.Header) (controller.FileMetadata, *controller.APIError)
- func (h *Hasura) InitializeFile(ctx context.Context, fileID, name string, size int64, ...) *controller.APIError
- func (h *Hasura) InsertVirus(ctx context.Context, fileID, filename, virus string, ...) *controller.APIError
- func (h *Hasura) ListFiles(ctx context.Context, headers http.Header) ([]controller.FileSummary, *controller.APIError)
- func (h *Hasura) PopulateMetadata(ctx context.Context, fileID, name string, size int64, bucketID, etag string, ...) (controller.FileMetadata, *controller.APIError)
- func (h *Hasura) SetIsUploaded(ctx context.Context, fileID string, isUploaded bool, headers http.Header) *controller.APIError
- type InsertFile
- type InsertFile_InsertFile
- type InsertVirus
- type InsertVirus_InsertVirus
- type IntComparisonExp
- type JsonbCastExp
- type JsonbComparisonExp
- type ListFilesSummary
- type MutationRoot
- type OrderBy
- type QueryRoot
- type StringComparisonExp
- type SubscriptionRoot
- type TimestamptzComparisonExp
- type UUIDComparisonExp
- type UpdateFile
- type Virus
- type VirusAggregate
- type VirusAggregateFields
- type VirusAppendInput
- type VirusBoolExp
- type VirusConstraint
- type VirusDeleteAtPathInput
- type VirusDeleteElemInput
- type VirusDeleteKeyInput
- type VirusInsertInput
- type VirusMaxFields
- type VirusMinFields
- type VirusMutationResponse
- type VirusOnConflict
- type VirusOrderBy
- type VirusPkColumnsInput
- type VirusPrependInput
- type VirusSelectColumn
- type VirusSetInput
- type VirusStreamCursorInput
- type VirusStreamCursorValueInput
- type VirusUpdateColumn
- type VirusUpdates
Constants ¶
const DeleteFileDocument = `mutation DeleteFile ($id: uuid!) {
deleteFile(id: $id) {
id
}
}
`
const GetBucketDocument = `` /* 253-byte string literal not displayed */
const GetFileDocument = `` /* 225-byte string literal not displayed */
const InsertFileDocument = `mutation InsertFile ($object: files_insert_input!) {
insertFile(object: $object) {
id
}
}
`
const InsertVirusDocument = `mutation InsertVirus ($object: virus_insert_input!) {
insertVirus(object: $object) {
id
}
}
`
const ListFilesSummaryDocument = `` /* 155-byte string literal not displayed */
const UpdateFileDocument = `` /* 288-byte string literal not displayed */
Variables ¶
var AllBucketsConstraint = []BucketsConstraint{ BucketsConstraintBucketsPkey, }
var AllBucketsSelectColumn = []BucketsSelectColumn{ BucketsSelectColumnCacheControl, BucketsSelectColumnCreatedAt, BucketsSelectColumnDownloadExpiration, BucketsSelectColumnID, BucketsSelectColumnMaxUploadFileSize, BucketsSelectColumnMinUploadFileSize, BucketsSelectColumnPresignedUrlsEnabled, BucketsSelectColumnUpdatedAt, }
var AllBucketsUpdateColumn = []BucketsUpdateColumn{ BucketsUpdateColumnCacheControl, BucketsUpdateColumnCreatedAt, BucketsUpdateColumnDownloadExpiration, BucketsUpdateColumnID, BucketsUpdateColumnMaxUploadFileSize, BucketsUpdateColumnMinUploadFileSize, BucketsUpdateColumnPresignedUrlsEnabled, BucketsUpdateColumnUpdatedAt, }
var AllCursorOrdering = []CursorOrdering{ CursorOrderingAsc, CursorOrderingDesc, }
var AllFilesConstraint = []FilesConstraint{ FilesConstraintFilesPkey, }
var AllFilesSelectColumn = []FilesSelectColumn{ FilesSelectColumnBucketID, FilesSelectColumnCreatedAt, FilesSelectColumnEtag, FilesSelectColumnID, FilesSelectColumnIsUploaded, FilesSelectColumnMetadata, FilesSelectColumnMimeType, FilesSelectColumnName, FilesSelectColumnSize, FilesSelectColumnUpdatedAt, FilesSelectColumnUploadedByUserID, }
var AllFilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns = []FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns{ FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumnsIsUploaded, }
var AllFilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns = []FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns{ FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumnsIsUploaded, }
var AllFilesUpdateColumn = []FilesUpdateColumn{ FilesUpdateColumnBucketID, FilesUpdateColumnCreatedAt, FilesUpdateColumnEtag, FilesUpdateColumnID, FilesUpdateColumnIsUploaded, FilesUpdateColumnMetadata, FilesUpdateColumnMimeType, FilesUpdateColumnName, FilesUpdateColumnSize, FilesUpdateColumnUpdatedAt, FilesUpdateColumnUploadedByUserID, }
var AllOrderBy = []OrderBy{ OrderByAsc, OrderByAscNullsFirst, OrderByAscNullsLast, OrderByDesc, OrderByDescNullsFirst, OrderByDescNullsLast, }
var AllVirusConstraint = []VirusConstraint{ VirusConstraintVirusPkey, }
var AllVirusSelectColumn = []VirusSelectColumn{ VirusSelectColumnCreatedAt, VirusSelectColumnFileID, VirusSelectColumnFilename, VirusSelectColumnID, VirusSelectColumnUpdatedAt, VirusSelectColumnUserSession, VirusSelectColumnVirus, }
var AllVirusUpdateColumn = []VirusUpdateColumn{ VirusUpdateColumnCreatedAt, VirusUpdateColumnFileID, VirusUpdateColumnFilename, VirusUpdateColumnID, VirusUpdateColumnUpdatedAt, VirusUpdateColumnUserSession, VirusUpdateColumnVirus, }
var DocumentOperationNames = map[string]string{ GetBucketDocument: "GetBucket", GetFileDocument: "GetFile", ListFilesSummaryDocument: "ListFilesSummary", InsertFileDocument: "InsertFile", UpdateFileDocument: "UpdateFile", DeleteFileDocument: "DeleteFile", InsertVirusDocument: "InsertVirus", }
Functions ¶
func WithHeaders ¶ added in v0.4.0
func WithHeaders(header http.Header) clientv2.RequestInterceptor
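Illustrative sketch (not part of the generated API): WithHeaders turns a set of HTTP headers into a per-request interceptor, which is the usual way to forward the caller's Authorization header to Hasura. The package clause, the helper name getFileAs and the pre-built *Client value are assumptions for illustration only.
package client // assumed package name; illustrative sketch only

import (
    "context"
    "net/http"
)

// getFileAs is a hypothetical helper: it forwards the caller's bearer token
// to Hasura for a single GetFile call. It assumes c was constructed elsewhere.
func getFileAs(ctx context.Context, c *Client, fileID, token string) (*GetFile, error) {
    hdr := make(http.Header)
    hdr.Set("Authorization", "Bearer "+token)

    // WithHeaders wraps the headers in a clientv2.RequestInterceptor that is
    // applied to this request only.
    return c.GetFile(ctx, fileID, WithHeaders(hdr))
}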
Types ¶
type BooleanComparisonExp ¶ added in v0.4.0
type BooleanComparisonExp struct {
    Eq *bool `json:"_eq,omitempty"`
    Gt *bool `json:"_gt,omitempty"`
    Gte *bool `json:"_gte,omitempty"`
    In []bool `json:"_in,omitempty"`
    IsNull *bool `json:"_is_null,omitempty"`
    Lt *bool `json:"_lt,omitempty"`
    Lte *bool `json:"_lte,omitempty"`
    Neq *bool `json:"_neq,omitempty"`
    Nin []bool `json:"_nin,omitempty"`
}
Boolean expression to compare columns of type "Boolean". All fields are combined with logical 'AND'.
type BucketMetadataFragment ¶ added in v0.4.0
type BucketMetadataFragment struct {
    ID string "json:\"id\" graphql:\"id\""
    MinUploadFileSize int64 "json:\"minUploadFileSize\" graphql:\"minUploadFileSize\""
    MaxUploadFileSize int64 "json:\"maxUploadFileSize\" graphql:\"maxUploadFileSize\""
    PresignedUrlsEnabled bool "json:\"presignedUrlsEnabled\" graphql:\"presignedUrlsEnabled\""
    DownloadExpiration int64 "json:\"downloadExpiration\" graphql:\"downloadExpiration\""
    CreatedAt string "json:\"createdAt\" graphql:\"createdAt\""
    UpdatedAt string "json:\"updatedAt\" graphql:\"updatedAt\""
    CacheControl *string "json:\"cacheControl,omitempty\" graphql:\"cacheControl\""
}
func (*BucketMetadataFragment) GetCacheControl ¶ added in v0.4.0
func (t *BucketMetadataFragment) GetCacheControl() *string
func (*BucketMetadataFragment) GetCreatedAt ¶ added in v0.4.0
func (t *BucketMetadataFragment) GetCreatedAt() string
func (*BucketMetadataFragment) GetDownloadExpiration ¶ added in v0.4.0
func (t *BucketMetadataFragment) GetDownloadExpiration() int64
func (*BucketMetadataFragment) GetID ¶ added in v0.4.0
func (t *BucketMetadataFragment) GetID() string
func (*BucketMetadataFragment) GetMaxUploadFileSize ¶ added in v0.4.0
func (t *BucketMetadataFragment) GetMaxUploadFileSize() int64
func (*BucketMetadataFragment) GetMinUploadFileSize ¶ added in v0.4.0
func (t *BucketMetadataFragment) GetMinUploadFileSize() int64
func (*BucketMetadataFragment) GetPresignedUrlsEnabled ¶ added in v0.4.0
func (t *BucketMetadataFragment) GetPresignedUrlsEnabled() bool
func (*BucketMetadataFragment) GetUpdatedAt ¶ added in v0.4.0
func (t *BucketMetadataFragment) GetUpdatedAt() string
func (*BucketMetadataFragment) ToControllerType ¶ added in v0.4.0
func (md *BucketMetadataFragment) ToControllerType() controller.BucketMetadata
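Illustrative sketch (not part of the generated API): ToControllerType is the bridge from the generated GraphQL fragment to the controller-level type. The package clause, the helper name bucketMetadata and the pre-built *Client are assumptions; GetBucket and the GetBucket accessor are taken from the method listings in this document.
package client // assumed package name; illustrative sketch only

import (
    "context"
    "fmt"
)

// bucketMetadata is a hypothetical helper: it fetches a bucket through the
// generated client and converts the fragment into the controller-level type.
func bucketMetadata(ctx context.Context, c *Client, bucketID string) error {
    resp, err := c.GetBucket(ctx, bucketID)
    if err != nil {
        return err
    }
    frag := resp.GetBucket() // nil when no bucket matches the id
    if frag == nil {
        return fmt.Errorf("bucket %q not found", bucketID)
    }
    md := frag.ToControllerType() // controller.BucketMetadata
    fmt.Printf("%+v\n", md)
    return nil
}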
type Buckets ¶ added in v0.4.0
type Buckets struct {
    CacheControl *string `json:"cacheControl,omitempty"`
    CreatedAt string `json:"createdAt"`
    DownloadExpiration int64 `json:"downloadExpiration"`
    // An array relationship
    Files []*Files `json:"files"`
    // An aggregate relationship
    FilesAggregate FilesAggregate `json:"files_aggregate"`
    ID string `json:"id"`
    MaxUploadFileSize int64 `json:"maxUploadFileSize"`
    MinUploadFileSize int64 `json:"minUploadFileSize"`
    PresignedUrlsEnabled bool `json:"presignedUrlsEnabled"`
    UpdatedAt string `json:"updatedAt"`
}
columns and relationships of "storage.buckets"
type BucketsAggregate ¶ added in v0.4.0
type BucketsAggregate struct { Aggregate *BucketsAggregateFields `json:"aggregate,omitempty"` Nodes []*Buckets `json:"nodes"` }
aggregated selection of "storage.buckets"
type BucketsAggregateFields ¶ added in v0.4.0
type BucketsAggregateFields struct { Avg *BucketsAvgFields `json:"avg,omitempty"` Count int64 `json:"count"` Max *BucketsMaxFields `json:"max,omitempty"` Min *BucketsMinFields `json:"min,omitempty"` Stddev *BucketsStddevFields `json:"stddev,omitempty"` StddevPop *BucketsStddevPopFields `json:"stddev_pop,omitempty"` StddevSamp *BucketsStddevSampFields `json:"stddev_samp,omitempty"` Sum *BucketsSumFields `json:"sum,omitempty"` VarPop *BucketsVarPopFields `json:"var_pop,omitempty"` VarSamp *BucketsVarSampFields `json:"var_samp,omitempty"` Variance *BucketsVarianceFields `json:"variance,omitempty"` }
aggregate fields of "storage.buckets"
type BucketsAvgFields ¶ added in v0.4.0
type BucketsAvgFields struct { DownloadExpiration *float64 `json:"downloadExpiration,omitempty"` MaxUploadFileSize *float64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *float64 `json:"minUploadFileSize,omitempty"` }
aggregate avg on columns
type BucketsBoolExp ¶ added in v0.4.0
type BucketsBoolExp struct {
    And []*BucketsBoolExp `json:"_and,omitempty"`
    Not *BucketsBoolExp `json:"_not,omitempty"`
    Or []*BucketsBoolExp `json:"_or,omitempty"`
    CacheControl *StringComparisonExp `json:"cacheControl,omitempty"`
    CreatedAt *TimestamptzComparisonExp `json:"createdAt,omitempty"`
    DownloadExpiration *IntComparisonExp `json:"downloadExpiration,omitempty"`
    Files *FilesBoolExp `json:"files,omitempty"`
    FilesAggregate *FilesAggregateBoolExp `json:"files_aggregate,omitempty"`
    ID *StringComparisonExp `json:"id,omitempty"`
    MaxUploadFileSize *IntComparisonExp `json:"maxUploadFileSize,omitempty"`
    MinUploadFileSize *IntComparisonExp `json:"minUploadFileSize,omitempty"`
    PresignedUrlsEnabled *BooleanComparisonExp `json:"presignedUrlsEnabled,omitempty"`
    UpdatedAt *TimestamptzComparisonExp `json:"updatedAt,omitempty"`
}
Boolean expression to filter rows from the table "storage.buckets". All fields are combined with a logical 'AND'.
type BucketsConstraint ¶ added in v0.4.0
type BucketsConstraint string
unique or primary key constraints on table "storage.buckets"
const (
    // unique or primary key constraint on columns "id"
    BucketsConstraintBucketsPkey BucketsConstraint = "buckets_pkey"
)
func (BucketsConstraint) IsValid ¶ added in v0.4.0
func (e BucketsConstraint) IsValid() bool
func (BucketsConstraint) MarshalGQL ¶ added in v0.4.0
func (e BucketsConstraint) MarshalGQL(w io.Writer)
func (BucketsConstraint) String ¶ added in v0.4.0
func (e BucketsConstraint) String() string
func (*BucketsConstraint) UnmarshalGQL ¶ added in v0.4.0
func (e *BucketsConstraint) UnmarshalGQL(v interface{}) error
type BucketsIncInput ¶ added in v0.4.0
type BucketsIncInput struct { DownloadExpiration *int64 `json:"downloadExpiration,omitempty"` MaxUploadFileSize *int64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *int64 `json:"minUploadFileSize,omitempty"` }
input type for incrementing numeric columns in table "storage.buckets"
type BucketsInsertInput ¶ added in v0.4.0
type BucketsInsertInput struct { CacheControl *string `json:"cacheControl,omitempty"` CreatedAt *string `json:"createdAt,omitempty"` DownloadExpiration *int64 `json:"downloadExpiration,omitempty"` Files *FilesArrRelInsertInput `json:"files,omitempty"` ID *string `json:"id,omitempty"` MaxUploadFileSize *int64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *int64 `json:"minUploadFileSize,omitempty"` PresignedUrlsEnabled *bool `json:"presignedUrlsEnabled,omitempty"` UpdatedAt *string `json:"updatedAt,omitempty"` }
input type for inserting data into table "storage.buckets"
type BucketsMaxFields ¶ added in v0.4.0
type BucketsMaxFields struct { CacheControl *string `json:"cacheControl,omitempty"` CreatedAt *string `json:"createdAt,omitempty"` DownloadExpiration *int64 `json:"downloadExpiration,omitempty"` ID *string `json:"id,omitempty"` MaxUploadFileSize *int64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *int64 `json:"minUploadFileSize,omitempty"` UpdatedAt *string `json:"updatedAt,omitempty"` }
aggregate max on columns
type BucketsMinFields ¶ added in v0.4.0
type BucketsMinFields struct { CacheControl *string `json:"cacheControl,omitempty"` CreatedAt *string `json:"createdAt,omitempty"` DownloadExpiration *int64 `json:"downloadExpiration,omitempty"` ID *string `json:"id,omitempty"` MaxUploadFileSize *int64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *int64 `json:"minUploadFileSize,omitempty"` UpdatedAt *string `json:"updatedAt,omitempty"` }
aggregate min on columns
type BucketsMutationResponse ¶ added in v0.4.0
type BucketsMutationResponse struct { // number of rows affected by the mutation AffectedRows int64 `json:"affected_rows"` // data from the rows affected by the mutation Returning []*Buckets `json:"returning"` }
response of any mutation on the table "storage.buckets"
type BucketsObjRelInsertInput ¶ added in v0.4.0
type BucketsObjRelInsertInput struct { Data BucketsInsertInput `json:"data"` // upsert condition OnConflict *BucketsOnConflict `json:"on_conflict,omitempty"` }
input type for inserting object relation for remote table "storage.buckets"
type BucketsOnConflict ¶ added in v0.4.0
type BucketsOnConflict struct { Constraint BucketsConstraint `json:"constraint"` UpdateColumns []BucketsUpdateColumn `json:"update_columns"` Where *BucketsBoolExp `json:"where,omitempty"` }
on_conflict condition type for table "storage.buckets"
type BucketsOrderBy ¶ added in v0.4.0
type BucketsOrderBy struct { CacheControl *OrderBy `json:"cacheControl,omitempty"` CreatedAt *OrderBy `json:"createdAt,omitempty"` DownloadExpiration *OrderBy `json:"downloadExpiration,omitempty"` FilesAggregate *FilesAggregateOrderBy `json:"files_aggregate,omitempty"` ID *OrderBy `json:"id,omitempty"` MaxUploadFileSize *OrderBy `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *OrderBy `json:"minUploadFileSize,omitempty"` PresignedUrlsEnabled *OrderBy `json:"presignedUrlsEnabled,omitempty"` UpdatedAt *OrderBy `json:"updatedAt,omitempty"` }
Ordering options when selecting data from "storage.buckets".
type BucketsPkColumnsInput ¶ added in v0.4.0
type BucketsPkColumnsInput struct {
ID string `json:"id"`
}
primary key columns input for table: storage.buckets
type BucketsSelectColumn ¶ added in v0.4.0
type BucketsSelectColumn string
select columns of table "storage.buckets"
const ( // column name BucketsSelectColumnCacheControl BucketsSelectColumn = "cacheControl" // column name BucketsSelectColumnCreatedAt BucketsSelectColumn = "createdAt" // column name BucketsSelectColumnDownloadExpiration BucketsSelectColumn = "downloadExpiration" // column name BucketsSelectColumnID BucketsSelectColumn = "id" // column name BucketsSelectColumnMaxUploadFileSize BucketsSelectColumn = "maxUploadFileSize" // column name BucketsSelectColumnMinUploadFileSize BucketsSelectColumn = "minUploadFileSize" // column name BucketsSelectColumnPresignedUrlsEnabled BucketsSelectColumn = "presignedUrlsEnabled" // column name BucketsSelectColumnUpdatedAt BucketsSelectColumn = "updatedAt" )
func (BucketsSelectColumn) IsValid ¶ added in v0.4.0
func (e BucketsSelectColumn) IsValid() bool
func (BucketsSelectColumn) MarshalGQL ¶ added in v0.4.0
func (e BucketsSelectColumn) MarshalGQL(w io.Writer)
func (BucketsSelectColumn) String ¶ added in v0.4.0
func (e BucketsSelectColumn) String() string
func (*BucketsSelectColumn) UnmarshalGQL ¶ added in v0.4.0
func (e *BucketsSelectColumn) UnmarshalGQL(v interface{}) error
type BucketsSetInput ¶ added in v0.4.0
type BucketsSetInput struct { CacheControl *string `json:"cacheControl,omitempty"` CreatedAt *string `json:"createdAt,omitempty"` DownloadExpiration *int64 `json:"downloadExpiration,omitempty"` ID *string `json:"id,omitempty"` MaxUploadFileSize *int64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *int64 `json:"minUploadFileSize,omitempty"` PresignedUrlsEnabled *bool `json:"presignedUrlsEnabled,omitempty"` UpdatedAt *string `json:"updatedAt,omitempty"` }
input type for updating data in table "storage.buckets"
type BucketsStddevFields ¶ added in v0.4.0
type BucketsStddevFields struct { DownloadExpiration *float64 `json:"downloadExpiration,omitempty"` MaxUploadFileSize *float64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *float64 `json:"minUploadFileSize,omitempty"` }
aggregate stddev on columns
type BucketsStddevPopFields ¶ added in v0.4.0
type BucketsStddevPopFields struct { DownloadExpiration *float64 `json:"downloadExpiration,omitempty"` MaxUploadFileSize *float64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *float64 `json:"minUploadFileSize,omitempty"` }
aggregate stddev_pop on columns
type BucketsStddevSampFields ¶ added in v0.4.0
type BucketsStddevSampFields struct { DownloadExpiration *float64 `json:"downloadExpiration,omitempty"` MaxUploadFileSize *float64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *float64 `json:"minUploadFileSize,omitempty"` }
aggregate stddev_samp on columns
type BucketsStreamCursorInput ¶ added in v0.4.0
type BucketsStreamCursorInput struct { // Stream column input with initial value InitialValue BucketsStreamCursorValueInput `json:"initial_value"` // cursor ordering Ordering *CursorOrdering `json:"ordering,omitempty"` }
Streaming cursor of the table "buckets"
type BucketsStreamCursorValueInput ¶ added in v0.4.0
type BucketsStreamCursorValueInput struct { CacheControl *string `json:"cacheControl,omitempty"` CreatedAt *string `json:"createdAt,omitempty"` DownloadExpiration *int64 `json:"downloadExpiration,omitempty"` ID *string `json:"id,omitempty"` MaxUploadFileSize *int64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *int64 `json:"minUploadFileSize,omitempty"` PresignedUrlsEnabled *bool `json:"presignedUrlsEnabled,omitempty"` UpdatedAt *string `json:"updatedAt,omitempty"` }
Initial value of the column from where the streaming should start
type BucketsSumFields ¶ added in v0.4.0
type BucketsSumFields struct { DownloadExpiration *int64 `json:"downloadExpiration,omitempty"` MaxUploadFileSize *int64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *int64 `json:"minUploadFileSize,omitempty"` }
aggregate sum on columns
type BucketsUpdateColumn ¶ added in v0.4.0
type BucketsUpdateColumn string
update columns of table "storage.buckets"
const ( // column name BucketsUpdateColumnCacheControl BucketsUpdateColumn = "cacheControl" // column name BucketsUpdateColumnCreatedAt BucketsUpdateColumn = "createdAt" // column name BucketsUpdateColumnDownloadExpiration BucketsUpdateColumn = "downloadExpiration" // column name BucketsUpdateColumnID BucketsUpdateColumn = "id" // column name BucketsUpdateColumnMaxUploadFileSize BucketsUpdateColumn = "maxUploadFileSize" // column name BucketsUpdateColumnMinUploadFileSize BucketsUpdateColumn = "minUploadFileSize" // column name BucketsUpdateColumnPresignedUrlsEnabled BucketsUpdateColumn = "presignedUrlsEnabled" // column name BucketsUpdateColumnUpdatedAt BucketsUpdateColumn = "updatedAt" )
func (BucketsUpdateColumn) IsValid ¶ added in v0.4.0
func (e BucketsUpdateColumn) IsValid() bool
func (BucketsUpdateColumn) MarshalGQL ¶ added in v0.4.0
func (e BucketsUpdateColumn) MarshalGQL(w io.Writer)
func (BucketsUpdateColumn) String ¶ added in v0.4.0
func (e BucketsUpdateColumn) String() string
func (*BucketsUpdateColumn) UnmarshalGQL ¶ added in v0.4.0
func (e *BucketsUpdateColumn) UnmarshalGQL(v interface{}) error
type BucketsUpdates ¶ added in v0.4.0
type BucketsUpdates struct {
    // increments the numeric columns with given value of the filtered values
    Inc *BucketsIncInput `json:"_inc,omitempty"`
    // sets the columns of the filtered rows to the given values
    Set *BucketsSetInput `json:"_set,omitempty"`
    // filter the rows which have to be updated
    Where BucketsBoolExp `json:"where"`
}
type BucketsVarPopFields ¶ added in v0.4.0
type BucketsVarPopFields struct { DownloadExpiration *float64 `json:"downloadExpiration,omitempty"` MaxUploadFileSize *float64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *float64 `json:"minUploadFileSize,omitempty"` }
aggregate var_pop on columns
type BucketsVarSampFields ¶ added in v0.4.0
type BucketsVarSampFields struct { DownloadExpiration *float64 `json:"downloadExpiration,omitempty"` MaxUploadFileSize *float64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *float64 `json:"minUploadFileSize,omitempty"` }
aggregate var_samp on columns
type BucketsVarianceFields ¶ added in v0.4.0
type BucketsVarianceFields struct { DownloadExpiration *float64 `json:"downloadExpiration,omitempty"` MaxUploadFileSize *float64 `json:"maxUploadFileSize,omitempty"` MinUploadFileSize *float64 `json:"minUploadFileSize,omitempty"` }
aggregate variance on columns
type Client ¶ added in v0.4.0
func (*Client) DeleteFile ¶ added in v0.4.0
func (c *Client) DeleteFile(ctx context.Context, id string, interceptors ...clientv2.RequestInterceptor) (*DeleteFile, error)
func (*Client) GetBucket ¶ added in v0.4.0
func (c *Client) GetBucket(ctx context.Context, id string, interceptors ...clientv2.RequestInterceptor) (*GetBucket, error)
func (*Client) GetFile ¶ added in v0.4.0
func (c *Client) GetFile(ctx context.Context, id string, interceptors ...clientv2.RequestInterceptor) (*GetFile, error)
func (*Client) InsertFile ¶ added in v0.4.0
func (c *Client) InsertFile(ctx context.Context, object FilesInsertInput, interceptors ...clientv2.RequestInterceptor) (*InsertFile, error)
func (*Client) InsertVirus ¶ added in v0.4.0
func (c *Client) InsertVirus(ctx context.Context, object VirusInsertInput, interceptors ...clientv2.RequestInterceptor) (*InsertVirus, error)
func (*Client) ListFilesSummary ¶ added in v0.4.0
func (c *Client) ListFilesSummary(ctx context.Context, interceptors ...clientv2.RequestInterceptor) (*ListFilesSummary, error)
func (*Client) UpdateFile ¶ added in v0.4.0
func (c *Client) UpdateFile(ctx context.Context, id string, set FilesSetInput, interceptors ...clientv2.RequestInterceptor) (*UpdateFile, error)
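Illustrative sketch (not part of the generated API): a typical mutation call against the methods listed above. The package clause, the helper name registerFile and the pre-built *Client are assumptions; only the listed InsertFile signature and the FilesInsertInput fields are taken from this document.
package client // assumed package name; illustrative sketch only

import (
    "context"
    "net/http"
)

// registerFile is a hypothetical helper: it inserts a files row that is not
// yet uploaded; a later UpdateFile call can flip isUploaded once the object
// itself has been stored.
func registerFile(ctx context.Context, c *Client, hdr http.Header, fileID, name, bucketID string) (*InsertFile, error) {
    uploaded := false
    object := FilesInsertInput{
        ID:         &fileID,
        Name:       &name,
        BucketID:   &bucketID,
        IsUploaded: &uploaded,
    }
    // Unset pointer fields carry omitempty json tags, so they are left out of
    // the mutation variables entirely.
    return c.InsertFile(ctx, object, WithHeaders(hdr))
}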
type CursorOrdering ¶ added in v0.4.0
type CursorOrdering string
ordering argument of a cursor
const (
    // ascending ordering of the cursor
    CursorOrderingAsc CursorOrdering = "ASC"
    // descending ordering of the cursor
    CursorOrderingDesc CursorOrdering = "DESC"
)
func (CursorOrdering) IsValid ¶ added in v0.4.0
func (e CursorOrdering) IsValid() bool
func (CursorOrdering) MarshalGQL ¶ added in v0.4.0
func (e CursorOrdering) MarshalGQL(w io.Writer)
func (CursorOrdering) String ¶ added in v0.4.0
func (e CursorOrdering) String() string
func (*CursorOrdering) UnmarshalGQL ¶ added in v0.4.0
func (e *CursorOrdering) UnmarshalGQL(v interface{}) error
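Illustrative sketch (not part of the generated API): the IsValid/String/MarshalGQL/UnmarshalGQL helpers have the same shape on every enum in this package; CursorOrdering is used here only because it is small. The package clause and the helper name describeOrdering are assumptions, and the exact bytes MarshalGQL writes are not asserted.
package client // assumed package name; illustrative sketch only

import (
    "bytes"
    "fmt"
)

// describeOrdering is a hypothetical helper showing the usual life cycle of a
// generated enum: validate, print, unmarshal from a raw GraphQL value and
// marshal back to the wire format.
func describeOrdering() error {
    ord := CursorOrderingAsc
    if !ord.IsValid() {
        return fmt.Errorf("unexpected invalid ordering %q", ord.String())
    }

    var parsed CursorOrdering
    // UnmarshalGQL receives the decoded GraphQL value; a plain string is the
    // common case for enums.
    if err := parsed.UnmarshalGQL("DESC"); err != nil {
        return err
    }

    var buf bytes.Buffer
    parsed.MarshalGQL(&buf) // writes the GraphQL encoding of DESC
    fmt.Println(ord.String(), buf.String())
    return nil
}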
type DeleteFile ¶ added in v0.4.0
type DeleteFile struct {
DeleteFile *DeleteFile_DeleteFile "json:\"deleteFile,omitempty\" graphql:\"deleteFile\""
}
func (*DeleteFile) GetDeleteFile ¶ added in v0.4.0
func (t *DeleteFile) GetDeleteFile() *DeleteFile_DeleteFile
type DeleteFile_DeleteFile ¶ added in v0.4.0
type DeleteFile_DeleteFile struct {
ID string "json:\"id\" graphql:\"id\""
}
func (*DeleteFile_DeleteFile) GetID ¶ added in v0.4.0
func (t *DeleteFile_DeleteFile) GetID() string
type FileMetadataFragment ¶ added in v0.4.0
type FileMetadataFragment struct {
    ID string "json:\"id\" graphql:\"id\""
    Name *string "json:\"name,omitempty\" graphql:\"name\""
    Size *int64 "json:\"size,omitempty\" graphql:\"size\""
    BucketID string "json:\"bucketId\" graphql:\"bucketId\""
    Etag *string "json:\"etag,omitempty\" graphql:\"etag\""
    CreatedAt string "json:\"createdAt\" graphql:\"createdAt\""
    UpdatedAt string "json:\"updatedAt\" graphql:\"updatedAt\""
    IsUploaded *bool "json:\"isUploaded,omitempty\" graphql:\"isUploaded\""
    MimeType *string "json:\"mimeType,omitempty\" graphql:\"mimeType\""
    UploadedByUserID *string "json:\"uploadedByUserId,omitempty\" graphql:\"uploadedByUserId\""
    Metadata map[string]interface{} "json:\"metadata,omitempty\" graphql:\"metadata\""
}
func (*FileMetadataFragment) GetBucketID ¶ added in v0.4.0
func (t *FileMetadataFragment) GetBucketID() string
func (*FileMetadataFragment) GetCreatedAt ¶ added in v0.4.0
func (t *FileMetadataFragment) GetCreatedAt() string
func (*FileMetadataFragment) GetEtag ¶ added in v0.4.0
func (t *FileMetadataFragment) GetEtag() *string
func (*FileMetadataFragment) GetID ¶ added in v0.4.0
func (t *FileMetadataFragment) GetID() string
func (*FileMetadataFragment) GetIsUploaded ¶ added in v0.4.0
func (t *FileMetadataFragment) GetIsUploaded() *bool
func (*FileMetadataFragment) GetMetadata ¶ added in v0.4.0
func (t *FileMetadataFragment) GetMetadata() map[string]interface{}
func (*FileMetadataFragment) GetMimeType ¶ added in v0.4.0
func (t *FileMetadataFragment) GetMimeType() *string
func (*FileMetadataFragment) GetName ¶ added in v0.4.0
func (t *FileMetadataFragment) GetName() *string
func (*FileMetadataFragment) GetSize ¶ added in v0.4.0
func (t *FileMetadataFragment) GetSize() *int64
func (*FileMetadataFragment) GetUpdatedAt ¶ added in v0.4.0
func (t *FileMetadataFragment) GetUpdatedAt() string
func (*FileMetadataFragment) GetUploadedByUserID ¶ added in v0.4.0
func (t *FileMetadataFragment) GetUploadedByUserID() *string
func (*FileMetadataFragment) ToControllerType ¶ added in v0.4.0
func (md *FileMetadataFragment) ToControllerType() controller.FileMetadata
type FileMetadataSummaryFragment ¶ added in v0.4.0
type FileMetadataSummaryFragment struct {
    ID string "json:\"id\" graphql:\"id\""
    Name *string "json:\"name,omitempty\" graphql:\"name\""
    BucketID string "json:\"bucketId\" graphql:\"bucketId\""
    IsUploaded *bool "json:\"isUploaded,omitempty\" graphql:\"isUploaded\""
}
func (*FileMetadataSummaryFragment) GetBucketID ¶ added in v0.4.0
func (t *FileMetadataSummaryFragment) GetBucketID() string
func (*FileMetadataSummaryFragment) GetID ¶ added in v0.4.0
func (t *FileMetadataSummaryFragment) GetID() string
func (*FileMetadataSummaryFragment) GetIsUploaded ¶ added in v0.4.0
func (t *FileMetadataSummaryFragment) GetIsUploaded() *bool
func (*FileMetadataSummaryFragment) GetName ¶ added in v0.4.0
func (t *FileMetadataSummaryFragment) GetName() *string
func (*FileMetadataSummaryFragment) ToControllerType ¶ added in v0.4.0
func (md *FileMetadataSummaryFragment) ToControllerType() controller.FileSummary
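Illustrative sketch (not part of the generated API): listing summaries and converting each fragment with ToControllerType. The package clause, the helper name listFiles and the pre-built *Client are assumptions; ListFilesSummary, GetFiles and ToControllerType are the signatures listed in this document.
package client // assumed package name; illustrative sketch only

import (
    "context"
    "fmt"
    "net/http"
)

// listFiles is a hypothetical helper: it lists the file summaries visible to
// the caller's headers and converts each fragment to the controller type.
func listFiles(ctx context.Context, c *Client, hdr http.Header) error {
    resp, err := c.ListFilesSummary(ctx, WithHeaders(hdr))
    if err != nil {
        return err
    }
    for _, frag := range resp.GetFiles() {
        fmt.Printf("%+v\n", frag.ToControllerType()) // controller.FileSummary
    }
    return nil
}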
type Files ¶ added in v0.4.0
type Files struct {
    // An object relationship
    Bucket Buckets `json:"bucket"`
    BucketID string `json:"bucketId"`
    CreatedAt string `json:"createdAt"`
    Etag *string `json:"etag,omitempty"`
    ID string `json:"id"`
    IsUploaded *bool `json:"isUploaded,omitempty"`
    Metadata map[string]interface{} `json:"metadata,omitempty"`
    MimeType *string `json:"mimeType,omitempty"`
    Name *string `json:"name,omitempty"`
    Size *int64 `json:"size,omitempty"`
    UpdatedAt string `json:"updatedAt"`
    UploadedByUserID *string `json:"uploadedByUserId,omitempty"`
}
columns and relationships of "storage.files"
type FilesAggregate ¶ added in v0.4.0
type FilesAggregate struct { Aggregate *FilesAggregateFields `json:"aggregate,omitempty"` Nodes []*Files `json:"nodes"` }
aggregated selection of "storage.files"
type FilesAggregateBoolExp ¶ added in v0.4.0
type FilesAggregateBoolExp struct { BoolAnd *FilesAggregateBoolExpBoolAnd `json:"bool_and,omitempty"` BoolOr *FilesAggregateBoolExpBoolOr `json:"bool_or,omitempty"` Count *FilesAggregateBoolExpCount `json:"count,omitempty"` }
type FilesAggregateBoolExpBoolAnd ¶ added in v0.4.0
type FilesAggregateBoolExpBoolAnd struct { Arguments FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns `json:"arguments"` Distinct *bool `json:"distinct,omitempty"` Filter *FilesBoolExp `json:"filter,omitempty"` Predicate BooleanComparisonExp `json:"predicate"` }
type FilesAggregateBoolExpBoolOr ¶ added in v0.4.0
type FilesAggregateBoolExpBoolOr struct { Arguments FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns `json:"arguments"` Distinct *bool `json:"distinct,omitempty"` Filter *FilesBoolExp `json:"filter,omitempty"` Predicate BooleanComparisonExp `json:"predicate"` }
type FilesAggregateBoolExpCount ¶ added in v0.4.0
type FilesAggregateBoolExpCount struct { Arguments []FilesSelectColumn `json:"arguments,omitempty"` Distinct *bool `json:"distinct,omitempty"` Filter *FilesBoolExp `json:"filter,omitempty"` Predicate IntComparisonExp `json:"predicate"` }
type FilesAggregateFields ¶ added in v0.4.0
type FilesAggregateFields struct { Avg *FilesAvgFields `json:"avg,omitempty"` Count int64 `json:"count"` Max *FilesMaxFields `json:"max,omitempty"` Min *FilesMinFields `json:"min,omitempty"` Stddev *FilesStddevFields `json:"stddev,omitempty"` StddevPop *FilesStddevPopFields `json:"stddev_pop,omitempty"` StddevSamp *FilesStddevSampFields `json:"stddev_samp,omitempty"` Sum *FilesSumFields `json:"sum,omitempty"` VarPop *FilesVarPopFields `json:"var_pop,omitempty"` VarSamp *FilesVarSampFields `json:"var_samp,omitempty"` Variance *FilesVarianceFields `json:"variance,omitempty"` }
aggregate fields of "storage.files"
type FilesAggregateOrderBy ¶ added in v0.4.0
type FilesAggregateOrderBy struct { Avg *FilesAvgOrderBy `json:"avg,omitempty"` Count *OrderBy `json:"count,omitempty"` Max *FilesMaxOrderBy `json:"max,omitempty"` Min *FilesMinOrderBy `json:"min,omitempty"` Stddev *FilesStddevOrderBy `json:"stddev,omitempty"` StddevPop *FilesStddevPopOrderBy `json:"stddev_pop,omitempty"` StddevSamp *FilesStddevSampOrderBy `json:"stddev_samp,omitempty"` Sum *FilesSumOrderBy `json:"sum,omitempty"` VarPop *FilesVarPopOrderBy `json:"var_pop,omitempty"` VarSamp *FilesVarSampOrderBy `json:"var_samp,omitempty"` Variance *FilesVarianceOrderBy `json:"variance,omitempty"` }
order by aggregate values of table "storage.files"
type FilesAppendInput ¶ added in v0.4.0
type FilesAppendInput struct {
Metadata map[string]interface{} `json:"metadata,omitempty"`
}
append existing jsonb value of filtered columns with new jsonb value
type FilesArrRelInsertInput ¶ added in v0.4.0
type FilesArrRelInsertInput struct { Data []*FilesInsertInput `json:"data"` // upsert condition OnConflict *FilesOnConflict `json:"on_conflict,omitempty"` }
input type for inserting array relation for remote table "storage.files"
type FilesAvgFields ¶ added in v0.4.0
type FilesAvgFields struct {
Size *float64 `json:"size,omitempty"`
}
aggregate avg on columns
type FilesAvgOrderBy ¶ added in v0.4.0
type FilesAvgOrderBy struct {
Size *OrderBy `json:"size,omitempty"`
}
order by avg() on columns of table "storage.files"
type FilesBoolExp ¶ added in v0.4.0
type FilesBoolExp struct {
    And []*FilesBoolExp `json:"_and,omitempty"`
    Not *FilesBoolExp `json:"_not,omitempty"`
    Or []*FilesBoolExp `json:"_or,omitempty"`
    Bucket *BucketsBoolExp `json:"bucket,omitempty"`
    BucketID *StringComparisonExp `json:"bucketId,omitempty"`
    CreatedAt *TimestamptzComparisonExp `json:"createdAt,omitempty"`
    Etag *StringComparisonExp `json:"etag,omitempty"`
    ID *UUIDComparisonExp `json:"id,omitempty"`
    IsUploaded *BooleanComparisonExp `json:"isUploaded,omitempty"`
    Metadata *JsonbComparisonExp `json:"metadata,omitempty"`
    MimeType *StringComparisonExp `json:"mimeType,omitempty"`
    Name *StringComparisonExp `json:"name,omitempty"`
    Size *IntComparisonExp `json:"size,omitempty"`
    UpdatedAt *TimestamptzComparisonExp `json:"updatedAt,omitempty"`
    UploadedByUserID *UUIDComparisonExp `json:"uploadedByUserId,omitempty"`
}
Boolean expression to filter rows from the table "storage.files". All fields are combined with a logical 'AND'.
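Illustrative sketch (not part of the generated API): because every comparison field is a pointer, filters are built from struct literals over addresses of locals. None of the listed Client methods take a FilesBoolExp directly, so this only constructs a filter and marshals it to show the boolean-expression JSON Hasura expects. The package clause and the helper name exampleWhere are assumptions.
package client // assumed package name; illustrative sketch only

import "encoding/json"

// exampleWhere is a hypothetical helper building the filter
// "isUploaded = true AND size >= 1024 AND metadata has key 'checksum'".
func exampleWhere() ([]byte, error) {
    uploaded := true
    minSize := int64(1024)
    checksumKey := "checksum"

    where := FilesBoolExp{
        And: []*FilesBoolExp{
            {IsUploaded: &BooleanComparisonExp{Eq: &uploaded}},
            {Size: &IntComparisonExp{Gte: &minSize}},
            {Metadata: &JsonbComparisonExp{HasKey: &checksumKey}},
        },
    }
    // The json tags on the generated types yield the _and/_eq/_gte/_has_key
    // keys used by Hasura boolean expressions.
    return json.Marshal(where)
}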
type FilesConstraint ¶ added in v0.4.0
type FilesConstraint string
unique or primary key constraints on table "storage.files"
const (
    // unique or primary key constraint on columns "id"
    FilesConstraintFilesPkey FilesConstraint = "files_pkey"
)
func (FilesConstraint) IsValid ¶ added in v0.4.0
func (e FilesConstraint) IsValid() bool
func (FilesConstraint) MarshalGQL ¶ added in v0.4.0
func (e FilesConstraint) MarshalGQL(w io.Writer)
func (FilesConstraint) String ¶ added in v0.4.0
func (e FilesConstraint) String() string
func (*FilesConstraint) UnmarshalGQL ¶ added in v0.4.0
func (e *FilesConstraint) UnmarshalGQL(v interface{}) error
type FilesDeleteAtPathInput ¶ added in v0.4.0
type FilesDeleteAtPathInput struct {
Metadata []string `json:"metadata,omitempty"`
}
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
type FilesDeleteElemInput ¶ added in v0.4.0
type FilesDeleteElemInput struct {
Metadata *int64 `json:"metadata,omitempty"`
}
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
type FilesDeleteKeyInput ¶ added in v0.4.0
type FilesDeleteKeyInput struct {
Metadata *string `json:"metadata,omitempty"`
}
delete key/value pair or string element. key/value pairs are matched based on their key value
type FilesIncInput ¶ added in v0.4.0
type FilesIncInput struct {
Size *int64 `json:"size,omitempty"`
}
input type for incrementing numeric columns in table "storage.files"
type FilesInsertInput ¶ added in v0.4.0
type FilesInsertInput struct {
    Bucket *BucketsObjRelInsertInput `json:"bucket,omitempty"`
    BucketID *string `json:"bucketId,omitempty"`
    CreatedAt *string `json:"createdAt,omitempty"`
    Etag *string `json:"etag,omitempty"`
    ID *string `json:"id,omitempty"`
    IsUploaded *bool `json:"isUploaded,omitempty"`
    Metadata map[string]interface{} `json:"metadata,omitempty"`
    MimeType *string `json:"mimeType,omitempty"`
    Name *string `json:"name,omitempty"`
    Size *int64 `json:"size,omitempty"`
    UpdatedAt *string `json:"updatedAt,omitempty"`
    UploadedByUserID *string `json:"uploadedByUserId,omitempty"`
}
input type for inserting data into table "storage.files"
type FilesMaxFields ¶ added in v0.4.0
type FilesMaxFields struct { BucketID *string `json:"bucketId,omitempty"` CreatedAt *string `json:"createdAt,omitempty"` Etag *string `json:"etag,omitempty"` ID *string `json:"id,omitempty"` MimeType *string `json:"mimeType,omitempty"` Name *string `json:"name,omitempty"` Size *int64 `json:"size,omitempty"` UpdatedAt *string `json:"updatedAt,omitempty"` UploadedByUserID *string `json:"uploadedByUserId,omitempty"` }
aggregate max on columns
type FilesMaxOrderBy ¶ added in v0.4.0
type FilesMaxOrderBy struct { BucketID *OrderBy `json:"bucketId,omitempty"` CreatedAt *OrderBy `json:"createdAt,omitempty"` Etag *OrderBy `json:"etag,omitempty"` ID *OrderBy `json:"id,omitempty"` MimeType *OrderBy `json:"mimeType,omitempty"` Name *OrderBy `json:"name,omitempty"` Size *OrderBy `json:"size,omitempty"` UpdatedAt *OrderBy `json:"updatedAt,omitempty"` UploadedByUserID *OrderBy `json:"uploadedByUserId,omitempty"` }
order by max() on columns of table "storage.files"
type FilesMinFields ¶ added in v0.4.0
type FilesMinFields struct { BucketID *string `json:"bucketId,omitempty"` CreatedAt *string `json:"createdAt,omitempty"` Etag *string `json:"etag,omitempty"` ID *string `json:"id,omitempty"` MimeType *string `json:"mimeType,omitempty"` Name *string `json:"name,omitempty"` Size *int64 `json:"size,omitempty"` UpdatedAt *string `json:"updatedAt,omitempty"` UploadedByUserID *string `json:"uploadedByUserId,omitempty"` }
aggregate min on columns
type FilesMinOrderBy ¶ added in v0.4.0
type FilesMinOrderBy struct { BucketID *OrderBy `json:"bucketId,omitempty"` CreatedAt *OrderBy `json:"createdAt,omitempty"` Etag *OrderBy `json:"etag,omitempty"` ID *OrderBy `json:"id,omitempty"` MimeType *OrderBy `json:"mimeType,omitempty"` Name *OrderBy `json:"name,omitempty"` Size *OrderBy `json:"size,omitempty"` UpdatedAt *OrderBy `json:"updatedAt,omitempty"` UploadedByUserID *OrderBy `json:"uploadedByUserId,omitempty"` }
order by min() on columns of table "storage.files"
type FilesMutationResponse ¶ added in v0.4.0
type FilesMutationResponse struct { // number of rows affected by the mutation AffectedRows int64 `json:"affected_rows"` // data from the rows affected by the mutation Returning []*Files `json:"returning"` }
response of any mutation on the table "storage.files"
type FilesObjRelInsertInput ¶ added in v0.4.0
type FilesObjRelInsertInput struct { Data FilesInsertInput `json:"data"` // upsert condition OnConflict *FilesOnConflict `json:"on_conflict,omitempty"` }
input type for inserting object relation for remote table "storage.files"
type FilesOnConflict ¶ added in v0.4.0
type FilesOnConflict struct { Constraint FilesConstraint `json:"constraint"` UpdateColumns []FilesUpdateColumn `json:"update_columns"` Where *FilesBoolExp `json:"where,omitempty"` }
on_conflict condition type for table "storage.files"
type FilesOrderBy ¶ added in v0.4.0
type FilesOrderBy struct { Bucket *BucketsOrderBy `json:"bucket,omitempty"` BucketID *OrderBy `json:"bucketId,omitempty"` CreatedAt *OrderBy `json:"createdAt,omitempty"` Etag *OrderBy `json:"etag,omitempty"` ID *OrderBy `json:"id,omitempty"` IsUploaded *OrderBy `json:"isUploaded,omitempty"` Metadata *OrderBy `json:"metadata,omitempty"` MimeType *OrderBy `json:"mimeType,omitempty"` Name *OrderBy `json:"name,omitempty"` Size *OrderBy `json:"size,omitempty"` UpdatedAt *OrderBy `json:"updatedAt,omitempty"` UploadedByUserID *OrderBy `json:"uploadedByUserId,omitempty"` }
Ordering options when selecting data from "storage.files".
type FilesPkColumnsInput ¶ added in v0.4.0
type FilesPkColumnsInput struct {
ID string `json:"id"`
}
primary key columns input for table: storage.files
type FilesPrependInput ¶ added in v0.4.0
type FilesPrependInput struct {
Metadata map[string]interface{} `json:"metadata,omitempty"`
}
prepend existing jsonb value of filtered columns with new jsonb value
type FilesSelectColumn ¶ added in v0.4.0
type FilesSelectColumn string
select columns of table "storage.files"
const ( // column name FilesSelectColumnBucketID FilesSelectColumn = "bucketId" // column name FilesSelectColumnCreatedAt FilesSelectColumn = "createdAt" // column name FilesSelectColumnEtag FilesSelectColumn = "etag" // column name FilesSelectColumnID FilesSelectColumn = "id" // column name FilesSelectColumnIsUploaded FilesSelectColumn = "isUploaded" // column name FilesSelectColumnMetadata FilesSelectColumn = "metadata" // column name FilesSelectColumnMimeType FilesSelectColumn = "mimeType" // column name FilesSelectColumnName FilesSelectColumn = "name" // column name FilesSelectColumnSize FilesSelectColumn = "size" // column name FilesSelectColumnUpdatedAt FilesSelectColumn = "updatedAt" // column name FilesSelectColumnUploadedByUserID FilesSelectColumn = "uploadedByUserId" )
func (FilesSelectColumn) IsValid ¶ added in v0.4.0
func (e FilesSelectColumn) IsValid() bool
func (FilesSelectColumn) MarshalGQL ¶ added in v0.4.0
func (e FilesSelectColumn) MarshalGQL(w io.Writer)
func (FilesSelectColumn) String ¶ added in v0.4.0
func (e FilesSelectColumn) String() string
func (*FilesSelectColumn) UnmarshalGQL ¶ added in v0.4.0
func (e *FilesSelectColumn) UnmarshalGQL(v interface{}) error
type FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns ¶ added in v0.4.0
type FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns string
select "files_aggregate_bool_exp_bool_and_arguments_columns" columns of table "storage.files"
const ( // column name FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumnsIsUploaded FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns = "isUploaded" )
func (FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns) IsValid ¶ added in v0.4.0
func (e FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns) IsValid() bool
func (FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns) MarshalGQL ¶ added in v0.4.0
func (e FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns) MarshalGQL(w io.Writer)
func (FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns) String ¶ added in v0.4.0
func (e FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns) String() string
func (*FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns) UnmarshalGQL ¶ added in v0.4.0
func (e *FilesSelectColumnFilesAggregateBoolExpBoolAndArgumentsColumns) UnmarshalGQL(v interface{}) error
type FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns ¶ added in v0.4.0
type FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns string
select "files_aggregate_bool_exp_bool_or_arguments_columns" columns of table "storage.files"
const ( // column name FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumnsIsUploaded FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns = "isUploaded" )
func (FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns) IsValid ¶ added in v0.4.0
func (e FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns) IsValid() bool
func (FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns) MarshalGQL ¶ added in v0.4.0
func (e FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns) MarshalGQL(w io.Writer)
func (FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns) String ¶ added in v0.4.0
func (e FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns) String() string
func (*FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns) UnmarshalGQL ¶ added in v0.4.0
func (e *FilesSelectColumnFilesAggregateBoolExpBoolOrArgumentsColumns) UnmarshalGQL(v interface{}) error
type FilesSetInput ¶ added in v0.4.0
type FilesSetInput struct {
    BucketID *string `json:"bucketId,omitempty"`
    CreatedAt *string `json:"createdAt,omitempty"`
    Etag *string `json:"etag,omitempty"`
    ID *string `json:"id,omitempty"`
    IsUploaded *bool `json:"isUploaded,omitempty"`
    Metadata map[string]interface{} `json:"metadata,omitempty"`
    MimeType *string `json:"mimeType,omitempty"`
    Name *string `json:"name,omitempty"`
    Size *int64 `json:"size,omitempty"`
    UpdatedAt *string `json:"updatedAt,omitempty"`
    UploadedByUserID *string `json:"uploadedByUserId,omitempty"`
}
input type for updating data in table "storage.files"
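Illustrative sketch (not part of the generated API): a partial update through Client.UpdateFile, for example to mark a file as uploaded once the object-store write succeeds. The package clause, the helper name markUploaded and the pre-built *Client are assumptions; the UpdateFile signature and FilesSetInput fields come from the listings above.
package client // assumed package name; illustrative sketch only

import (
    "context"
    "net/http"
)

// markUploaded is a hypothetical helper: it flips isUploaded and records the
// ETag. Fields left nil carry omitempty tags and are not touched by the update.
func markUploaded(ctx context.Context, c *Client, hdr http.Header, fileID, etag string) (*UpdateFile, error) {
    uploaded := true
    set := FilesSetInput{
        IsUploaded: &uploaded,
        Etag:       &etag,
    }
    return c.UpdateFile(ctx, fileID, set, WithHeaders(hdr))
}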
type FilesStddevFields ¶ added in v0.4.0
type FilesStddevFields struct {
Size *float64 `json:"size,omitempty"`
}
aggregate stddev on columns
type FilesStddevOrderBy ¶ added in v0.4.0
type FilesStddevOrderBy struct {
Size *OrderBy `json:"size,omitempty"`
}
order by stddev() on columns of table "storage.files"
type FilesStddevPopFields ¶ added in v0.4.0
type FilesStddevPopFields struct {
Size *float64 `json:"size,omitempty"`
}
aggregate stddev_pop on columns
type FilesStddevPopOrderBy ¶ added in v0.4.0
type FilesStddevPopOrderBy struct {
Size *OrderBy `json:"size,omitempty"`
}
order by stddev_pop() on columns of table "storage.files"
type FilesStddevSampFields ¶ added in v0.4.0
type FilesStddevSampFields struct {
Size *float64 `json:"size,omitempty"`
}
aggregate stddev_samp on columns
type FilesStddevSampOrderBy ¶ added in v0.4.0
type FilesStddevSampOrderBy struct {
Size *OrderBy `json:"size,omitempty"`
}
order by stddev_samp() on columns of table "storage.files"
type FilesStreamCursorInput ¶ added in v0.4.0
type FilesStreamCursorInput struct { // Stream column input with initial value InitialValue FilesStreamCursorValueInput `json:"initial_value"` // cursor ordering Ordering *CursorOrdering `json:"ordering,omitempty"` }
Streaming cursor of the table "files"
type FilesStreamCursorValueInput ¶ added in v0.4.0
type FilesStreamCursorValueInput struct { BucketID *string `json:"bucketId,omitempty"` CreatedAt *string `json:"createdAt,omitempty"` Etag *string `json:"etag,omitempty"` ID *string `json:"id,omitempty"` IsUploaded *bool `json:"isUploaded,omitempty"` Metadata map[string]interface{} `json:"metadata,omitempty"` MimeType *string `json:"mimeType,omitempty"` Name *string `json:"name,omitempty"` Size *int64 `json:"size,omitempty"` UpdatedAt *string `json:"updatedAt,omitempty"` UploadedByUserID *string `json:"uploadedByUserId,omitempty"` }
Initial value of the column from where the streaming should start
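Illustrative sketch (not part of the generated API): the Client in this section exposes no streaming operation, so this only shows the cursor value a streaming subscription on storage.files would resume from. The package clause and the helper name streamCursorFrom are assumptions.
package client // assumed package name; illustrative sketch only

// streamCursorFrom is a hypothetical helper: resume a stream at rows whose
// updatedAt is past the given value, in ascending cursor order.
func streamCursorFrom(updatedAt string) FilesStreamCursorInput {
    ordering := CursorOrderingAsc
    return FilesStreamCursorInput{
        InitialValue: FilesStreamCursorValueInput{UpdatedAt: &updatedAt},
        Ordering:     &ordering,
    }
}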
type FilesSumFields ¶ added in v0.4.0
type FilesSumFields struct {
Size *int64 `json:"size,omitempty"`
}
aggregate sum on columns
type FilesSumOrderBy ¶ added in v0.4.0
type FilesSumOrderBy struct {
Size *OrderBy `json:"size,omitempty"`
}
order by sum() on columns of table "storage.files"
type FilesUpdateColumn ¶ added in v0.4.0
type FilesUpdateColumn string
update columns of table "storage.files"
const ( // column name FilesUpdateColumnBucketID FilesUpdateColumn = "bucketId" // column name FilesUpdateColumnCreatedAt FilesUpdateColumn = "createdAt" // column name FilesUpdateColumnEtag FilesUpdateColumn = "etag" // column name FilesUpdateColumnID FilesUpdateColumn = "id" // column name FilesUpdateColumnIsUploaded FilesUpdateColumn = "isUploaded" // column name FilesUpdateColumnMetadata FilesUpdateColumn = "metadata" // column name FilesUpdateColumnMimeType FilesUpdateColumn = "mimeType" // column name FilesUpdateColumnName FilesUpdateColumn = "name" // column name FilesUpdateColumnSize FilesUpdateColumn = "size" // column name FilesUpdateColumnUpdatedAt FilesUpdateColumn = "updatedAt" // column name FilesUpdateColumnUploadedByUserID FilesUpdateColumn = "uploadedByUserId" )
func (FilesUpdateColumn) IsValid ¶ added in v0.4.0
func (e FilesUpdateColumn) IsValid() bool
func (FilesUpdateColumn) MarshalGQL ¶ added in v0.4.0
func (e FilesUpdateColumn) MarshalGQL(w io.Writer)
func (FilesUpdateColumn) String ¶ added in v0.4.0
func (e FilesUpdateColumn) String() string
func (*FilesUpdateColumn) UnmarshalGQL ¶ added in v0.4.0
func (e *FilesUpdateColumn) UnmarshalGQL(v interface{}) error
type FilesUpdates ¶ added in v0.4.0
type FilesUpdates struct {
    // append existing jsonb value of filtered columns with new jsonb value
    Append *FilesAppendInput `json:"_append,omitempty"`
    // delete the field or element with specified path (for JSON arrays, negative integers count from the end)
    DeleteAtPath *FilesDeleteAtPathInput `json:"_delete_at_path,omitempty"`
    // delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
    DeleteElem *FilesDeleteElemInput `json:"_delete_elem,omitempty"`
    // delete key/value pair or string element. key/value pairs are matched based on their key value
    DeleteKey *FilesDeleteKeyInput `json:"_delete_key,omitempty"`
    // increments the numeric columns with given value of the filtered values
    Inc *FilesIncInput `json:"_inc,omitempty"`
    // prepend existing jsonb value of filtered columns with new jsonb value
    Prepend *FilesPrependInput `json:"_prepend,omitempty"`
    // sets the columns of the filtered rows to the given values
    Set *FilesSetInput `json:"_set,omitempty"`
    // filter the rows which have to be updated
    Where FilesBoolExp `json:"where"`
}
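Illustrative sketch (not part of the generated API): FilesUpdates bundles the jsonb inputs above with _set/_inc and a mandatory where clause, the element shape Hasura's bulk file update expects. None of the listed Client methods take it, so this only constructs the value. The package clause and the helper name tagScanned are assumptions.
package client // assumed package name; illustrative sketch only

// tagScanned is a hypothetical helper: it builds a bulk-update element that
// merges {"scanned": true} into the metadata of every file that has finished
// uploading.
func tagScanned() FilesUpdates {
    uploaded := true
    return FilesUpdates{
        Append: &FilesAppendInput{Metadata: map[string]interface{}{"scanned": true}},
        Where:  FilesBoolExp{IsUploaded: &BooleanComparisonExp{Eq: &uploaded}},
    }
}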
type FilesVarPopFields ¶ added in v0.4.0
type FilesVarPopFields struct {
Size *float64 `json:"size,omitempty"`
}
aggregate var_pop on columns
type FilesVarPopOrderBy ¶ added in v0.4.0
type FilesVarPopOrderBy struct {
Size *OrderBy `json:"size,omitempty"`
}
order by var_pop() on columns of table "storage.files"
type FilesVarSampFields ¶ added in v0.4.0
type FilesVarSampFields struct {
Size *float64 `json:"size,omitempty"`
}
aggregate var_samp on columns
type FilesVarSampOrderBy ¶ added in v0.4.0
type FilesVarSampOrderBy struct {
Size *OrderBy `json:"size,omitempty"`
}
order by var_samp() on columns of table "storage.files"
type FilesVarianceFields ¶ added in v0.4.0
type FilesVarianceFields struct {
Size *float64 `json:"size,omitempty"`
}
aggregate variance on columns
type FilesVarianceOrderBy ¶ added in v0.4.0
type FilesVarianceOrderBy struct {
Size *OrderBy `json:"size,omitempty"`
}
order by variance() on columns of table "storage.files"
type GetBucket ¶ added in v0.4.0
type GetBucket struct {
Bucket *BucketMetadataFragment "json:\"bucket,omitempty\" graphql:\"bucket\""
}
func (*GetBucket) GetBucket ¶ added in v0.4.0
func (t *GetBucket) GetBucket() *BucketMetadataFragment
type GetFile ¶ added in v0.4.0
type GetFile struct {
File *FileMetadataFragment "json:\"file,omitempty\" graphql:\"file\""
}
func (*GetFile) GetFile ¶ added in v0.4.0
func (t *GetFile) GetFile() *FileMetadataFragment
type Hasura ¶
type Hasura struct {
// contains filtered or unexported fields
}
func (*Hasura) DeleteFileByID ¶
func (h *Hasura) DeleteFileByID(
    ctx context.Context,
    fileID string,
    headers http.Header,
) *controller.APIError
func (*Hasura) GetBucketByID ¶
func (h *Hasura) GetBucketByID(
    ctx context.Context,
    bucketID string,
    headers http.Header,
) (controller.BucketMetadata, *controller.APIError)
func (*Hasura) GetFileByID ¶
func (h *Hasura) GetFileByID(
    ctx context.Context,
    fileID string,
    headers http.Header,
) (controller.FileMetadata, *controller.APIError)
func (*Hasura) InitializeFile ¶
func (*Hasura) InsertVirus ¶ added in v0.4.0
func (*Hasura) ListFiles ¶
func (h *Hasura) ListFiles(
    ctx context.Context,
    headers http.Header,
) ([]controller.FileSummary, *controller.APIError)
func (*Hasura) PopulateMetadata ¶
func (*Hasura) SetIsUploaded ¶
func (h *Hasura) SetIsUploaded(
    ctx context.Context,
    fileID string,
    isUploaded bool,
    headers http.Header,
) *controller.APIError
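Illustrative sketch (not part of the generated API): Hasura is the higher-level wrapper; its methods return *controller.APIError and take the incoming request headers so Hasura permissions are evaluated as the caller. The package clause, the helper name describeFile and the pre-built *Hasura value are assumptions, since the constructor is not shown in this section.
package client // assumed package name; illustrative sketch only

import (
    "context"
    "log"
    "net/http"
)

// describeFile is a hypothetical helper: it resolves file metadata as the
// caller identified by the forwarded headers and logs the result.
func describeFile(ctx context.Context, h *Hasura, headers http.Header, fileID string) {
    md, apiErr := h.GetFileByID(ctx, fileID, headers)
    if apiErr != nil {
        log.Printf("GetFileByID(%s): %v", fileID, apiErr)
        return
    }
    log.Printf("file %s: %+v", fileID, md)
}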
type InsertFile ¶ added in v0.4.0
type InsertFile struct {
InsertFile *InsertFile_InsertFile "json:\"insertFile,omitempty\" graphql:\"insertFile\""
}
func (*InsertFile) GetInsertFile ¶ added in v0.4.0
func (t *InsertFile) GetInsertFile() *InsertFile_InsertFile
type InsertFile_InsertFile ¶ added in v0.4.0
type InsertFile_InsertFile struct {
ID string "json:\"id\" graphql:\"id\""
}
func (*InsertFile_InsertFile) GetID ¶ added in v0.4.0
func (t *InsertFile_InsertFile) GetID() string
type InsertVirus ¶ added in v0.4.0
type InsertVirus struct {
InsertVirus *InsertVirus_InsertVirus "json:\"insertVirus,omitempty\" graphql:\"insertVirus\""
}
func (*InsertVirus) GetInsertVirus ¶ added in v0.4.0
func (t *InsertVirus) GetInsertVirus() *InsertVirus_InsertVirus
type InsertVirus_InsertVirus ¶ added in v0.4.0
type InsertVirus_InsertVirus struct {
ID string "json:\"id\" graphql:\"id\""
}
func (*InsertVirus_InsertVirus) GetID ¶ added in v0.4.0
func (t *InsertVirus_InsertVirus) GetID() string
type IntComparisonExp ¶ added in v0.4.0
type IntComparisonExp struct {
    Eq *int64 `json:"_eq,omitempty"`
    Gt *int64 `json:"_gt,omitempty"`
    Gte *int64 `json:"_gte,omitempty"`
    In []int64 `json:"_in,omitempty"`
    IsNull *bool `json:"_is_null,omitempty"`
    Lt *int64 `json:"_lt,omitempty"`
    Lte *int64 `json:"_lte,omitempty"`
    Neq *int64 `json:"_neq,omitempty"`
    Nin []int64 `json:"_nin,omitempty"`
}
Boolean expression to compare columns of type "Int". All fields are combined with logical 'AND'.
type JsonbCastExp ¶ added in v0.4.0
type JsonbCastExp struct {
String *StringComparisonExp `json:"String,omitempty"`
}
type JsonbComparisonExp ¶ added in v0.4.0
type JsonbComparisonExp struct {
    Cast *JsonbCastExp `json:"_cast,omitempty"`
    // is the column contained in the given json value
    ContainedIn map[string]interface{} `json:"_contained_in,omitempty"`
    // does the column contain the given json value at the top level
    Contains map[string]interface{} `json:"_contains,omitempty"`
    Eq map[string]interface{} `json:"_eq,omitempty"`
    Gt map[string]interface{} `json:"_gt,omitempty"`
    Gte map[string]interface{} `json:"_gte,omitempty"`
    // does the string exist as a top-level key in the column
    HasKey *string `json:"_has_key,omitempty"`
    // do all of these strings exist as top-level keys in the column
    HasKeysAll []string `json:"_has_keys_all,omitempty"`
    // do any of these strings exist as top-level keys in the column
    HasKeysAny []string `json:"_has_keys_any,omitempty"`
    In []map[string]interface{} `json:"_in,omitempty"`
    IsNull *bool `json:"_is_null,omitempty"`
    Lt map[string]interface{} `json:"_lt,omitempty"`
    Lte map[string]interface{} `json:"_lte,omitempty"`
    Neq map[string]interface{} `json:"_neq,omitempty"`
    Nin []map[string]interface{} `json:"_nin,omitempty"`
}
Boolean expression to compare columns of type "jsonb". All fields are combined with logical 'AND'.
type ListFilesSummary ¶ added in v0.4.0
type ListFilesSummary struct {
Files []*FileMetadataSummaryFragment "json:\"files\" graphql:\"files\""
}
func (*ListFilesSummary) GetFiles ¶ added in v0.4.0
func (t *ListFilesSummary) GetFiles() []*FileMetadataSummaryFragment
type MutationRoot ¶ added in v0.4.0
type MutationRoot struct {
	DeleteBucket  *Buckets                 "json:\"deleteBucket,omitempty\" graphql:\"deleteBucket\""
	DeleteBuckets *BucketsMutationResponse "json:\"deleteBuckets,omitempty\" graphql:\"deleteBuckets\""
	DeleteFile    *Files                   "json:\"deleteFile,omitempty\" graphql:\"deleteFile\""
	DeleteFiles   *FilesMutationResponse   "json:\"deleteFiles,omitempty\" graphql:\"deleteFiles\""
	DeleteVirus   *Virus                   "json:\"deleteVirus,omitempty\" graphql:\"deleteVirus\""
	DeleteViruses *VirusMutationResponse   "json:\"deleteViruses,omitempty\" graphql:\"deleteViruses\""
	InsertBucket  *Buckets                 "json:\"insertBucket,omitempty\" graphql:\"insertBucket\""
	InsertBuckets *BucketsMutationResponse "json:\"insertBuckets,omitempty\" graphql:\"insertBuckets\""
	InsertFile    *Files                   "json:\"insertFile,omitempty\" graphql:\"insertFile\""
	InsertFiles   *FilesMutationResponse   "json:\"insertFiles,omitempty\" graphql:\"insertFiles\""
	InsertVirus   *Virus                   "json:\"insertVirus,omitempty\" graphql:\"insertVirus\""
	InsertViruses *VirusMutationResponse   "json:\"insertViruses,omitempty\" graphql:\"insertViruses\""
	UpdateBucket  *Buckets                 "json:\"updateBucket,omitempty\" graphql:\"updateBucket\""
	UpdateBuckets *BucketsMutationResponse "json:\"updateBuckets,omitempty\" graphql:\"updateBuckets\""
	UpdateFile    *Files                   "json:\"updateFile,omitempty\" graphql:\"updateFile\""
	UpdateFiles   *FilesMutationResponse   "json:\"updateFiles,omitempty\" graphql:\"updateFiles\""
	UpdateVirus   *Virus                   "json:\"updateVirus,omitempty\" graphql:\"updateVirus\""
	UpdateViruses *VirusMutationResponse   "json:\"updateViruses,omitempty\" graphql:\"updateViruses\""
	UpdateBucketsMany []*BucketsMutationResponse "json:\"update_buckets_many,omitempty\" graphql:\"update_buckets_many\""
	UpdateFilesMany   []*FilesMutationResponse   "json:\"update_files_many,omitempty\" graphql:\"update_files_many\""
	UpdateVirusMany   []*VirusMutationResponse   "json:\"update_virus_many,omitempty\" graphql:\"update_virus_many\""
}
type OrderBy ¶ added in v0.4.0
type OrderBy string
column ordering options
const (
	// in ascending order, nulls last
	OrderByAsc OrderBy = "asc"
	// in ascending order, nulls first
	OrderByAscNullsFirst OrderBy = "asc_nulls_first"
	// in ascending order, nulls last
	OrderByAscNullsLast OrderBy = "asc_nulls_last"
	// in descending order, nulls first
	OrderByDesc OrderBy = "desc"
	// in descending order, nulls first
	OrderByDescNullsFirst OrderBy = "desc_nulls_first"
	// in descending order, nulls last
	OrderByDescNullsLast OrderBy = "desc_nulls_last"
)
func (OrderBy) MarshalGQL ¶ added in v0.4.0
func (e OrderBy) MarshalGQL(w io.Writer)
func (*OrderBy) UnmarshalGQL ¶ added in v0.4.0
func (e *OrderBy) UnmarshalGQL(v interface{}) error
type QueryRoot ¶ added in v0.4.0
type QueryRoot struct {
	Bucket           *Buckets         "json:\"bucket,omitempty\" graphql:\"bucket\""
	Buckets          []*Buckets       "json:\"buckets\" graphql:\"buckets\""
	BucketsAggregate BucketsAggregate "json:\"bucketsAggregate\" graphql:\"bucketsAggregate\""
	File             *Files           "json:\"file,omitempty\" graphql:\"file\""
	Files            []*Files         "json:\"files\" graphql:\"files\""
	FilesAggregate   FilesAggregate   "json:\"filesAggregate\" graphql:\"filesAggregate\""
	Virus            *Virus           "json:\"virus,omitempty\" graphql:\"virus\""
	Viruses          []*Virus         "json:\"viruses\" graphql:\"viruses\""
	VirusesAggregate VirusAggregate   "json:\"virusesAggregate\" graphql:\"virusesAggregate\""
}
type StringComparisonExp ¶ added in v0.4.0
type StringComparisonExp struct {
	Eq  *string `json:"_eq,omitempty"`
	Gt  *string `json:"_gt,omitempty"`
	Gte *string `json:"_gte,omitempty"`
	// does the column match the given case-insensitive pattern
	Ilike *string  `json:"_ilike,omitempty"`
	In    []string `json:"_in,omitempty"`
	// does the column match the given POSIX regular expression, case insensitive
	Iregex *string `json:"_iregex,omitempty"`
	IsNull *bool   `json:"_is_null,omitempty"`
	// does the column match the given pattern
	Like *string `json:"_like,omitempty"`
	Lt   *string `json:"_lt,omitempty"`
	Lte  *string `json:"_lte,omitempty"`
	Neq  *string `json:"_neq,omitempty"`
	// does the column NOT match the given case-insensitive pattern
	Nilike *string  `json:"_nilike,omitempty"`
	Nin    []string `json:"_nin,omitempty"`
	// does the column NOT match the given POSIX regular expression, case insensitive
	Niregex *string `json:"_niregex,omitempty"`
	// does the column NOT match the given pattern
	Nlike *string `json:"_nlike,omitempty"`
	// does the column NOT match the given POSIX regular expression, case sensitive
	Nregex *string `json:"_nregex,omitempty"`
	// does the column NOT match the given SQL regular expression
	Nsimilar *string `json:"_nsimilar,omitempty"`
	// does the column match the given POSIX regular expression, case sensitive
	Regex *string `json:"_regex,omitempty"`
	// does the column match the given SQL regular expression
	Similar *string `json:"_similar,omitempty"`
}
Boolean expression to compare columns of type "String". All fields are combined with logical 'AND'.
type SubscriptionRoot ¶ added in v0.4.0
type SubscriptionRoot struct {
	// fetch data from the table: "storage.buckets" using primary key columns
	Bucket *Buckets `json:"bucket,omitempty"`
	// fetch data from the table: "storage.buckets"
	Buckets []*Buckets `json:"buckets"`
	// fetch aggregated fields from the table: "storage.buckets"
	BucketsAggregate BucketsAggregate `json:"bucketsAggregate"`
	// fetch data from the table in a streaming manner: "storage.buckets"
	BucketsStream []*Buckets `json:"buckets_stream"`
	// fetch data from the table: "storage.files" using primary key columns
	File *Files `json:"file,omitempty"`
	// An array relationship
	Files []*Files `json:"files"`
	// fetch aggregated fields from the table: "storage.files"
	FilesAggregate FilesAggregate `json:"filesAggregate"`
	// fetch data from the table in a streaming manner: "storage.files"
	FilesStream []*Files `json:"files_stream"`
	// fetch data from the table: "storage.virus" using primary key columns
	Virus *Virus `json:"virus,omitempty"`
	// fetch data from the table in a streaming manner: "storage.virus"
	VirusStream []*Virus `json:"virus_stream"`
	// fetch data from the table: "storage.virus"
	Viruses []*Virus `json:"viruses"`
	// fetch aggregated fields from the table: "storage.virus"
	VirusesAggregate VirusAggregate `json:"virusesAggregate"`
}
type TimestamptzComparisonExp ¶ added in v0.4.0
type TimestamptzComparisonExp struct {
	Eq     *string  `json:"_eq,omitempty"`
	Gt     *string  `json:"_gt,omitempty"`
	Gte    *string  `json:"_gte,omitempty"`
	In     []string `json:"_in,omitempty"`
	IsNull *bool    `json:"_is_null,omitempty"`
	Lt     *string  `json:"_lt,omitempty"`
	Lte    *string  `json:"_lte,omitempty"`
	Neq    *string  `json:"_neq,omitempty"`
	Nin    []string `json:"_nin,omitempty"`
}
Boolean expression to compare columns of type "timestamptz". All fields are combined with logical 'AND'.
type UUIDComparisonExp ¶ added in v0.4.0
type UUIDComparisonExp struct {
	Eq     *string  `json:"_eq,omitempty"`
	Gt     *string  `json:"_gt,omitempty"`
	Gte    *string  `json:"_gte,omitempty"`
	In     []string `json:"_in,omitempty"`
	IsNull *bool    `json:"_is_null,omitempty"`
	Lt     *string  `json:"_lt,omitempty"`
	Lte    *string  `json:"_lte,omitempty"`
	Neq    *string  `json:"_neq,omitempty"`
	Nin    []string `json:"_nin,omitempty"`
}
Boolean expression to compare columns of type "uuid". All fields are combined with logical 'AND'.
type UpdateFile ¶ added in v0.4.0
type UpdateFile struct {
UpdateFile *FileMetadataFragment "json:\"updateFile,omitempty\" graphql:\"updateFile\""
}
func (*UpdateFile) GetUpdateFile ¶ added in v0.4.0
func (t *UpdateFile) GetUpdateFile() *FileMetadataFragment
type Virus ¶ added in v0.4.0
type Virus struct {
	CreatedAt string `json:"createdAt"`
	// An object relationship
	File        Files                  `json:"file"`
	FileID      string                 `json:"fileId"`
	Filename    string                 `json:"filename"`
	ID          string                 `json:"id"`
	UpdatedAt   string                 `json:"updatedAt"`
	UserSession map[string]interface{} `json:"userSession"`
	Virus       string                 `json:"virus"`
}
columns and relationships of "storage.virus"
type VirusAggregate ¶ added in v0.4.0
type VirusAggregate struct {
	Aggregate *VirusAggregateFields `json:"aggregate,omitempty"`
	Nodes     []*Virus              `json:"nodes"`
}
aggregated selection of "storage.virus"
type VirusAggregateFields ¶ added in v0.4.0
type VirusAggregateFields struct {
	Count int64           `json:"count"`
	Max   *VirusMaxFields `json:"max,omitempty"`
	Min   *VirusMinFields `json:"min,omitempty"`
}
aggregate fields of "storage.virus"
type VirusAppendInput ¶ added in v0.4.0
type VirusAppendInput struct {
UserSession map[string]interface{} `json:"userSession,omitempty"`
}
append existing jsonb value of filtered columns with new jsonb value
type VirusBoolExp ¶ added in v0.4.0
type VirusBoolExp struct {
	And         []*VirusBoolExp           `json:"_and,omitempty"`
	Not         *VirusBoolExp             `json:"_not,omitempty"`
	Or          []*VirusBoolExp           `json:"_or,omitempty"`
	CreatedAt   *TimestamptzComparisonExp `json:"createdAt,omitempty"`
	File        *FilesBoolExp             `json:"file,omitempty"`
	FileID      *UUIDComparisonExp        `json:"fileId,omitempty"`
	Filename    *StringComparisonExp      `json:"filename,omitempty"`
	ID          *UUIDComparisonExp        `json:"id,omitempty"`
	UpdatedAt   *TimestamptzComparisonExp `json:"updatedAt,omitempty"`
	UserSession *JsonbComparisonExp       `json:"userSession,omitempty"`
	Virus       *StringComparisonExp      `json:"virus,omitempty"`
}
Boolean expression to filter rows from the table "storage.virus". All fields are combined with a logical 'AND'.
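A sketch combining the comparison inputs documented above into one row filter; the helper name is hypothetical.

	// virusesForFileSince matches virus rows for one fileId AND with
	// createdAt >= since (a timestamptz-formatted string).
	func virusesForFileSince(fileID, since string) *VirusBoolExp {
		return &VirusBoolExp{
			And: []*VirusBoolExp{
				{FileID: &UUIDComparisonExp{Eq: &fileID}},
				{CreatedAt: &TimestamptzComparisonExp{Gte: &since}},
			},
		}
	}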
type VirusConstraint ¶ added in v0.4.0
type VirusConstraint string
unique or primary key constraints on table "storage.virus"
const (
	// unique or primary key constraint on columns "id"
	VirusConstraintVirusPkey VirusConstraint = "virus_pkey"
)
func (VirusConstraint) IsValid ¶ added in v0.4.0
func (e VirusConstraint) IsValid() bool
func (VirusConstraint) MarshalGQL ¶ added in v0.4.0
func (e VirusConstraint) MarshalGQL(w io.Writer)
func (VirusConstraint) String ¶ added in v0.4.0
func (e VirusConstraint) String() string
func (*VirusConstraint) UnmarshalGQL ¶ added in v0.4.0
func (e *VirusConstraint) UnmarshalGQL(v interface{}) error
type VirusDeleteAtPathInput ¶ added in v0.4.0
type VirusDeleteAtPathInput struct {
UserSession []string `json:"userSession,omitempty"`
}
delete the field or element with specified path (for JSON arrays, negative integers count from the end)
type VirusDeleteElemInput ¶ added in v0.4.0
type VirusDeleteElemInput struct {
UserSession *int64 `json:"userSession,omitempty"`
}
delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
type VirusDeleteKeyInput ¶ added in v0.4.0
type VirusDeleteKeyInput struct {
UserSession *string `json:"userSession,omitempty"`
}
delete key/value pair or string element. key/value pairs are matched based on their key value
type VirusInsertInput ¶ added in v0.4.0
type VirusInsertInput struct {
	CreatedAt   *string                 `json:"createdAt,omitempty"`
	File        *FilesObjRelInsertInput `json:"file,omitempty"`
	FileID      *string                 `json:"fileId,omitempty"`
	Filename    *string                 `json:"filename,omitempty"`
	ID          *string                 `json:"id,omitempty"`
	UpdatedAt   *string                 `json:"updatedAt,omitempty"`
	UserSession map[string]interface{}  `json:"userSession,omitempty"`
	Virus       *string                 `json:"virus,omitempty"`
}
input type for inserting data into table "storage.virus"
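A sketch of populating this input for a newly detected virus; only the pointer fields you set are sent, and the helper name is hypothetical.

	// newVirusInsertInput assembles the generated insert input for recording a
	// detected virus against an existing file.
	func newVirusInsertInput(fileID, filename, virusName string, session map[string]interface{}) VirusInsertInput {
		return VirusInsertInput{
			FileID:      &fileID,
			Filename:    &filename,
			Virus:       &virusName,
			UserSession: session,
		}
	}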
type VirusMaxFields ¶ added in v0.4.0
type VirusMaxFields struct {
	CreatedAt *string `json:"createdAt,omitempty"`
	FileID    *string `json:"fileId,omitempty"`
	Filename  *string `json:"filename,omitempty"`
	ID        *string `json:"id,omitempty"`
	UpdatedAt *string `json:"updatedAt,omitempty"`
	Virus     *string `json:"virus,omitempty"`
}
aggregate max on columns
type VirusMinFields ¶ added in v0.4.0
type VirusMinFields struct {
	CreatedAt *string `json:"createdAt,omitempty"`
	FileID    *string `json:"fileId,omitempty"`
	Filename  *string `json:"filename,omitempty"`
	ID        *string `json:"id,omitempty"`
	UpdatedAt *string `json:"updatedAt,omitempty"`
	Virus     *string `json:"virus,omitempty"`
}
aggregate min on columns
type VirusMutationResponse ¶ added in v0.4.0
type VirusMutationResponse struct {
	// number of rows affected by the mutation
	AffectedRows int64 `json:"affected_rows"`
	// data from the rows affected by the mutation
	Returning []*Virus `json:"returning"`
}
response of any mutation on the table "storage.virus"
type VirusOnConflict ¶ added in v0.4.0
type VirusOnConflict struct {
	Constraint    VirusConstraint     `json:"constraint"`
	UpdateColumns []VirusUpdateColumn `json:"update_columns"`
	Where         *VirusBoolExp       `json:"where,omitempty"`
}
on_conflict condition type for table "storage.virus"
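A sketch of an upsert condition using the VirusConstraint and VirusUpdateColumn constants documented elsewhere in this reference; the helper name is hypothetical.

	// upsertVirusOnID resolves insert conflicts on the virus primary key by
	// updating the updatedAt and virus columns of the existing row.
	func upsertVirusOnID() *VirusOnConflict {
		return &VirusOnConflict{
			Constraint:    VirusConstraintVirusPkey,
			UpdateColumns: []VirusUpdateColumn{VirusUpdateColumnUpdatedAt, VirusUpdateColumnVirus},
		}
	}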
type VirusOrderBy ¶ added in v0.4.0
type VirusOrderBy struct {
	CreatedAt   *OrderBy      `json:"createdAt,omitempty"`
	File        *FilesOrderBy `json:"file,omitempty"`
	FileID      *OrderBy      `json:"fileId,omitempty"`
	Filename    *OrderBy      `json:"filename,omitempty"`
	ID          *OrderBy      `json:"id,omitempty"`
	UpdatedAt   *OrderBy      `json:"updatedAt,omitempty"`
	UserSession *OrderBy      `json:"userSession,omitempty"`
	Virus       *OrderBy      `json:"virus,omitempty"`
}
Ordering options when selecting data from "storage.virus".
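A sketch of ordering virus rows with the OrderBy constants documented above; the helper name is hypothetical, and the local variable exists only because the generated fields take *OrderBy.

	// newestVirusFirst orders virus rows by createdAt descending with NULLs last.
	func newestVirusFirst() []*VirusOrderBy {
		desc := OrderByDescNullsLast
		return []*VirusOrderBy{{CreatedAt: &desc}}
	}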
type VirusPkColumnsInput ¶ added in v0.4.0
type VirusPkColumnsInput struct {
ID string `json:"id"`
}
primary key columns input for table: storage.virus
type VirusPrependInput ¶ added in v0.4.0
type VirusPrependInput struct {
UserSession map[string]interface{} `json:"userSession,omitempty"`
}
prepend existing jsonb value of filtered columns with new jsonb value
type VirusSelectColumn ¶ added in v0.4.0
type VirusSelectColumn string
select columns of table "storage.virus"
const (
	// column name
	VirusSelectColumnCreatedAt VirusSelectColumn = "createdAt"
	// column name
	VirusSelectColumnFileID VirusSelectColumn = "fileId"
	// column name
	VirusSelectColumnFilename VirusSelectColumn = "filename"
	// column name
	VirusSelectColumnID VirusSelectColumn = "id"
	// column name
	VirusSelectColumnUpdatedAt VirusSelectColumn = "updatedAt"
	// column name
	VirusSelectColumnUserSession VirusSelectColumn = "userSession"
	// column name
	VirusSelectColumnVirus VirusSelectColumn = "virus"
)
func (VirusSelectColumn) IsValid ¶ added in v0.4.0
func (e VirusSelectColumn) IsValid() bool
func (VirusSelectColumn) MarshalGQL ¶ added in v0.4.0
func (e VirusSelectColumn) MarshalGQL(w io.Writer)
func (VirusSelectColumn) String ¶ added in v0.4.0
func (e VirusSelectColumn) String() string
func (*VirusSelectColumn) UnmarshalGQL ¶ added in v0.4.0
func (e *VirusSelectColumn) UnmarshalGQL(v interface{}) error
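A sketch of the usual round-trip through the documented enum methods; it assumes the fmt package is imported, the helper name is hypothetical, and the exact validation behaviour of the generated UnmarshalGQL is an assumption rather than documented here.

	// parseVirusColumn converts a wire value into a VirusSelectColumn using the
	// methods listed above; the explicit IsValid check is belt-and-braces, since
	// generated UnmarshalGQL implementations typically validate already.
	func parseVirusColumn(raw interface{}) (VirusSelectColumn, error) {
		var col VirusSelectColumn
		if err := col.UnmarshalGQL(raw); err != nil {
			return "", err
		}
		if !col.IsValid() {
			return "", fmt.Errorf("unknown column %q", col.String())
		}
		return col, nil
	}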
type VirusSetInput ¶ added in v0.4.0
type VirusSetInput struct {
	CreatedAt   *string                `json:"createdAt,omitempty"`
	FileID      *string                `json:"fileId,omitempty"`
	Filename    *string                `json:"filename,omitempty"`
	ID          *string                `json:"id,omitempty"`
	UpdatedAt   *string                `json:"updatedAt,omitempty"`
	UserSession map[string]interface{} `json:"userSession,omitempty"`
	Virus       *string                `json:"virus,omitempty"`
}
input type for updating data in table "storage.virus"
type VirusStreamCursorInput ¶ added in v0.4.0
type VirusStreamCursorInput struct {
	// Stream column input with initial value
	InitialValue VirusStreamCursorValueInput `json:"initial_value"`
	// cursor ordering
	Ordering *CursorOrdering `json:"ordering,omitempty"`
}
Streaming cursor of the table "virus"
type VirusStreamCursorValueInput ¶ added in v0.4.0
type VirusStreamCursorValueInput struct {
	CreatedAt   *string                `json:"createdAt,omitempty"`
	FileID      *string                `json:"fileId,omitempty"`
	Filename    *string                `json:"filename,omitempty"`
	ID          *string                `json:"id,omitempty"`
	UpdatedAt   *string                `json:"updatedAt,omitempty"`
	UserSession map[string]interface{} `json:"userSession,omitempty"`
	Virus       *string                `json:"virus,omitempty"`
}
Initial value of the column from where the streaming should start
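A sketch of resuming a virus stream from a known createdAt value; the helper name is hypothetical, and CursorOrderingAsc is an assumed constant name following the enum naming pattern used throughout this package.

	// resumeVirusStreamFrom builds a streaming cursor that restarts the virus
	// stream at the given createdAt value in ascending order.
	func resumeVirusStreamFrom(createdAt string) VirusStreamCursorInput {
		ordering := CursorOrderingAsc // assumption: ascending cursor-ordering constant
		return VirusStreamCursorInput{
			InitialValue: VirusStreamCursorValueInput{CreatedAt: &createdAt},
			Ordering:     &ordering,
		}
	}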
type VirusUpdateColumn ¶ added in v0.4.0
type VirusUpdateColumn string
update columns of table "storage.virus"
const (
	// column name
	VirusUpdateColumnCreatedAt VirusUpdateColumn = "createdAt"
	// column name
	VirusUpdateColumnFileID VirusUpdateColumn = "fileId"
	// column name
	VirusUpdateColumnFilename VirusUpdateColumn = "filename"
	// column name
	VirusUpdateColumnID VirusUpdateColumn = "id"
	// column name
	VirusUpdateColumnUpdatedAt VirusUpdateColumn = "updatedAt"
	// column name
	VirusUpdateColumnUserSession VirusUpdateColumn = "userSession"
	// column name
	VirusUpdateColumnVirus VirusUpdateColumn = "virus"
)
func (VirusUpdateColumn) IsValid ¶ added in v0.4.0
func (e VirusUpdateColumn) IsValid() bool
func (VirusUpdateColumn) MarshalGQL ¶ added in v0.4.0
func (e VirusUpdateColumn) MarshalGQL(w io.Writer)
func (VirusUpdateColumn) String ¶ added in v0.4.0
func (e VirusUpdateColumn) String() string
func (*VirusUpdateColumn) UnmarshalGQL ¶ added in v0.4.0
func (e *VirusUpdateColumn) UnmarshalGQL(v interface{}) error
type VirusUpdates ¶ added in v0.4.0
type VirusUpdates struct {
	// append existing jsonb value of filtered columns with new jsonb value
	Append *VirusAppendInput `json:"_append,omitempty"`
	// delete the field or element with specified path (for JSON arrays, negative integers count from the end)
	DeleteAtPath *VirusDeleteAtPathInput `json:"_delete_at_path,omitempty"`
	// delete the array element with specified index (negative integers count from the end). throws an error if top level container is not an array
	DeleteElem *VirusDeleteElemInput `json:"_delete_elem,omitempty"`
	// delete key/value pair or string element. key/value pairs are matched based on their key value
	DeleteKey *VirusDeleteKeyInput `json:"_delete_key,omitempty"`
	// prepend existing jsonb value of filtered columns with new jsonb value
	Prepend *VirusPrependInput `json:"_prepend,omitempty"`
	// sets the columns of the filtered rows to the given values
	Set *VirusSetInput `json:"_set,omitempty"`
	// filter the rows which have to be updated
	Where VirusBoolExp `json:"where"`
}
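A sketch of a multi-operation update built from the input types documented above; the helper name is hypothetical, and all field names come from this reference.

	// clearSessionKey removes one key from the jsonb userSession column and sets
	// updatedAt on every virus row belonging to the given file.
	func clearSessionKey(fileID, key, now string) VirusUpdates {
		return VirusUpdates{
			DeleteKey: &VirusDeleteKeyInput{UserSession: &key},
			Set:       &VirusSetInput{UpdatedAt: &now},
			Where:     VirusBoolExp{FileID: &UUIDComparisonExp{Eq: &fileID}},
		}
	}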