Documentation ¶
Index ¶
- Constants
- Variables
- func AddParam(name, sqlName string, params Record, conds []string, args []interface{}) ([]string, []interface{})
- func CheckPattern(key, value string) error
- func CheckQueryParameters(r *http.Request, api string) error
- func CleanStatement(stm string) string
- func CreateInvalidParamError(param string, api string) error
- func Date() int64
- func DecodeValidatorError(r, err interface{}) error
- func Error(err error, code int, msg, function string) error
- func FlatLumis(val interface{}) ([]string, error)
- func GetBlocks(rurl, val string) ([]string, error)
- func GetChunks(vals []string, limit int) []string
- func GetID(tx *sql.Tx, table, id, attr string, val ...interface{}) (int64, error)
- func GetMigrationBlocksInOrder(mblocks []MigrationBlock) []string
- func GetParents(rurl, val string) ([]string, error)
- func GetRecID(tx *sql.Tx, rec DBRecord, table, id, attr string, val ...interface{}) (int64, error)
- func GetTestData() error
- func HttpClient(key, cert string, tout int) *http.Client
- func IfExist(tx *sql.Tx, table, rid, attr string, val ...interface{}) bool
- func IfExistMulti(tx *sql.Tx, table, rid string, args []string, vals ...interface{}) bool
- func IncrementSequence(tx *sql.Tx, seq string) (int64, error)
- func IncrementSequences(tx *sql.Tx, seq string, n int) ([]int64, error)
- func InsertFileLumisTxViaChunks(tx *sql.Tx, table string, records []FileLumis) error
- func LastInsertID(tx *sql.Tx, table, idName string) (int64, error)
- func LoadPatterns(fname string) (map[string]LexiconPattern, error)
- func LoadTemplateSQL(tmpl string, tmplData Record) (string, error)
- func MigrationCleanupServer(interval int, offset int64, ch <-chan bool)
- func MigrationServer(interval, timeout int, ch <-chan bool)
- func OperatorValue(arg string) (string, string)
- func ParseDBFile(dbfile string) (string, string, string)
- func ParseRuns(runs []string) ([]string, error)
- func QueryRow(table, id, attr string, val interface{}) (int64, error)
- func RunsConditions(runs []string, table string) (string, []string, []interface{}, error)
- func StartMigrationRequest(rec MigrationRequest)
- func TokenCondition() string
- func TokenGenerator(runs []string, limit int, name string) (string, []string)
- func TokenGeneratorORACLE(runs []string, limit int, name string) (string, []string)
- func TokenGeneratorSQLite(runs []string, name string) (string, []string)
- func Validate(r *http.Request) error
- func ValidateParameter(params Record, key string) (string, error)
- func ValidatePostPayload(rec Record) error
- func WhereClause(stm string, conds []string) string
- type API
- func (a *API) AcquisitionEras() error
- func (a *API) AcquisitionErasCi() error
- func (a *API) BlockChildren() error
- func (a *API) BlockDump() error
- func (a *API) BlockFileLumiIds() error
- func (a *API) BlockOrigin() error
- func (a *API) BlockParents() error
- func (a *API) BlockSummaries() error
- func (a *API) Blocks() error
- func (a *API) CancelMigration() error
- func (a *API) CleanupMigrationRequests(offset int64) error
- func (a *API) DataTiers() error
- func (a *API) DataTypes() error
- func (a *API) DatasetAccessTypes() error
- func (a *API) DatasetChildren() error
- func (a *API) DatasetList() error
- func (a *API) DatasetOutputModConfigs() error
- func (a *API) DatasetParents() error
- func (a *API) Datasets() error
- func (a *API) Dummy() []Record
- func (a *API) FileArray() error
- func (a *API) FileChildren() error
- func (a *API) FileDataTypes() error
- func (a *API) FileLumis() error
- func (a *API) FileOutputModConfigs() error
- func (a *API) FileParents() error
- func (a *API) FileParentsByLumi() error
- func (a *API) FileSummaries() error
- func (a *API) Files() error
- func (a *API) InsertAcquisitionEras() error
- func (a *API) InsertApplicationExecutables() error
- func (a *API) InsertBlocks() error
- func (a *API) InsertBranchHashes() error
- func (a *API) InsertBulkBlocks() error
- func (a *API) InsertBulkBlocksConcurrently() error
- func (a *API) InsertDataTiers() error
- func (a *API) InsertDataTypes() error
- func (a *API) InsertDatasetAccessTypes() error
- func (a *API) InsertDatasetChildren() error
- func (a *API) InsertDatasetList() error
- func (a *API) InsertDatasetOutputModConfigs() error
- func (a *API) InsertDatasets() error
- func (a *API) InsertFileArray() error
- func (a *API) InsertFileChildren() error
- func (a *API) InsertFileDataTypes() error
- func (a *API) InsertFileLumisTx(tx *sql.Tx) error
- func (a *API) InsertFileOutputModConfigs(tx *sql.Tx) error
- func (a *API) InsertFileParents() error
- func (a *API) InsertFileParentsBlockTxt(tx *sql.Tx) error
- func (a *API) InsertFileParentsByLumi() error
- func (a *API) InsertFileSummaries() error
- func (a *API) InsertFiles() error
- func (a *API) InsertOutputConfigs() error
- func (a *API) InsertOutputConfigsTx(tx *sql.Tx) error
- func (a *API) InsertPhysicsGroups() error
- func (a *API) InsertPrimaryDSTypes() error
- func (a *API) InsertPrimaryDatasets() error
- func (a *API) InsertProcessedDatasets() error
- func (a *API) InsertProcessingEras() error
- func (a *API) InsertReleaseVersions() error
- func (a *API) InsertRuns() error
- func (a *API) OutputConfigs() error
- func (a *API) OutputModules() error
- func (a *API) ParentDSTrio() error
- func (a *API) ParentDatasetFileLumiIds() error
- func (a *API) PhysicsGroups() error
- func (a *API) PrimaryDSTypes() error
- func (a *API) PrimaryDatasets() error
- func (a *API) ProcessMigration()
- func (a *API) ProcessMigrationCtx(timeout int) error
- func (a *API) ProcessedDatasets() error
- func (a *API) ProcessingEras() error
- func (a *API) ReleaseVersions() error
- func (a *API) RemoveMigration() error
- func (a *API) RunSummaries() error
- func (a *API) Runs() error
- func (a *API) SelectFileLumiListInsert(tx *sql.Tx, fll []FileLumi, tempTable string, fileID int64, function string) error
- func (a *API) StatusMigration() error
- func (a *API) String() string
- func (a *API) SubmitMigration() error
- func (a *API) TotalMigration() error
- func (a *API) UpdateAcquisitionEras() error
- func (a *API) UpdateBlockStats(tx *sql.Tx, blockID int64) error
- func (a *API) UpdateBlocks() error
- func (a *API) UpdateDatasets() error
- func (a *API) UpdateFiles() error
- type AcquisitionEra
- type AcquisitionEras
- type ApiParameters
- type ApiParametersMap
- type ApplicationExecutables
- type Block
- type BlockDumpRecord
- type BlockParent
- type BlockParentList
- type BlockParents
- type BlockRecord
- type BlockResponse
- type Blocks
- type BranchHashes
- type BulkBlocks
- type DBInfo
- type DBRecord
- type DBSError
- type DataTiers
- type Dataset
- type DatasetAccessTypes
- type DatasetConfig
- type DatasetConfigList
- type DatasetOutputModConfigs
- type DatasetParent
- type DatasetParentList
- type DatasetParents
- type DatasetRecord
- type DatasetResponse
- type DatasetShortRecord
- type Datasets
- type File
- type FileConfig
- type FileConfigList
- type FileDataTypes
- type FileList
- type FileLumi
- type FileLumis
- type FileOutputModConfigRecord
- type FileOutputModConfigs
- type FileParentBlockRecord
- type FileParentLFNRecord
- type FileParentList
- type FileParentRecord
- type FileParents
- type FileRecord
- type Files
- type Lexicon
- type LexiconPattern
- type MigrationBlock
- type MigrationBlocks
- type MigrationRemoveRequest
- type MigrationReport
- type MigrationRequest
- type MigrationStatusRequest
- type ObjectPattern
- type OutputConfigRecord
- type OutputConfigs
- type ParameterSetHashes
- type PhysicsGroups
- type PrimaryDSTypes
- type PrimaryDataset
- type PrimaryDatasetRecord
- type PrimaryDatasets
- type ProcessedDatasets
- type ProcessingEra
- type ProcessingEras
- type PyFileRecord
- type Record
- type ReleaseVersions
- type SchemaIndex
- type SchemaInfo
- type StrPattern
- type TLSCertsManager
- type TableIndex
- type TableInfo
- type TempFileRecord
Constants ¶
const ( GenericErrorCode = iota + 100 // generic DBS error DatabaseErrorCode // 101 database error TransactionErrorCode // 102 transaction error QueryErrorCode // 103 query error RowsScanErrorCode // 104 row scan error SessionErrorCode // 105 db session error CommitErrorCode // 106 db commit error ParseErrorCode // 107 parser error LoadErrorCode // 108 loading error, e.g. load template GetIDErrorCode // 109 get id db error InsertErrorCode // 110 db insert error UpdateErrorCode // 111 update error LastInsertErrorCode // 112 db last insert error ValidateErrorCode // 113 validation error PatternErrorCode // 114 pattern error DecodeErrorCode // 115 decode error EncodeErrorCode // 116 encode error ContentTypeErrorCode // 117 content type error ParametersErrorCode // 118 parameters error NotImplementedApiCode // 119 not implemented API error ReaderErrorCode // 120 io reader error WriterErrorCode // 121 io writer error UnmarshalErrorCode // 122 json unmarshal error MarshalErrorCode // 123 marshal error HttpRequestErrorCode // 124 HTTP request error MigrationErrorCode // 125 Migration error RemoveErrorCode // 126 remove error InvalidRequestErrorCode // 127 invalid request error BlockAlreadyExists // 128 block xxx already exists in DBS FileDataTypesDoesNotExist // 129 FileDataTypes does not exist in DBS FileParentDoesNotExist // 130 FileParent does not exist in DBS DatasetParentDoesNotExist // 131 DatasetParent does not exist in DBS ProcessedDatasetDoesNotExist // 132 ProcessedDataset does not exist in DBS PrimaryDatasetTypeDoesNotExist // 133 PrimaryDatasetType does not exist in DBS PrimaryDatasetDoesNotExist // 134 PrimaryDataset does not exist in DBS ProcessingEraDoesNotExist // 135 ProcessingEra does not exist in DBS AcquisitionEraDoesNotExist // 136 AcquisitionEra does not exist in DBS DataTierDoesNotExist // 137 DataTier does not exist in DBS PhysicsGroupDoesNotExist // 138 PhysicsGroup does not exist in DBS DatasetAccessTypeDoesNotExist // 139 DatasetAccessType does 
not exist in DBS DatasetDoesNotExist // 140 Dataset does not exist in DBS LastAvailableErrorCode // last available DBS error code )
DBS Error codes provides static representation of DBS errors, they cover 1xx range
const ( PENDING = iota IN_PROGRESS COMPLETED FAILED EXIST_IN_DB QUEUED TERM_FAILED = 9 )
MigrationCodes represents all migration codes
Variables ¶
var ApiParametersFile string
ApiParametersFile represents API parameter file
var Cert string
Cert represents DBS X509 cert used by HttpClient
var Ckey string
Ckey represents DBS X509 key used by HttpClient
var ConcurrencyErr = errors.New("concurrency error")
ConcurrencyErr represents generic concurrency error
var ConcurrentBulkBlocks bool
ConcurrentBulkBlocks defines if code should use concurrent bulkblocks API
var ContentTypeErr = errors.New("content-type error")
ContentTypeErr represents generic content-type error
var DB *sql.DB
DB represents sql DB pointer
var DBOWNER string
DBOWNER represents DBS DB owner
var DBTYPE string
DBTYPE represents DBS DB type, e.g. ORACLE or SQLite
var DRYRUN bool
DRYRUN allows to skip query execution and printout DB statements along with passed parameters
var DatabaseErr = errors.New("database error")
DatabaseErr represents generic database error
var FileChunkSize int
FileChunkSize controls size of chunk for []File insertion
var FileLumiChunkSize int
FileLumiChunkSize controls chunk size for FileLumi list insertion
var FileLumiInsertMethod string
FileLumiInsertMethod controls which method to use for insertion of FileLumi list
var FileLumiMaxSize int
FileLumiMaxSize controls max size for FileLumi list insertion
var GenericErr = errors.New("dbs error")
GenericErr represents generic dbs error
var InvalidParamErr = errors.New("invalid parameter(s)")
InvalidParamErr represents generic error for invalid input parameter
var InvalidRequestErr = errors.New("invalid request error")
InvalidRequestErr represents generic invalid request error
var LexiconPatterns map[string]LexiconPattern
LexiconPatterns represents CMS Lexicon patterns
var MigrateURL string
MigrateURL holds URL of DBSMigrate server
var MigrationAsyncTimeout int
MigrationAsyncTimeout defines timeout of asynchronous migration request process
var MigrationCleanupInterval int
MigrationCleanupInterval defines migration cleanup server interval
var MigrationCleanupOffset int64
MigrationCleanupOffset defines offset in seconds to delete migration requests
var MigrationDB *sql.DB
MigrationDB points to migration DB
var MigrationProcessTimeout int
MigrationProcessTimeout defines migration process timeout
var MigrationRetries int64
MigrationRetries specifies total number of migration retries
var MigrationServerInterval int
MigrationServerInterval defines migration server interval
var NotImplementedApiErr = errors.New("not implemented api error")
NotImplementedApiErr represents generic not implemented api error
var RecordErr = errors.New("record error")
RecordErr represents generic record error
var RecordValidator *validator.Validate
RecordValidator pointer to validator Validate method
var Timeout int
Timeout represents DBS timeout used by HttpClient
var TlsRefreshInterval int64
TlsRefreshInterval represents refresh interval for Tls proxy
var TotalCompleted uint64
TotalCompleted represents total number of completed migration requests
var TotalExistInDB uint64
TotalExistInDB represents total number of exist in db migration requests
var TotalFailed uint64
TotalFailed represents total number of failed migration requests
var TotalInProgress uint64
TotalInProgress represents total number in progress migration requests
var TotalMigrationRequests uint64
TotalMigrationRequests counts total number of migration requests processed by this server
var TotalPending uint64
TotalPending represents total number pending migration requests
var TotalQueued uint64
TotalQueued represents total number of queued migration requests
var TotalTermFailed uint64
TotalTermFailed represents total number of terminally failed migration requests
var ValidationErr = errors.New("validation error")
ValidationErr represents generic validation error
Functions ¶
func AddParam ¶
func AddParam( name, sqlName string, params Record, conds []string, args []interface{}) ([]string, []interface{})
AddParam adds single parameter to SQL statement
func CheckPattern ¶
CheckPattern is a generic function to check given key value within Lexicon map
func CheckQueryParameters ¶
CheckQueryParameters checks query parameters against API parameters map
func CleanStatement ¶
CleanStatement cleans the given SQL statement to remove empty strings, etc.
func CreateInvalidParamError ¶
CreateInvalidParamError creates the error for parameter validation
func DecodeValidatorError ¶
func DecodeValidatorError(r, err interface{}) error
DecodeValidatorError provides uniform error representation of DBRecord validation errors
func GetMigrationBlocksInOrder ¶
func GetMigrationBlocksInOrder(mblocks []MigrationBlock) []string
GetMigrationBlocksInOrder returns list of migration blocks in order of processing (first parents then children)
func GetParents ¶
GetParents returns list of parents for given block or dataset
func GetRecID ¶
GetRecID function fetches table primary id for a given value and insert it if necessary
func GetTestData ¶
func GetTestData() error
GetTestData executes simple query to ensure that connection to DB is valid. So far we can ask for a data tier id of specific tier since this table is very small and query execution will be really fast.
func HttpClient ¶
HttpClient is HTTP client for urlfetch server
func IfExistMulti ¶
IfExistMulti checks if given rid exists in given table for provided value conditions
func IncrementSequence ¶
IncrementSequence API returns single unique ID for a given sequence
func IncrementSequences ¶
IncrementSequences API provide a way to get N unique IDs for given sequence name
func InsertFileLumisTxViaChunks ¶
InsertFileLumisTxViaChunks DBS API
func LastInsertID ¶
LastInsertID returns last insert id of given table and idname parameter
func LoadPatterns ¶
func LoadPatterns(fname string) (map[string]LexiconPattern, error)
LoadPatterns loads CMS Lexicon patterns from given file. The format of the file is a list of the following dicts: [ {"name": <name>, "patterns": [list of patterns], "length": int},...]
func LoadTemplateSQL ¶
LoadTemplateSQL function loads DBS SQL templated statements
func MigrationCleanupServer ¶
MigrationCleanupServer represents migration cleanup daemon.
func MigrationServer ¶
MigrationServer represents migration server. It accepts migration process timeout used by ProcessMigration API and exit channel
func OperatorValue ¶
OperatorValue function generates operator and value pair for a given argument
func ParseDBFile ¶
ParseDBFile function parses given file name and extracts from it dbtype and dburi file should contain the "dbtype dburi" string
func RunsConditions ¶
RunsConditions function to handle runs conditions
func StartMigrationRequest ¶
func StartMigrationRequest(rec MigrationRequest)
StartMigrationRequest starts asynchronously migration request process via goroutine with timeout context the code is based on the following example: https://medium.com/geekculture/timeout-context-in-go-e88af0abd08d
func TokenCondition ¶
func TokenCondition() string
TokenCondition provides proper condition statement for TokenGenerator
func TokenGenerator ¶
TokenGenerator creates a SQL token generator statement
func TokenGeneratorORACLE ¶
TokenGeneratorORACLE creates a SQL token generator statement using ORACLE syntax https://betteratoracle.com/posts/20-how-do-i-bind-a-variable-in-list
func TokenGeneratorSQLite ¶
TokenGeneratorSQLite creates a SQL token generator statement using SQLite syntax https://stackoverflow.com/questions/67372811/what-is-equivalent-of-token-generator-oracle-sql-statement-in-sqlite
func ValidateParameter ¶
ValidateParameter validates POST/PUT parameters
func ValidatePostPayload ¶
ValidatePostPayload function to validate POST request
func WhereClause ¶
WhereClause function construct proper SQL statement from given statement and list of conditions
Types ¶
type API ¶
type API struct { Reader io.Reader // reader to read data payload Writer http.ResponseWriter // writer to write results back to client Context context.Context // HTTP context Params Record // HTTP input parameters Separator string // string separator for ndjson format CreateBy string // create by value from run-time Api string // api name }
API structure represents DBS API. Each API has reader (to read HTTP POST payload), HTTP writer to write results back to client, HTTP context, input HTTP GET parameters, separator for writer, create by and api string values passed at run-time.
func (*API) CancelMigration ¶
CancelMigration clean-ups migration requests in DB
func (*API) CleanupMigrationRequests ¶
CleanupMigrationRequests clean-ups migration requests in DB
func (*API) DatasetAccessTypes ¶
DatasetAccessTypes DBS API
func (*API) DatasetOutputModConfigs ¶
DatasetOutputModConfigs DBS API
func (*API) FileOutputModConfigs ¶
FileOutputModConfigs DBS API
func (*API) InsertAcquisitionEras ¶
InsertAcquisitionEras DBS API
func (*API) InsertApplicationExecutables ¶
InsertApplicationExecutables DBS API
func (*API) InsertBlocks ¶
InsertBlocks DBS API implement the following logic input values: blockname optional values: open_for_writing, origin_site(name), block_size, file_count, creation_date, create_by, last_modification_date, last_modified_by It insert given data in the following steps: - obtain dataset_id from given ds_name - increment block id - insert block input
func (*API) InsertBranchHashes ¶
InsertBranchHashes DBS API
func (*API) InsertBulkBlocks ¶
InsertBulkBlocks DBS API. It relies on BulkBlocks record which by itself contains series of other records. The logic of this API is the following: we read dataset_conf_list part of the record and insert output config data, then we insert recursively PrimaryDSTypes, PrimaryDataset, ProcessingEras, AcquisitionEras, ..., Datasets, Blocks, Files, FileLumis, FileConfig list, and dataset parent lists.
func (*API) InsertBulkBlocksConcurrently ¶
InsertBulkBlocksConcurrently DBS API provides concurrent bulk blocks insertion. It inherits the same logic as BulkBlocks API but perform Files and FileLumis injection concurrently via chunk of record. It relies on the following parameters:
- FileChunkSize defines number of concurrent goroutines executing injection into FILES table - FileLumiChunkSize/FileLumiMaxSize defines concurrent injection into FILE_LUMIS table. The former specifies chunk size while latter total number of records to be inserted at once to ORACLE DB - FileLumiInsertMethod defines which method to use for workflow execution, so far we support temptable, chunks, and sequential methods. The temptable uses ORACLE TEMPTABLE approach, chunks uses direct tables, and sequential method fallback to record by record injection (no goroutines).
func (*API) InsertDatasetAccessTypes ¶
InsertDatasetAccessTypes DBS API
func (*API) InsertDatasetChildren ¶
InsertDatasetChildren DBS API
func (*API) InsertDatasetOutputModConfigs ¶
InsertDatasetOutputModConfigs DBS API
func (*API) InsertDatasets ¶
InsertDatasets DBS API implements the following logic:
- take given input and insert - primary dataset info - acquisition era info - physics group info - processing era info - output module config info - insert dataset info
func (*API) InsertFileChildren ¶
InsertFileChildren DBS API
func (*API) InsertFileDataTypes ¶
InsertFileDataTypes DBS API
func (*API) InsertFileLumisTx ¶
InsertFileLumisTx DBS API
func (*API) InsertFileOutputModConfigs ¶
InsertFileOutputModConfigs DBS API
func (*API) InsertFileParents ¶
InsertFileParents DBS API is used by /fileparents end-point it accepts FileParentBlockRecord
func (*API) InsertFileParentsBlockTxt ¶
InsertFileParentsBlockTxt DBS API
func (*API) InsertFileParentsByLumi ¶
InsertFileParentsByLumi DBS API
func (*API) InsertFileSummaries ¶
InsertFileSummaries DBS API
func (*API) InsertFiles ¶
InsertFiles DBS API implements the following logic:
- extract dataset id for given dataset - extract output mod config for given dataset - get block information for given block - get file type id for given file type - loop over file parents and insert associative information into file tables - insert block parentage info - insert dataset parentage info - update block's info
func (*API) InsertOutputConfigs ¶
InsertOutputConfigs DBS API
func (*API) InsertOutputConfigsTx ¶
InsertOutputConfigsTx DBS API
func (*API) InsertPhysicsGroups ¶
InsertPhysicsGroups DBS API
func (*API) InsertPrimaryDSTypes ¶
InsertPrimaryDSTypes DBS API
func (*API) InsertPrimaryDatasets ¶
InsertPrimaryDatasets DBS API
func (*API) InsertProcessedDatasets ¶
InsertProcessedDatasets DBS API
func (*API) InsertProcessingEras ¶
InsertProcessingEras DBS API
func (*API) InsertReleaseVersions ¶
InsertReleaseVersions DBS API
func (*API) ParentDatasetFileLumiIds ¶
ParentDatasetFileLumiIds API
func (*API) ProcessMigration ¶
func (a *API) ProcessMigration()
ProcessMigration will process given migration request and inject data to source DBS It expects that client will provide migration_request_url and migration id
func (*API) ProcessMigrationCtx ¶
ProcessMigrationCtx will process given migration request and inject data to source DBS with timeout context It expects that client will provide migration_request_url and migration id
func (*API) SelectFileLumiListInsert ¶
func (a *API) SelectFileLumiListInsert(tx *sql.Tx, fll []FileLumi, tempTable string, fileID int64, function string) error
SelectFileLumiListInsert determines which of the three methods to insert the FileLumi list
func (*API) UpdateAcquisitionEras ¶
UpdateAcquisitionEras DBS API
func (*API) UpdateBlockStats ¶
UpdateBlockStats DBS API
type AcquisitionEra ¶
type AcquisitionEra struct { AcquisitionEraName string `json:"acquisition_era_name"` StartDate int64 `json:"start_date"` CreationDate int64 `json:"creation_date"` EndDate int64 `json:"end_date"` CreateBy string `json:"create_by"` Description string `json:"description"` }
AcquisitionEra represents AcquisitionEra structure use in BulkBlocks structure
type AcquisitionEras ¶
type AcquisitionEras struct { ACQUISITION_ERA_ID int64 `json:"acquisition_era_id"` ACQUISITION_ERA_NAME string `json:"acquisition_era_name" validate:"required"` START_DATE int64 `json:"start_date" validate:"required,number"` END_DATE int64 `json:"end_date"` CREATION_DATE int64 `json:"creation_date" validate:"required,number"` CREATE_BY string `json:"create_by" validate:"required"` DESCRIPTION string `json:"description"` }
AcquisitionEras represents Acquisition Eras DBS DB table
func (*AcquisitionEras) Decode ¶
func (r *AcquisitionEras) Decode(reader io.Reader) error
Decode implementation for AcquisitionEras
func (*AcquisitionEras) Insert ¶
func (r *AcquisitionEras) Insert(tx *sql.Tx) error
Insert implementation of AcquisitionEras
func (*AcquisitionEras) SetDefaults ¶
func (r *AcquisitionEras) SetDefaults()
SetDefaults implements set defaults for AcquisitionEras
func (*AcquisitionEras) Validate ¶
func (r *AcquisitionEras) Validate() error
Validate implementation of AcquisitionEras
type ApiParameters ¶
ApiParameters represents an API parameters record
type ApiParametersMap ¶
ApiParametersMap represents data type of api parameters
var ApiParamMap ApiParametersMap
ApiParamMap an object which holds API parameter records
func LoadApiParameters ¶
func LoadApiParameters(fname string) (ApiParametersMap, error)
LoadApiParameters loads Api parameters and constructs ApiParameters map
type ApplicationExecutables ¶
type ApplicationExecutables struct { APP_EXEC_ID int64 `json:"app_exec_id"` APP_NAME string `json:"app_name" validate:"required"` }
ApplicationExecutables structure describe associative table in DBS DB
func (*ApplicationExecutables) Decode ¶
func (r *ApplicationExecutables) Decode(reader io.Reader) error
Decode implementation for ApplicationExecutables
func (*ApplicationExecutables) Insert ¶
func (r *ApplicationExecutables) Insert(tx *sql.Tx) error
Insert implementation of ApplicationExecutables
func (*ApplicationExecutables) SetDefaults ¶
func (r *ApplicationExecutables) SetDefaults()
SetDefaults implements set defaults for ApplicationExecutables
func (*ApplicationExecutables) Validate ¶
func (r *ApplicationExecutables) Validate() error
Validate implementation of ApplicationExecutables
type Block ¶
type Block struct { BlockID int64 `json:"block_id"` DatasetID int64 `json:"dataset_id"` CreateBy string `json:"create_by"` CreationDate int64 `json:"creation_date"` OpenForWriting int64 `json:"open_for_writing"` BlockName string `json:"block_name"` FileCount int64 `json:"file_count"` OriginSiteName string `json:"origin_site_name"` BlockSize int64 `json:"block_size"` LastModifiedBy string `json:"last_modified_by"` LastModificationDate int64 `json:"last_modification_date"` }
Block represents Block structure used in BulkBlocks structure
type BlockDumpRecord ¶
type BlockDumpRecord struct { BLOCK_ID int64 `json:"block_id"` BLOCK_NAME string `json:"block_name"` DATASET string `json:"dataset"` PRIMARY_DATASET string `json:"prim_ds"` FILES []string `json:"files"` BLOCK_PARENT_LIST string `json:"block_parent_list"` DATASET_PARENT_LIST string `json:"dataset_parent_list"` DS_PARENT_LIST string `json:"ds_parent_list"` // for compatibility with Py server FILE_CONF_LIST string `json:"file_conf_list"` FILE_PARENT_LIST string `json:"file_parent_list"` DATASET_CONF_LIST string `json:"dataset_conf_list"` }
BlockDumpRecord represents input block record used in BlockDump and InsertBlockDump APIs
func (*BlockDumpRecord) Decode ¶
func (r *BlockDumpRecord) Decode(reader io.Reader) error
Decode implementation for Blocks
func (*BlockDumpRecord) InsertBlockDump ¶
func (r *BlockDumpRecord) InsertBlockDump() error
InsertBlockDump inserts block dump record into DBS
func (*BlockDumpRecord) SetDefaults ¶
func (r *BlockDumpRecord) SetDefaults()
SetDefaults implements set defaults for Blocks
func (*BlockDumpRecord) Validate ¶
func (r *BlockDumpRecord) Validate() error
Validate implementation of Blocks
type BlockParent ¶
type BlockParent struct { ParentBlockName string `json:"parent_block_name"` ThisBlockName string `json:"this_block_name"` }
BlockParent represents block parent structure used in BulkBlocks structure
type BlockParentList ¶
type BlockParentList []BlockParent
BlockParentList represents BlockParent records
type BlockParents ¶
type BlockParents struct { THIS_BLOCK_ID int64 `json:"this_block_id" validate:"required,number,gt=0"` PARENT_BLOCK_ID int64 `json:"parent_block_id" validate:"required,number,gt=0"` }
BlockParents structure represents block parents table in DBS DB
func (*BlockParents) Decode ¶
func (r *BlockParents) Decode(reader io.Reader) error
Decode implementation for BlockParents
func (*BlockParents) Insert ¶
func (r *BlockParents) Insert(tx *sql.Tx) error
Insert implementation of BlockParents
func (*BlockParents) SetDefaults ¶
func (r *BlockParents) SetDefaults()
SetDefaults implements set defaults for BlockParents
func (*BlockParents) Validate ¶
func (r *BlockParents) Validate() error
Validate implementation of BlockParents
type BlockRecord ¶
type BlockRecord struct { BLOCK_NAME string `json:"block_name"` OPEN_FOR_WRITING int64 `json:"open_for_writing"` ORIGIN_SITE_NAME string `json:"origin_site_name"` BLOCK_SIZE int64 `json:"block_size"` FILE_COUNT int64 `json:"file_count"` CREATION_DATE int64 `json:"creation_date"` CREATE_BY string `json:"create_by"` LAST_MODIFICATION_DATE int64 `json:"last_modification_date"` LAST_MODIFIED_BY string `json:"last_modified_by"` }
BlockRecord represents input record for insert blocks API
type BlockResponse ¶
BlockResponse represents block response structure used in GetParentBlocks
type Blocks ¶
type Blocks struct { BLOCK_ID int64 `json:"block_id"` BLOCK_NAME string `json:"block_name" validate:"required"` DATASET_ID int64 `json:"dataset_id" validate:"required,number,gt=0"` OPEN_FOR_WRITING int64 `json:"open_for_writing" validate:"number"` ORIGIN_SITE_NAME string `json:"origin_site_name" validate:"required"` BLOCK_SIZE int64 `json:"block_size" validate:"number"` FILE_COUNT int64 `json:"file_count" validate:"number"` CREATION_DATE int64 `json:"creation_date" validate:"required,number"` CREATE_BY string `json:"create_by" validate:"required"` LAST_MODIFICATION_DATE int64 `json:"last_modification_date" validate:"required,number"` LAST_MODIFIED_BY string `json:"last_modified_by" validate:"required"` }
Blocks represents Blocks DBS DB table
func (*Blocks) SetDefaults ¶
func (r *Blocks) SetDefaults()
SetDefaults implements set defaults for Blocks
type BranchHashes ¶
type BranchHashes struct { BRANCH_HASH_ID int64 `json:"branch_hash_id"` BRANCH_HASH string `json:"branch_hash"` CONTENT string `json:"content"` }
BranchHashes represents Branch Hashes DBS DB table
func (*BranchHashes) Decode ¶
func (r *BranchHashes) Decode(reader io.Reader) error
Decode implementation for BranchHashes
func (*BranchHashes) Insert ¶
func (r *BranchHashes) Insert(tx *sql.Tx) error
Insert implementation of BranchHashes
func (*BranchHashes) SetDefaults ¶
func (r *BranchHashes) SetDefaults()
SetDefaults implements set defaults for BranchHashes
func (*BranchHashes) Validate ¶
func (r *BranchHashes) Validate() error
Validate implementation of BranchHashes
type BulkBlocks ¶
type BulkBlocks struct { DatasetConfigList []DatasetConfig `json:"dataset_conf_list"` FileConfigList []FileConfig `json:"file_conf_list"` Files []File `json:"files"` ProcessingEra ProcessingEra `json:"processing_era"` PrimaryDataset PrimaryDataset `json:"primds"` Dataset Dataset `json:"dataset"` AcquisitionEra AcquisitionEra `json:"acquisition_era"` Block Block `json:"block"` FileParentList []FileParentRecord `json:"file_parent_list"` BlockParentList []BlockParent `json:"block_parent_list"` DatasetParentList []string `json:"dataset_parent_list"` // used by bulkblocks API DsParentList []DatasetParent `json:"ds_parent_list"` // provided by bulkdump API }
BulkBlocks represents bulk block structure used by `/bulkblocks` DBS API
type DBInfo ¶
type DBInfo struct { FullSize float64 IndexSize float64 Schemas []SchemaInfo Tables []TableInfo }
DBInfo represents entire database information
type DBRecord ¶
type DBRecord interface { Insert(tx *sql.Tx) error // used to insert given record to DB Validate() error // used to validate given record SetDefaults() // used to set proper defaults for given record Decode(r io.Reader) error // used to decode given record }
DBRecord interface represents general DB record used by DBS APIs. Each DBS API represents specific Table in back-end DB. And, each individual DBS API implements logic for its own DB records
type DBSError ¶
type DBSError struct { Reason string `json:"reason"` // error string Message string `json:"message"` // additional message describing the issue Function string `json:"function"` // DBS function Code int `json:"code"` // DBS error code Stacktrace string `json:"stacktrace"` // Go stack trace }
DBSError represents common structure for DBS errors
func (*DBSError) ErrorStacktrace ¶
ErrorStacktrace function implements details of DBS error message and stacktrace
type DataTiers ¶
type DataTiers struct { DATA_TIER_ID int64 `json:"data_tier_id"` DATA_TIER_NAME string `json:"data_tier_name" validate:"required,uppercase"` CREATION_DATE int64 `json:"creation_date" validate:"required,number"` CREATE_BY string `json:"create_by" validate:"required"` }
DataTiers represents data tiers DBS DB table
func (*DataTiers) SetDefaults ¶
func (r *DataTiers) SetDefaults()
SetDefaults implements set defaults for DataTiers
type Dataset ¶
type Dataset struct { DatasetID int64 `json:"dataset_id"` CreateBy string `json:"create_by"` CreationDate int64 `json:"creation_date"` PhysicsGroupName string `json:"physics_group_name"` DatasetAccessType string `json:"dataset_access_type"` DataTierName string `json:"data_tier_name"` LastModifiedBy string `json:"last_modified_by"` ProcessedDSName string `json:"processed_ds_name"` Xtcrosssection float64 `json:"xtcrosssection"` LastModificationDate int64 `json:"last_modification_date"` Dataset string `json:"dataset"` PrepID string `json:"prep_id"` }
Dataset represents dataset structure used in BulkBlocks structure
type DatasetAccessTypes ¶
type DatasetAccessTypes struct { DATASET_ACCESS_TYPE_ID int64 `json:"dataset_access_type_id"` DATASET_ACCESS_TYPE string `json:"dataset_access_type" validate:"required"` }
DatasetAccessTypes represents Dataset Access Types DBS DB table
func (*DatasetAccessTypes) Decode ¶
func (r *DatasetAccessTypes) Decode(reader io.Reader) error
Decode implementation for DatasetAccessTypes
func (*DatasetAccessTypes) Insert ¶
func (r *DatasetAccessTypes) Insert(tx *sql.Tx) error
Insert implementation of DatasetAccessTypes
func (*DatasetAccessTypes) SetDefaults ¶
func (r *DatasetAccessTypes) SetDefaults()
SetDefaults implements set defaults for DatasetAccessTypes
func (*DatasetAccessTypes) Validate ¶
func (r *DatasetAccessTypes) Validate() error
Validate implementation of DatasetAccessTypes
type DatasetConfig ¶
type DatasetConfig struct { ReleaseVersion string `json:"release_version"` PsetHash string `json:"pset_hash"` PsetName string `json:"pset_name"` AppName string `json:"app_name"` OutputModuleLabel string `json:"output_module_label"` GlobalTag string `json:"global_tag"` CreateBy string `json:"create_by"` CreationDate int64 `json:"creation_date"` }
DatasetConfig represents dataset config structure used in BulkBlocks structure
type DatasetConfigList ¶
type DatasetConfigList []DatasetConfig
DatasetConfigList represents DatasetConfig records
type DatasetOutputModConfigs ¶
type DatasetOutputModConfigs struct { DS_OUTPUT_MOD_CONF_ID int64 `json:"ds_output_mod_conf_id"` DATASET_ID int64 `json:"dataset_id" validate:"required,number,gt=0"` OUTPUT_MOD_CONFIG_ID int64 `json:"output_mod_config_id" validate:"required,number,gt=0"` }
DatasetOutputModConfigs represents dataset output mod configs DBS DB table
func (*DatasetOutputModConfigs) Decode ¶
func (r *DatasetOutputModConfigs) Decode(reader io.Reader) error
Decode implementation for DatasetOutputModConfigs
func (*DatasetOutputModConfigs) Insert ¶
func (r *DatasetOutputModConfigs) Insert(tx *sql.Tx) error
Insert implementation of DatasetOutputModConfigs
func (*DatasetOutputModConfigs) SetDefaults ¶
func (r *DatasetOutputModConfigs) SetDefaults()
SetDefaults implements set defaults for DatasetOutputModConfigs
func (*DatasetOutputModConfigs) Validate ¶
func (r *DatasetOutputModConfigs) Validate() error
Validate implementation of DatasetOutputModConfigs
type DatasetParent ¶
type DatasetParent struct { ThisDatasetID string `json:"this_dataset_id"` ParentDataset string `json:"parent_dataset"` }
DatasetParent represents dataset parent structure used in BulkBlocks structure
type DatasetParentList ¶
type DatasetParentList []string
DatasetParentList represents list of dataset parents
type DatasetParents ¶
type DatasetParents struct { THIS_DATASET_ID int64 `json:"this_dataset_id" validate:"required,number,gt=0"` PARENT_DATASET_ID int64 `json:"parent_dataset_id" validate:"required,number,gt=0"` }
DatasetParents represents Dataset Parents DBS DB table
func (*DatasetParents) Decode ¶
func (r *DatasetParents) Decode(reader io.Reader) error
Decode implementation for DatasetParents
func (*DatasetParents) Insert ¶
func (r *DatasetParents) Insert(tx *sql.Tx) error
Insert implementation of DatasetParents
func (*DatasetParents) SetDefaults ¶
func (r *DatasetParents) SetDefaults()
SetDefaults implements set defaults for DatasetParents
func (*DatasetParents) Validate ¶
func (r *DatasetParents) Validate() error
Validate implementation of DatasetParents
type DatasetRecord ¶
type DatasetRecord struct { DATASET string `json:"dataset" validate:"required"` PRIMARY_DS_NAME string `json:"primary_ds_name" validate:"required"` // PRIMARY_DS_TYPE string `json:"primary_ds_type" validate:"required"` PROCESSED_DS_NAME string `json:"processed_ds_name" validate:"required"` DATA_TIER_NAME string `json:"data_tier_name" validate:"required"` ACQUISITION_ERA_NAME string `json:"acquisition_era_name" validate:"required"` DATASET_ACCESS_TYPE string `json:"dataset_access_type" validate:"required"` PROCESSING_VERSION int64 `json:"processing_version" validate:"required,number,gt=0"` PHYSICS_GROUP_NAME string `json:"physics_group_name" validate:"required"` XTCROSSSECTION float64 `json:"xtcrosssection" validate:"required,number"` CREATION_DATE int64 `json:"creation_date" validate:"required,number,gt=0"` CREATE_BY string `json:"create_by" validate:"required"` LAST_MODIFICATION_DATE int64 `json:"last_modification_date" validate:"required,number,gt=0"` LAST_MODIFIED_BY string `json:"last_modified_by" validate:"required"` OUTPUT_CONFIGS []OutputConfigRecord `json:"output_configs"` }
DatasetRecord represents the dataset record we receive for the InsertDatasets API
type DatasetResponse ¶
type DatasetResponse struct { Dataset string MigrationBlocks []MigrationBlock Error error }
DatasetResponse represents response of processDatasetBlocks API
type DatasetShortRecord ¶
type DatasetShortRecord struct { Dataset string `json:"dataset"` DatasetAccessType string `json:"dataset_access_type"` }
DatasetShortRecord represents short dataset record
type Datasets ¶
type Datasets struct { DATASET_ID int64 `json:"dataset_id"` DATASET string `json:"dataset" validate:"required"` IS_DATASET_VALID int `json:"is_dataset_valid" validate:"required,number"` PRIMARY_DS_ID int64 `json:"primary_ds_id" validate:"required,number,gt=0"` PROCESSED_DS_ID int64 `json:"processed_ds_id" validate:"required,number,gt=0"` DATA_TIER_ID int64 `json:"data_tier_id" validate:"required,number,gt=0"` DATASET_ACCESS_TYPE_ID int64 `json:"dataset_access_type_id" validate:"required,number,gt=0"` ACQUISITION_ERA_ID int64 `json:"acquisition_era_id" validate:"required,number,gt=0"` PROCESSING_ERA_ID int64 `json:"processing_era_id" validate:"required,number,gt=0"` PHYSICS_GROUP_ID int64 `json:"physics_group_id" validate:"required,number,gt=0"` XTCROSSSECTION float64 `json:"xtcrosssection" validate:"required"` PREP_ID string `json:"prep_id"` CREATION_DATE int64 `json:"creation_date" validate:"required,number"` CREATE_BY string `json:"create_by" validate:"required"` LAST_MODIFICATION_DATE int64 `json:"last_modification_date" validate:"required,number"` LAST_MODIFIED_BY string `json:"last_modified_by" validate:"required"` }
Datasets represents Datasets DBS DB table
func (*Datasets) SetDefaults ¶
func (r *Datasets) SetDefaults()
SetDefaults implements set defaults for Datasets
type File ¶
type File struct { CheckSum string `json:"check_sum"` FileLumiList []FileLumi `json:"file_lumi_list"` Adler32 string `json:"adler32"` FileSize int64 `json:"file_size"` EventCount int64 `json:"event_count"` FileType string `json:"file_type"` LastModifiedBy string `json:"last_modified_by"` LastModificationDate int64 `json:"last_modification_date"` LogicalFileName string `json:"logical_file_name"` MD5 string `json:"md5"` AutoCrossSection float64 `json:"auto_cross_section"` IsFileValid int64 `json:"is_file_valid"` }
File represents file structure used in BulkBlocks structure
type FileConfig ¶
type FileConfig struct { ReleaseVersion string `json:"release_version"` PsetHash string `json:"pset_hash"` PsetName string `json:"pset_name"` LFN string `json:"lfn"` AppName string `json:"app_name"` OutputModuleLabel string `json:"output_module_label"` GlobalTag string `json:"global_tag"` CreateBy string `json:"create_by"` CreationDate int64 `json:"creation_date"` }
FileConfig represents file config structure used in BulkBlocks structure
type FileDataTypes ¶
type FileDataTypes struct { FILE_TYPE_ID int64 `json:"file_type_id"` FILE_TYPE string `json:"file_type" validate:"required"` }
FileDataTypes represents File Data Types DBS DB table
func (*FileDataTypes) Decode ¶
func (r *FileDataTypes) Decode(reader io.Reader) error
Decode implementation for FileDataTypes
func (*FileDataTypes) Insert ¶
func (r *FileDataTypes) Insert(tx *sql.Tx) error
Insert implementation of FileDataTypes
func (*FileDataTypes) SetDefaults ¶
func (r *FileDataTypes) SetDefaults()
SetDefaults implements set defaults for FileDataTypes
func (*FileDataTypes) Validate ¶
func (r *FileDataTypes) Validate() error
Validate implementation of FileDataTypes
type FileLumi ¶
type FileLumi struct { LumiSectionNumber int64 `json:"lumi_section_num"` RunNumber int64 `json:"run_num"` EventCount int64 `json:"event_count"` }
FileLumi represents file lumi structure used in File structure of BulkBlocks structure
type FileLumis ¶
type FileLumis struct { FILE_ID int64 `json:"file_id" validate:"required,number"` LUMI_SECTION_NUM int64 `json:"lumi_section_num" validate:"required,number"` RUN_NUM int64 `json:"run_num" validate:"required,number"` EVENT_COUNT int64 `json:"event_count"` }
FileLumis represents File Lumis DBS DB table
func (*FileLumis) SetDefaults ¶
func (r *FileLumis) SetDefaults()
SetDefaults implements set defaults for FileLumis
type FileOutputModConfigRecord ¶
type FileOutputModConfigRecord struct { ReleaseVersion string `json:"release_version"` PsetHash string `json:"pset_hash"` Lfn string `json:"lfn"` AppName string `json:"app_name"` OutputModuleLabel string `json:"output_module_label"` GlobalTag string `json:"global_tag"` }
FileOutputModConfigRecord represents file output mod config input record
type FileOutputModConfigs ¶
type FileOutputModConfigs struct { FILE_OUTPUT_CONFIG_ID int64 `json:"file_output_config_id"` FILE_ID int64 `json:"file_id" validate:"required,number,gt=0"` OUTPUT_MOD_CONFIG_ID int64 `json:"output_mod_config_id" validate:"required,number,gt=0"` }
FileOutputModConfigs represents file output mod config DBS DB table
func (*FileOutputModConfigs) Decode ¶
func (r *FileOutputModConfigs) Decode(reader io.Reader) error
Decode implementation for FileOutputModConfigs
func (*FileOutputModConfigs) Insert ¶
func (r *FileOutputModConfigs) Insert(tx *sql.Tx) error
Insert implementation of FileOutputModConfigs
func (*FileOutputModConfigs) SetDefaults ¶
func (r *FileOutputModConfigs) SetDefaults()
SetDefaults implements set defaults for FileOutputModConfigs
func (*FileOutputModConfigs) Validate ¶
func (r *FileOutputModConfigs) Validate() error
Validate implementation of FileOutputModConfigs
type FileParentBlockRecord ¶
type FileParentBlockRecord struct { BlockName string `json:"block_name"` ChildParentIDList [][]int64 `json:"child_parent_id_list"` MissingFiles int64 `json:"missing_files"` }
FileParentBlockRecord represents file parent DBS record BlockName: name of the block ChildParentIDList: list of child and parent file ids MissingFiles: Number of missing files in the child and parent file list
type FileParentLFNRecord ¶
type FileParentLFNRecord struct {
FILE_PARENT_LFN string `json:"file_parent_lfn"`
}
FileParentLFNRecord represents file parent record supplied in file parent list of FileRecord
type FileParentList ¶
type FileParentList []FileParentRecord
FileParentList represents FileParent records
type FileParentRecord ¶
type FileParentRecord struct { ThisLogicalFileName string `json:"this_logical_file_name,omitempty"` LogicalFileName string `json:"logical_file_name,omitempty"` ParentLogicalFileName string `json:"parent_logical_file_name"` ParentFileID int64 `json:"parent_file_id,omitempty"` }
FileParentRecord represents file parent DBS record used by bulkblocks API NOTE: bulkblocks API should return this_logical_file_name as it is used by DBS migrate while users, e.g. CRAB, can construct by themselves the bulkblock structure where they may use logical_file_name name Therefore, we should keep both this_logical_file_name and logical_file_name together for backward compatibility
type FileParents ¶
type FileParents struct { THIS_FILE_ID int64 `json:"this_file_id" validate:"required,number,gt=0"` PARENT_FILE_ID int64 `json:"parent_file_id" validate:"required,number"` // TODO: may need to modify to handle `none` from python }
FileParents represents file parents DBS DB table
func (*FileParents) Decode ¶
func (r *FileParents) Decode(reader io.Reader) error
Decode implementation for FileParents
func (*FileParents) Insert ¶
func (r *FileParents) Insert(tx *sql.Tx) error
Insert implementation of FileParents
func (*FileParents) SetDefaults ¶
func (r *FileParents) SetDefaults()
SetDefaults implements set defaults for FileParents
func (*FileParents) Validate ¶
func (r *FileParents) Validate() error
Validate implementation of FileParents TODO: handle this for partial parentage
type FileRecord ¶
type FileRecord struct { LOGICAL_FILE_NAME string `json:"logical_file_name"` IS_FILE_VALID int64 `json:"is_file_valid"` DATASET string `json:"dataset"` BLOCK_NAME string `json:"block_name"` FILE_TYPE string `json:"file_type"` CHECK_SUM string `json:"check_sum"` FILE_SIZE int64 `json:"file_size"` EVENT_COUNT int64 `json:"event_count"` ADLER32 string `json:"adler32"` MD5 string `json:"md5"` AUTO_CROSS_SECTION float64 `json:"auto_cross_section"` CREATION_DATE int64 `json:"creation_date"` CREATE_BY string `json:"create_by"` LAST_MODIFICATION_DATE int64 `json:"last_modification_date"` LAST_MODIFIED_BY string `json:"last_modified_by"` FILE_LUMI_LIST []FileLumi `json:"file_lumi_list"` FILE_PARENT_LIST []FileParentLFNRecord `json:"file_parent_list"` FILE_OUTPUT_CONFIG_LIST []OutputConfigRecord `json:"file_output_config"` }
FileRecord represents input record for insert files API
type Files ¶
type Files struct { FILE_ID int64 `json:"file_id"` LOGICAL_FILE_NAME string `json:"logical_file_name" validate:"required"` IS_FILE_VALID int64 `json:"is_file_valid" validate:"number"` DATASET_ID int64 `json:"dataset_id" validate:"number,gt=0"` BLOCK_ID int64 `json:"block_id" validate:"number,gt=0"` FILE_TYPE_ID int64 `json:"file_type_id" validate:"number,gt=0"` CHECK_SUM string `json:"check_sum" validate:"required"` FILE_SIZE int64 `json:"file_size" validate:"required,number,gt=0"` EVENT_COUNT int64 `json:"event_count" validate:"number"` ADLER32 string `json:"adler32" validate:"required"` MD5 string `json:"md5"` AUTO_CROSS_SECTION float64 `json:"auto_cross_section"` CREATION_DATE int64 `json:"creation_date" validate:"required,number,gt=0"` CREATE_BY string `json:"create_by" validate:"required"` LAST_MODIFICATION_DATE int64 `json:"last_modification_date" validate:"required,number,gt=0"` LAST_MODIFIED_BY string `json:"last_modified_by" validate:"required"` }
Files represents Files DBS DB table
func (*Files) SetDefaults ¶
func (r *Files) SetDefaults()
SetDefaults implements set defaults for Files
type Lexicon ¶
type Lexicon struct { Name string `json:"name"` Patterns []string `json:"patterns"` Length int `json:"length"` }
Lexicon represents single lexicon pattern structure
type LexiconPattern ¶
LexiconPattern represents single lexicon compiled pattern structure
type MigrationBlock ¶
MigrationBlock represent block with migration order
func GetParentBlocks ¶
func GetParentBlocks(rurl, block string, order int) ([]MigrationBlock, error)
GetParentBlocks returns parent blocks for given url and block name
func GetParentDatasetBlocks ¶
func GetParentDatasetBlocks(rurl, dataset string, order int) ([]MigrationBlock, error)
GetParentDatasetBlocks returns full list of parent blocks associated with given dataset
type MigrationBlocks ¶
type MigrationBlocks struct { MIGRATION_BLOCK_ID int64 `json:"migration_block_id" validate:"required,number,gt=0"` MIGRATION_REQUEST_ID int64 `json:"migration_request_id" validate:"required,number,gt=0"` MIGRATION_BLOCK_NAME string `json:"migration_block_name" validate:"required"` MIGRATION_ORDER int64 `json:"migration_order" validate:"gte=0"` MIGRATION_STATUS int64 `json:"migration_status" validate:"gte=0,lte=10"` CREATE_BY string `json:"create_by" validate:"required"` CREATION_DATE int64 `json:"creation_date" validate:"required,number,gt=0"` LAST_MODIFIED_BY string `json:"last_modified_by" validate:"required"` LAST_MODIFICATION_DATE int64 `json:"last_modification_date" validate:"required,number,gt=0"` }
MigrationBlocks represents migration blocks table
func (*MigrationBlocks) Decode ¶
func (r *MigrationBlocks) Decode(reader io.Reader) error
Decode implementation for MigrationBlocks
func (*MigrationBlocks) Insert ¶
func (r *MigrationBlocks) Insert(tx *sql.Tx) error
Insert implementation of MigrationBlocks
func (*MigrationBlocks) SetDefaults ¶
func (r *MigrationBlocks) SetDefaults()
SetDefaults implements set defaults for MigrationBlocks
func (*MigrationBlocks) Validate ¶
func (r *MigrationBlocks) Validate() error
Validate implementation of MigrationBlocks
type MigrationRemoveRequest ¶
type MigrationRemoveRequest struct {
MIGRATION_REQUEST_ID int64 `json:"migration_rqst_id"`
}
MigrationRemoveRequest represents migration remove request object
type MigrationReport ¶
type MigrationReport struct { MigrationRequest MigrationRequest `json:"migration_details"` Report string `json:"migration_report"` Status string `json:"status"` Error error `json:"error"` }
MigrationReport represents migration report returned by the migration API
type MigrationRequest ¶
type MigrationRequest struct { MIGRATION_REQUEST_ID int64 `json:"migration_request_id" validate:"required,number,gt=0"` MIGRATION_URL string `json:"migration_url" validate:"required"` MIGRATION_INPUT string `json:"migration_input" validate:"required"` MIGRATION_STATUS int64 `json:"migration_status" validate:"gte=0,lte=10"` MIGRATION_SERVER string `json:"migration_server"` CREATE_BY string `json:"create_by" validate:"required"` CREATION_DATE int64 `json:"creation_date" validate:"required,number,gt=0"` LAST_MODIFIED_BY string `json:"last_modified_by" validate:"required"` LAST_MODIFICATION_DATE int64 `json:"last_modification_date" validate:"required,number,gt=0"` RETRY_COUNT int64 `json:"retry_count"` }
MigrationRequest represent MigrationRequest table
func MigrationRequests ¶
func MigrationRequests(mid int64) ([]MigrationRequest, error)
MigrationRequests fetches migration requests from migration table
func (*MigrationRequest) Copy ¶
func (r *MigrationRequest) Copy() MigrationRequest
Copy creates a new copy of migration request
func (*MigrationRequest) Decode ¶
func (r *MigrationRequest) Decode(reader io.Reader) error
Decode implementation for MigrationRequest
func (*MigrationRequest) Insert ¶
func (r *MigrationRequest) Insert(tx *sql.Tx) error
Insert implementation of MigrationRequest
func (*MigrationRequest) SetDefaults ¶
func (r *MigrationRequest) SetDefaults()
SetDefaults implements set defaults for MigrationRequest
func (*MigrationRequest) Validate ¶
func (r *MigrationRequest) Validate() error
Validate implementation of MigrationRequest
type MigrationStatusRequest ¶
type MigrationStatusRequest struct { BLOCK_NAME string `json:"block_name"` DATASET string `json:"dataset"` USER string `json:"user"` }
MigrationStatusRequest defines status request structure
type ObjectPattern ¶
ObjectPattern represents interface to check different objects
type OutputConfigRecord ¶
type OutputConfigRecord struct { APP_NAME string `json:"app_name"` RELEASE_VERSION string `json:"release_version"` PSET_HASH string `json:"pset_hash"` PSET_NAME string `json:"pset_name"` GLOBAL_TAG string `json:"global_tag"` OUTPUT_MODULE_LABEL string `json:"output_module_label"` CREATION_DATE int64 `json:"creation_date"` CREATE_BY string `json:"create_by"` SCENARIO string `json:"scenario"` }
OutputConfigRecord represents input to InsertOutputConfigs API
type OutputConfigs ¶
type OutputConfigs struct { OUTPUT_MOD_CONFIG_ID int64 `json:"output_mod_config_id"` APP_EXEC_ID int64 `json:"app_exec_id" validate:"required,number,gt=0"` RELEASE_VERSION_ID int64 `json:"release_version_id" validate:"required,number,gt=0"` PARAMETER_SET_HASH_ID int64 `json:"parameter_set_hash_id" validate:"required,number,gt=0"` OUTPUT_MODULE_LABEL string `json:"output_module_label" validate:"required"` GLOBAL_TAG string `json:"global_tag" validate:"required"` SCENARIO string `json:"scenario"` CREATION_DATE int64 `json:"creation_date" validate:"required,number,gt=0"` CREATE_BY string `json:"create_by" validate:"required"` }
OutputConfigs represents Output Configs DBS DB table
func (*OutputConfigs) Decode ¶
func (r *OutputConfigs) Decode(reader io.Reader) error
Decode implementation for OutputConfigs
func (*OutputConfigs) Insert ¶
func (r *OutputConfigs) Insert(tx *sql.Tx) error
Insert implementation of OutputConfigs
func (*OutputConfigs) SetDefaults ¶
func (r *OutputConfigs) SetDefaults()
SetDefaults implements set defaults for OutputConfigs
func (*OutputConfigs) Size ¶
func (r *OutputConfigs) Size() int64
Size implementation for OutputConfigs
func (*OutputConfigs) Validate ¶
func (r *OutputConfigs) Validate() error
Validate implementation of OutputConfigs
type ParameterSetHashes ¶
type ParameterSetHashes struct { PARAMETER_SET_HASH_ID int64 `json:"parameter_set_hash_id"` PSET_NAME string `json:"pset_name"` PSET_HASH string `json:"pset_hash" validate:"required"` }
ParameterSetHashes represents Parameter Set Hashes DBS DB table
func (*ParameterSetHashes) Decode ¶
func (r *ParameterSetHashes) Decode(reader io.Reader) error
Decode implementation for ParameterSetHashes
func (*ParameterSetHashes) Insert ¶
func (r *ParameterSetHashes) Insert(tx *sql.Tx) error
Insert implementation of ParameterSetHashes
func (*ParameterSetHashes) SetDefaults ¶
func (r *ParameterSetHashes) SetDefaults()
SetDefaults implements set defaults for ParameterSetHashes
func (*ParameterSetHashes) Validate ¶
func (r *ParameterSetHashes) Validate() error
Validate implementation of ParameterSetHashes
type PhysicsGroups ¶
type PhysicsGroups struct { PHYSICS_GROUP_ID int64 `json:"physics_group_id"` PHYSICS_GROUP_NAME string `json:"physics_group_name" validate:"required"` }
PhysicsGroups represents Physics Groups DBS DB table
func (*PhysicsGroups) Decode ¶
func (r *PhysicsGroups) Decode(reader io.Reader) error
Decode implementation for PhysicsGroups
func (*PhysicsGroups) Insert ¶
func (r *PhysicsGroups) Insert(tx *sql.Tx) error
Insert implementation of PhysicsGroups
func (*PhysicsGroups) SetDefaults ¶
func (r *PhysicsGroups) SetDefaults()
SetDefaults implements set defaults for PhysicsGroups
func (*PhysicsGroups) Validate ¶
func (r *PhysicsGroups) Validate() error
Validate implementation of PhysicsGroups
type PrimaryDSTypes ¶
type PrimaryDSTypes struct { PRIMARY_DS_TYPE_ID int64 `json:"primary_ds_type_id"` PRIMARY_DS_TYPE string `json:"primary_ds_type" validate:"required"` }
PrimaryDSTypes represents primary ds types DBS DB table
func (*PrimaryDSTypes) Decode ¶
func (r *PrimaryDSTypes) Decode(reader io.Reader) error
Decode implementation for PrimaryDSTypes
func (*PrimaryDSTypes) Insert ¶
func (r *PrimaryDSTypes) Insert(tx *sql.Tx) error
Insert implementation of PrimaryDSTypes
func (*PrimaryDSTypes) SetDefaults ¶
func (r *PrimaryDSTypes) SetDefaults()
SetDefaults implements set defaults for PrimaryDSTypes
func (*PrimaryDSTypes) Validate ¶
func (r *PrimaryDSTypes) Validate() error
Validate implementation of PrimaryDSTypes
type PrimaryDataset ¶
type PrimaryDataset struct { PrimaryDSId int64 `json:"primary_ds_id"` CreateBy string `json:"create_by"` PrimaryDSType string `json:"primary_ds_type"` PrimaryDSName string `json:"primary_ds_name"` CreationDate int64 `json:"creation_date"` }
PrimaryDataset represents primary dataset structure used in BulkBlocks structure
type PrimaryDatasetRecord ¶
type PrimaryDatasetRecord struct { PRIMARY_DS_NAME string `json:"primary_ds_name" validate:"required"` PRIMARY_DS_TYPE string `json:"primary_ds_type" validate:"required"` CREATION_DATE int64 `json:"creation_date" validate:"required,number,gt=0"` CREATE_BY string `json:"create_by" validate:"required"` }
PrimaryDatasetRecord represents primary dataset record
type PrimaryDatasets ¶
type PrimaryDatasets struct { PRIMARY_DS_ID int64 `json:"primary_ds_id"` PRIMARY_DS_NAME string `json:"primary_ds_name" validate:"required"` PRIMARY_DS_TYPE_ID int64 `json:"primary_ds_type_id" validate:"required,number,gt=0"` CREATION_DATE int64 `json:"creation_date" validate:"required,number,gt=0"` CREATE_BY string `json:"create_by" validate:"required"` }
PrimaryDatasets represents Primary Datasets DBS DB table
func (*PrimaryDatasets) Decode ¶
func (r *PrimaryDatasets) Decode(reader io.Reader) error
Decode implementation for PrimaryDatasets
func (*PrimaryDatasets) Insert ¶
func (r *PrimaryDatasets) Insert(tx *sql.Tx) error
Insert implementation of PrimaryDatasets
func (*PrimaryDatasets) SetDefaults ¶
func (r *PrimaryDatasets) SetDefaults()
SetDefaults implements set defaults for PrimaryDatasets
func (*PrimaryDatasets) Validate ¶
func (r *PrimaryDatasets) Validate() error
Validate implementation of PrimaryDatasets
type ProcessedDatasets ¶
type ProcessedDatasets struct { PROCESSED_DS_ID int64 `json:"processed_ds_id"` PROCESSED_DS_NAME string `json:"processed_ds_name" validate:"required"` }
ProcessedDatasets represents Processed Datasets DBS DB table
func (*ProcessedDatasets) Decode ¶
func (r *ProcessedDatasets) Decode(reader io.Reader) error
Decode implementation for ProcessedDatasets
func (*ProcessedDatasets) Insert ¶
func (r *ProcessedDatasets) Insert(tx *sql.Tx) error
Insert implementation of ProcessedDatasets
func (*ProcessedDatasets) SetDefaults ¶
func (r *ProcessedDatasets) SetDefaults()
SetDefaults implements set defaults for ProcessedDatasets
func (*ProcessedDatasets) Validate ¶
func (r *ProcessedDatasets) Validate() error
Validate implementation of ProcessedDatasets
type ProcessingEra ¶
type ProcessingEra struct { CreateBy string `json:"create_by"` CreationDate int64 `json:"creation_date"` ProcessingVersion int64 `json:"processing_version"` Description string `json:"description"` }
ProcessingEra represents processing era structure used in BulkBlocks structure
type ProcessingEras ¶
type ProcessingEras struct { PROCESSING_ERA_ID int64 `json:"processing_era_id"` PROCESSING_VERSION int64 `json:"processing_version" validate:"required,number,gt=0"` CREATION_DATE int64 `json:"creation_date" validate:"required,number,gt=0"` CREATE_BY string `json:"create_by" validate:"required"` DESCRIPTION string `json:"description"` }
ProcessingEras represents Processing Eras DBS DB table
func (*ProcessingEras) Decode ¶
func (r *ProcessingEras) Decode(reader io.Reader) error
Decode implementation for ProcessingEras
func (*ProcessingEras) Insert ¶
func (r *ProcessingEras) Insert(tx *sql.Tx) error
Insert implementation of ProcessingEras
func (*ProcessingEras) SetDefaults ¶
func (r *ProcessingEras) SetDefaults()
SetDefaults implements set defaults for ProcessingEras
func (*ProcessingEras) Validate ¶
func (r *ProcessingEras) Validate() error
Validate implementation of ProcessingEras
type PyFileRecord ¶
type PyFileRecord struct {
Records []FileRecord `json:"files"`
}
PyFileRecord represents DBS python input file record structure
type Record ¶
type Record map[string]interface{}
Record represents DBS record
var DBSQL Record
DBSQL represents DBS SQL record
type ReleaseVersions ¶
type ReleaseVersions struct { RELEASE_VERSION_ID int64 `json:"release_version_id"` RELEASE_VERSION string `json:"release_version" validate:"required"` }
ReleaseVersions represents Release Versions DBS DB table
func (*ReleaseVersions) Decode ¶
func (r *ReleaseVersions) Decode(reader io.Reader) error
Decode implementation for ReleaseVersions
func (*ReleaseVersions) Insert ¶
func (r *ReleaseVersions) Insert(tx *sql.Tx) error
Insert implementation of ReleaseVersions
func (*ReleaseVersions) SetDefaults ¶
func (r *ReleaseVersions) SetDefaults()
SetDefaults implements set defaults for ReleaseVersions
func (*ReleaseVersions) Validate ¶
func (r *ReleaseVersions) Validate() error
Validate implementation of ReleaseVersions
type SchemaIndex ¶
SchemaIndex represents schema index details
type SchemaInfo ¶
type SchemaInfo struct { Owner string Size float64 Indexes []SchemaIndex }
SchemaInfo represents schema details
type StrPattern ¶
StrPattern represents string object pattern
func (StrPattern) Check ¶
func (o StrPattern) Check(key string, val interface{}) error
Check implements ObjectPattern interface for StrPattern objects
type TLSCertsManager ¶
type TLSCertsManager struct { Certificates []tls.Certificate Time time.Time }
TLSCertsManager manages TLS certificates
func (*TLSCertsManager) TlsCerts ¶
func (t *TLSCertsManager) TlsCerts(key, cert string) ([]tls.Certificate, error)
TlsCerts provides access to TLS certificates for given key and certificate
type TableIndex ¶
TableIndex represents individual table index sizes
Source Files ¶
- acquisitioneras.go
- acquisitionerasci.go
- appexec.go
- blockchildren.go
- blockdump.go
- blockfilelumi.go
- blockorigin.go
- blockparents.go
- blocks.go
- blocksummaries.go
- branchhashes.go
- bulkblocks.go
- bulkblocks2.go
- dataset_output_mod_configs.go
- datasetaccesstypes.go
- datasetchildren.go
- datasetlist.go
- datasetparents.go
- datasets.go
- datatypes.go
- dbs.go
- dummy.go
- errors.go
- file_output_mod_configs.go
- filearray.go
- filechildren.go
- filedatatypes.go
- filelumis.go
- fileparents.go
- fileparentsbylumi.go
- files.go
- filesummaries.go
- migrate.go
- migration_blocks.go
- migration_requests.go
- migration_server.go
- outputconfigs.go
- outputmodules.go
- parameters.go
- parentdatasetfilelumi.go
- parentdstrio.go
- physicsgroups.go
- primarydatasets.go
- primarydstypes.go
- processeddatasets.go
- processingeras.go
- psethashes.go
- releaseversions.go
- runs.go
- runsummaries.go
- stats.go
- tiers.go
- utils.go
- validator.go