Documentation ¶
Index ¶
- func NewArrowRecordIterator(ctx context.Context, rpi rowscanner.ResultPageIterator, bi BatchIterator, ...) rows.ArrowBatchIterator
- func NewArrowRowScanner(resultSetMetadata *cli_service.TGetResultSetMetadataResp, ...) (rowscanner.RowScanner, dbsqlerr.DBError)
- func NewCloudBatchLoader(ctx context.Context, files []*cli_service.TSparkArrowResultLink, ...) (*batchLoader[*cloudURL], dbsqlerr.DBError)
- func NewLocalBatchLoader(ctx context.Context, batches []*cli_service.TSparkArrowBatch, ...) (*batchLoader[*localBatch], dbsqlerr.DBError)
- type BatchIterator
- type BatchLoader
- type RowValues
- type SparkArrowBatch
- type SparkArrowRecord
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func NewArrowRecordIterator ¶ added in v1.5.0
func NewArrowRecordIterator(ctx context.Context, rpi rowscanner.ResultPageIterator, bi BatchIterator, arrowSchemaBytes []byte, cfg config.Config) rows.ArrowBatchIterator
func NewArrowRowScanner ¶
func NewArrowRowScanner(resultSetMetadata *cli_service.TGetResultSetMetadataResp, rowSet *cli_service.TRowSet, cfg *config.Config, logger *dbsqllog.DBSQLLogger, ctx context.Context) (rowscanner.RowScanner, dbsqlerr.DBError)
NewArrowRowScanner returns an instance of RowScanner which handles Arrow-format results.
func NewCloudBatchLoader ¶ added in v1.4.0
func NewCloudBatchLoader(ctx context.Context, files []*cli_service.TSparkArrowResultLink, startRowOffset int64, cfg *config.Config) (*batchLoader[*cloudURL], dbsqlerr.DBError)
func NewLocalBatchLoader ¶ added in v1.4.0
func NewLocalBatchLoader(ctx context.Context, batches []*cli_service.TSparkArrowBatch, startRowOffset int64, arrowSchemaBytes []byte, cfg *config.Config) (*batchLoader[*localBatch], dbsqlerr.DBError)
Types ¶
type BatchIterator ¶ added in v1.5.0
type BatchIterator interface { Next() (SparkArrowBatch, error) HasNext() bool Close() }
func NewBatchIterator ¶ added in v1.5.0
func NewBatchIterator(batchLoader BatchLoader) (BatchIterator, dbsqlerr.DBError)
type BatchLoader ¶ added in v1.4.0
type BatchLoader interface { rowscanner.Delimiter GetBatchFor(recordNum int64) (SparkArrowBatch, dbsqlerr.DBError) Close() }
type RowValues ¶ added in v1.5.0
type RowValues interface { rowscanner.Delimiter Close() NColumns() int SetColumnValues(columnIndex int, values arrow.ArrayData) error IsNull(columnIndex int, rowNumber int64) bool Value(columnIndex int, rowNumber int64) (any, error) SetDelimiter(d rowscanner.Delimiter) }
RowValues is an abstraction for holding the values of a set of rows.
func NewRowValues ¶ added in v1.5.0
func NewRowValues(d rowscanner.Delimiter, holders []columnValues) RowValues
type SparkArrowBatch ¶ added in v1.5.0
type SparkArrowBatch interface { rowscanner.Delimiter Next() (SparkArrowRecord, error) HasNext() bool Close() }
SparkArrowBatch is an abstraction for a set of Arrow records.
type SparkArrowRecord ¶ added in v1.5.0
type SparkArrowRecord interface { rowscanner.Delimiter arrow.Record }
SparkArrowRecord is an abstraction for a single Arrow record.
Click to show internal directories.
Click to hide internal directories.