Documentation ¶
Overview ¶
Package conn @author: Violet-Eva
Index ¶
- Constants
- func InitGormDB(dbType, dbName, user, passwd, host string, ...) (gormDB *gorm.DB, err error)
- func SAToTSA(structType *types.StructType, data ...any) [][]interface{}
- func StructToStructType(v interface{}, isRename bool) (*types.StructType, error)
- type Address
- type HiveConn
- func (hc *HiveConn) Close() (err error)
- func (hc *HiveConn) ExecQuery(query string) ([]map[string]interface{}, error)
- func (hc *HiveConn) ExecQueryBatchSize(query string, batchSize int, ...) (err error)
- func (hc *HiveConn) ExecQueryNoResult(query string) error
- func (hc *HiveConn) ExecQueryToStruct(query string, data any) error
- func (hc *HiveConn) GetHiveConn() (err error)
- type HiveConnInformation
- type KrbAuth
- type PgDB
- func (p *PgDB) ExecQuery(query string, args ...interface{}) (list []map[string]interface{}, err error)
- func (p *PgDB) ExecQueryNoResult(query string, args ...interface{}) (err error)
- func (p *PgDB) ExecQueryToString(query string, args ...interface{}) (list []map[string]string, err error)
- func (p *PgDB) ExecQueryToStruct(query string, data any, args ...interface{}) (err error)
- type SQLDB
- func (s *SQLDB) Close() (err error)
- func (s *SQLDB) ExecQuery(query string, args ...interface{}) (list []map[string]interface{}, err error)
- func (s *SQLDB) ExecQueryBatchProcessing(query string, batchSize int, ...) (err error)
- func (s *SQLDB) ExecQueryNoResult(query string, args ...interface{}) (err error)
- func (s *SQLDB) ExecQueryToString(query string, args ...interface{}) (list []map[string]string, err error)
- func (s *SQLDB) ExecQueryToStruct(query string, data any, args ...interface{}) (err error)
- type SparkSQL
- func (s *SparkSQL) CreateDataFrame(ctx context.Context, data [][]any, schema *types.StructType) (sql.DataFrame, error)
- func (s *SparkSQL) CreateDataFrameFromMap(v interface{}, isTag bool, isRename bool, data ...map[string]interface{}) (sql.DataFrame, error)
- func (s *SparkSQL) CreateDataFrameFromStruct(data any, isRename bool) (sql.DataFrame, error)
- func (s *SparkSQL) DFCollectBatchProcessingForInterface(df sql.DataFrame, batchSize int, ...) (err error)
- func (s *SparkSQL) DFCollectBatchProcessingForString(df sql.DataFrame, batchSize int, ...) (err error)
- func (s *SparkSQL) Exec(query string) (sql.DataFrame, error)
- func (s *SparkSQL) ExecQuery(query string) (output []map[string]interface{}, err error)
- func (s *SparkSQL) ExecQueryBatchProcessingForInterface(query string, batchSize int, ...) (err error)
- func (s *SparkSQL) ExecQueryBatchProcessingForString(query string, batchSize int, function ...func(input []map[string]string) error) (err error)
- func (s *SparkSQL) ExecQueryToMapString(query string) (output []map[string]string, err error)
Constants ¶
const (
	DefaultKrbConfPath  = "/etc/krb5.conf"
	DefaultKinitProgram = "/usr/bin/kinit"
)
Variables ¶
This section is empty.
Functions ¶
func InitGormDB ¶
func SAToTSA ¶
func SAToTSA(structType *types.StructType, data ...any) [][]interface{}
SAToTSA @Description: converts a variadic list of struct values (...any) into a two-dimensional slice ([][]interface{}) whose columns follow structType (see the sketch after StructToStructType below). @param structType @param data @return [][]interface{}
func StructToStructType ¶
func StructToStructType(v interface{}, isRename bool) (*types.StructType, error)
StructToStructType @Description: builds a *types.StructType from struct v. When isTag is false, each StructField is named after the struct field; when isTag is true, it is named after the field's json tag. @param v @param isTag @return *types.StructType @return error
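A minimal sketch of how StructToStructType and SAToTSA might be combined (the import path of this package is a placeholder, and the user struct is purely illustrative):

package main

import (
	"fmt"

	"example.com/yourmodule/conn" // hypothetical import path for this package
)

// user is a hypothetical row type; its json tags supply the StructField names
// when tag-based naming is enabled.
type user struct {
	Name string `json:"name"`
	Age  int    `json:"age"`
}

func main() {
	// Build the *types.StructType that describes user.
	st, err := conn.StructToStructType(user{}, true)
	if err != nil {
		panic(err)
	}

	// Flatten struct values into rows ([][]interface{}) ordered by st.
	rows := conn.SAToTSA(st, user{Name: "a", Age: 1}, user{Name: "b", Age: 2})
	fmt.Println(rows)
}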
Types ¶
type HiveConn ¶
type HiveConn struct {
	KA            *KrbAuth
	HCI           *HiveConnInformation
	Conn          *gohive.Connection
	RetryCount    int
	RetryInterval time.Duration
	QueryTimeout  int
}
func NewHiveConn ¶
func (*HiveConn) ExecQueryBatchSize ¶
func (*HiveConn) ExecQueryNoResult ¶
func (*HiveConn) ExecQueryToStruct ¶
ExecQueryToStruct @Description: runs query and scans the result rows into data, which must be a pointer (typically a pointer to a slice of structs). @param query @param data pointer to the destination value @return error
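A hedged usage sketch for ExecQueryToStruct, assuming hc is a *HiveConn that has already been connected (for example via NewHiveConn and GetHiveConn); the import path, row type, and query are illustrative only:

import "example.com/yourmodule/conn" // hypothetical import path

// event is a hypothetical destination row type.
type event struct {
	ID   int64  `json:"id"`
	Name string `json:"name"`
}

func loadEvents(hc *conn.HiveConn) ([]event, error) {
	var out []event
	// data must be a pointer, here a pointer to a slice of structs.
	if err := hc.ExecQueryToStruct("SELECT id, name FROM db.events", &out); err != nil {
		return nil, err
	}
	return out, nil
}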
func (*HiveConn) GetHiveConn ¶
type HiveConnInformation ¶
type KrbAuth ¶
type KrbAuth struct {
	KrbConfPath      string
	KinitProgramPath string
	KeyTabFilePath   string
	Principal        string
}
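A minimal sketch of filling in KrbAuth with the package defaults (the keytab path and principal are placeholders; whether NewKrbAuth applies these defaults automatically is not documented here):

auth := &conn.KrbAuth{
	KrbConfPath:      conn.DefaultKrbConfPath,   // "/etc/krb5.conf"
	KinitProgramPath: conn.DefaultKinitProgram,  // "/usr/bin/kinit"
	KeyTabFilePath:   "/path/to/service.keytab", // placeholder
	Principal:        "svc_user@EXAMPLE.COM",    // placeholder
}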
func NewKrbAuth ¶
type PgDB ¶
func InitPGConn ¶
func (*PgDB) ExecQueryNoResult ¶
func (*PgDB) ExecQueryToString ¶
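A hedged sketch of ExecQueryToString, assuming p is a *PgDB obtained from InitPGConn (whose parameters are not listed here); the table and query are illustrative:

func activeUserNames(p *conn.PgDB) ([]string, error) {
	// Every column value is returned as a string in the result maps.
	rows, err := p.ExecQueryToString("SELECT name FROM users WHERE active = $1", true)
	if err != nil {
		return nil, err
	}
	names := make([]string, 0, len(rows))
	for _, row := range rows {
		names = append(names, row["name"])
	}
	return names, nil
}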
type SQLDB ¶
func (*SQLDB) ExecQueryBatchProcessing ¶
func (*SQLDB) ExecQueryNoResult ¶
func (*SQLDB) ExecQueryToString ¶
type SparkSQL ¶
type SparkSQL struct {
	sql.SparkSession
	// contains filtered or unexported fields
}
func NewSparkSQL ¶
func (*SparkSQL) CreateDataFrame ¶
func (*SparkSQL) CreateDataFrameFromMap ¶
func (s *SparkSQL) CreateDataFrameFromMap(v interface{}, isTag bool, isRename bool, data ...map[string]interface{}) (sql.DataFrame, error)
CreateDataFrameFromMap @Description: converts []map[string]interface{} values into a sql.DataFrame, using struct v as the schema template. @param v struct that defines the schema @param isTag true: map keys are matched against the struct's json tag names; false: map keys are matched against the struct field names (this flag exists because key matching cannot ignore case). @param isRename true: columns are renamed from the spark tag; fields without a spark tag fall back to the field name. @param data the rows to load @return sql.DataFrame @return error
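A sketch of how CreateDataFrameFromMap might be called, assuming s is an initialized *SparkSQL (for example from NewSparkSQL); the metric struct and its tags are hypothetical:

// metric is a hypothetical schema template.
type metric struct {
	Host  string  `json:"host"  spark:"host"`
	Value float64 `json:"value" spark:"value"`
}

func mapsToDF(s *conn.SparkSQL) error {
	rows := []map[string]interface{}{
		{"host": "a", "value": 1.5},
		{"host": "b", "value": 2.5},
	}
	// isTag=true: match map keys against the json tag names;
	// isRename=true: name the DataFrame columns from the spark tags.
	df, err := s.CreateDataFrameFromMap(metric{}, true, true, rows...)
	if err != nil {
		return err
	}
	_ = df // e.g. collect it, write it out, etc.
	return nil
}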
func (*SparkSQL) CreateDataFrameFromStruct ¶
CreateDataFrameFromStruct @Description: converts a slice of struct values into a sql.DataFrame. @param data the struct values to load @param isRename true: columns are renamed from the spark tag; fields without a spark tag fall back to the field name. @return sql.DataFrame @return error
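A matching sketch for CreateDataFrameFromStruct, reusing the hypothetical metric struct and the assumed *SparkSQL value from the previous example:

func structsToDF(s *conn.SparkSQL) error {
	data := []metric{
		{Host: "a", Value: 1.5},
		{Host: "b", Value: 2.5},
	}
	// isRename=true: column names come from the spark tags, falling back to field names.
	df, err := s.CreateDataFrameFromStruct(data, true)
	if err != nil {
		return err
	}
	_ = df
	return nil
}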