Documentation
Index
- func DeserializeBytesTensor(encodedTensor []byte, capacity int64) []string
- func DeserializeFloat32Tensor(encodedTensor []byte) []float32
- func GetOutputFromInferResponse(name string, response *inferenceserver.ModelInferResponse) (*inferenceserver.ModelInferResponse_InferOutputTensor, []byte, error)
- func ParseModel(modelMetadata *inferenceserver.ModelMetadataResponse, ...) (int64, int64, int64)
- func ReadFloat32(fourBytes []byte) float32
- func ReadInt32(fourBytes []byte) int32
- func Reshape1DArrayFloat32To3D(array []float32, shape []int64) ([][][]float32, error)
- func Reshape1DArrayStringTo2D(array []string, shape []int64) ([][]string, error)
- func SerializeBytesTensor(tensor [][]byte) []byte
- type DetectionOutput
- type KeypointOutput
- type Triton
Constants
This section is empty.
Variables
This section is empty.
Functions
func DeserializeBytesTensor
func DeserializeBytesTensor(encodedTensor []byte, capacity int64) []string
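No example accompanies this helper, so here is a minimal sketch of an example test. It assumes the package is named triton, that the raw tensor follows Triton's usual BYTES wire layout (each element prefixed with its 4-byte little-endian length), and that capacity is simply the expected number of elements; the "cat"/"dog" values are made up for illustration.

package triton // assumed package name

import "fmt"

// ExampleDeserializeBytesTensor decodes two length-prefixed elements.
// The 4-byte little-endian length prefix is an assumption about the wire
// layout, not something the signature above guarantees.
func ExampleDeserializeBytesTensor() {
    raw := []byte{
        3, 0, 0, 0, 'c', 'a', 't', // length 3, then "cat"
        3, 0, 0, 0, 'd', 'o', 'g', // length 3, then "dog"
    }
    labels := DeserializeBytesTensor(raw, 2) // capacity: expected element count
    fmt.Println(labels)
    // Expected (under the layout assumption above): [cat dog]
}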
func DeserializeFloat32Tensor
func DeserializeFloat32Tensor(encodedTensor []byte) []float32
func GetOutputFromInferResponse
func GetOutputFromInferResponse(name string, response *inferenceserver.ModelInferResponse) (*inferenceserver.ModelInferResponse_InferOutputTensor, []byte, error)
func ParseModel
func ParseModel(modelMetadata *inferenceserver.ModelMetadataResponse, modelConfig *inferenceserver.ModelConfigResponse) (int64, int64, int64)
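ParseModel only needs the metadata and config messages that the Triton interface below already returns, so a small in-package sketch can call it without importing the generated inferenceserver types directly. The helper name describeModel is hypothetical, and the meaning of the three returned int64 values is not stated in the signature, so none is assumed here.

package triton // assumed package name

// describeModel is a hypothetical helper: it fetches a model's metadata and
// config through a Triton client and forwards them to ParseModel. The three
// int64 results are not named in the documented signature, so this sketch
// simply passes them through without interpreting them.
func describeModel(ts Triton, modelName, modelInstance string) (int64, int64, int64) {
    meta := ts.ModelMetadataRequest(modelName, modelInstance)
    conf := ts.ModelConfigRequest(modelName, modelInstance)
    return ParseModel(meta, conf)
}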
func ReadFloat32
func ReadFloat32(fourBytes []byte) float32
func ReadInt32
func ReadInt32(fourBytes []byte) int32
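Both helpers take exactly four bytes. A reasonable assumption is that they decode them as little-endian IEEE-754 float32 and little-endian int32 values, matching how Triton lays out raw tensor contents; that interpretation is not confirmed by the signatures. A minimal sketch of an example test under that assumption:

package triton // assumed package name

import "fmt"

// ExampleReadFloat32 feeds hand-written byte sequences to both helpers.
// The little-endian interpretation is an assumption about the implementation.
func ExampleReadFloat32() {
    fmt.Println(ReadFloat32([]byte{0x00, 0x00, 0x80, 0x3f})) // 1.0 as little-endian IEEE-754
    fmt.Println(ReadInt32([]byte{0x2a, 0x00, 0x00, 0x00}))   // 42 as little-endian int32
    // Expected (little-endian assumption): 1 and 42
}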
func Reshape1DArrayFloat32To3D
func Reshape1DArrayFloat32To3D(array []float32, shape []int64) ([][][]float32, error)
func Reshape1DArrayStringTo2D
func Reshape1DArrayStringTo2D(array []string, shape []int64) ([][]string, error)
TODO: generalise reshape functions by using interface{} arguments and returned values
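The TODO above suggests both reshape helpers share the same contract: a flat slice plus a target shape whose dimensions should multiply to the slice length. A minimal sketch of an example test, assuming row-major order and that a mismatched shape is what the error return covers:

package triton // assumed package name

import "fmt"

// ExampleReshape1DArrayStringTo2D folds six flat labels into a 2x3 grid.
// Row-major filling and shape/length validation are assumptions here.
func ExampleReshape1DArrayStringTo2D() {
    flat := []string{"a", "b", "c", "d", "e", "f"}
    grid, err := Reshape1DArrayStringTo2D(flat, []int64{2, 3})
    if err != nil {
        fmt.Println("reshape failed:", err)
        return
    }
    fmt.Println(grid)
    // Expected (under the row-major assumption): [[a b c] [d e f]]
}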
func SerializeBytesTensor
func SerializeBytesTensor(tensor [][]byte) []byte
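SerializeBytesTensor is presumably the inverse of DeserializeBytesTensor: it packs each element behind a length prefix so the tensor can be sent as raw input. A round-trip sketch under that assumption, with made-up element values:

package triton // assumed package name

import "fmt"

// ExampleSerializeBytesTensor serializes two elements and immediately
// deserializes them again. That the two helpers are exact inverses is an
// assumption, not something the signatures guarantee.
func ExampleSerializeBytesTensor() {
    encoded := SerializeBytesTensor([][]byte{[]byte("cat"), []byte("dog")})
    fmt.Println(DeserializeBytesTensor(encoded, 2))
    // Expected (round-trip assumption): [cat dog]
}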
Types
type DetectionOutput
type KeypointOutput
type Triton
type Triton interface {
    ServerLiveRequest() *inferenceserver.ServerLiveResponse
    ServerReadyRequest() *inferenceserver.ServerReadyResponse
    ModelMetadataRequest(modelName string, modelInstance string) *inferenceserver.ModelMetadataResponse
    ModelConfigRequest(modelName string, modelInstance string) *inferenceserver.ModelConfigResponse
    ModelInferRequest(task modelPB.ModelInstance_Task, rawInput [][]byte, modelName string, modelInstance string, modelMetadata *inferenceserver.ModelMetadataResponse, modelConfig *inferenceserver.ModelConfigResponse) (*inferenceserver.ModelInferResponse, error)
    PostProcess(inferResponse *inferenceserver.ModelInferResponse, modelMetadata *inferenceserver.ModelMetadataResponse, task modelPB.ModelInstance_Task) (interface{}, error)
    LoadModelRequest(modelName string) (*inferenceserver.RepositoryModelLoadResponse, error)
    UnloadModelRequest(modelName string) (*inferenceserver.RepositoryModelUnloadResponse, error)
    ListModelsRequest() *inferenceserver.RepositoryIndexResponse
    IsTritonServerReady() bool
    Init()
    Close()
}
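The interface bundles server health checks, model repository management, and inference into one client abstraction. A hedged lifecycle sketch using only the string-based methods (so no generated protobuf types need to be imported); the helper name checkAndInspect, the model name "some-model", and the instance label "1" are placeholders, and the package name triton is assumed:

package triton // assumed package name

import "log"

// checkAndInspect is a hypothetical helper showing one plausible call order:
// initialise the client, verify the server, load a model, read its metadata
// and config, then unload and close. Model name and instance are placeholders.
func checkAndInspect(ts Triton) {
    ts.Init()
    defer ts.Close()

    if !ts.IsTritonServerReady() {
        log.Println("triton server is not ready")
        return
    }

    if _, err := ts.LoadModelRequest("some-model"); err != nil {
        log.Printf("load failed: %v", err)
        return
    }
    defer func() {
        if _, err := ts.UnloadModelRequest("some-model"); err != nil {
            log.Printf("unload failed: %v", err)
        }
    }()

    log.Printf("metadata: %v", ts.ModelMetadataRequest("some-model", "1"))
    log.Printf("config: %v", ts.ModelConfigRequest("some-model", "1"))
    log.Printf("repository index: %v", ts.ListModelsRequest())
}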