Documentation ¶
Index ¶
- Variables
- func RegisterGRPCInferenceServiceServer(s *grpc.Server, srv GRPCInferenceServiceServer)
- type GRPCInferenceServiceClient
- type GRPCInferenceServiceServer
- type InferParameter
- func (*InferParameter) Descriptor() ([]byte, []int) deprecated
- func (x *InferParameter) GetBoolParam() bool
- func (x *InferParameter) GetInt64Param() int64
- func (m *InferParameter) GetParameterChoice() isInferParameter_ParameterChoice
- func (x *InferParameter) GetStringParam() string
- func (*InferParameter) ProtoMessage()
- func (x *InferParameter) ProtoReflect() protoreflect.Message
- func (x *InferParameter) Reset()
- func (x *InferParameter) String() string
- type InferParameter_BoolParam
- type InferParameter_Int64Param
- type InferParameter_StringParam
- type InferTensorContents
- func (*InferTensorContents) Descriptor() ([]byte, []int) deprecated
- func (x *InferTensorContents) GetBoolContents() []bool
- func (x *InferTensorContents) GetBytesContents() [][]byte
- func (x *InferTensorContents) GetFp32Contents() []float32
- func (x *InferTensorContents) GetFp64Contents() []float64
- func (x *InferTensorContents) GetInt64Contents() []int64
- func (x *InferTensorContents) GetIntContents() []int32
- func (x *InferTensorContents) GetUint64Contents() []uint64
- func (x *InferTensorContents) GetUintContents() []uint32
- func (*InferTensorContents) ProtoMessage()
- func (x *InferTensorContents) ProtoReflect() protoreflect.Message
- func (x *InferTensorContents) Reset()
- func (x *InferTensorContents) String() string
- type ModelInferRequest
- func (*ModelInferRequest) Descriptor() ([]byte, []int) deprecated
- func (x *ModelInferRequest) GetId() string
- func (x *ModelInferRequest) GetInputs() []*ModelInferRequest_InferInputTensor
- func (x *ModelInferRequest) GetModelName() string
- func (x *ModelInferRequest) GetModelVersion() string
- func (x *ModelInferRequest) GetOutputs() []*ModelInferRequest_InferRequestedOutputTensor
- func (x *ModelInferRequest) GetParameters() map[string]*InferParameter
- func (x *ModelInferRequest) GetRawInputContents() [][]byte
- func (*ModelInferRequest) ProtoMessage()
- func (x *ModelInferRequest) ProtoReflect() protoreflect.Message
- func (x *ModelInferRequest) Reset()
- func (x *ModelInferRequest) String() string
- type ModelInferRequest_InferInputTensor
- func (*ModelInferRequest_InferInputTensor) Descriptor() ([]byte, []int) deprecated
- func (x *ModelInferRequest_InferInputTensor) GetContents() *InferTensorContents
- func (x *ModelInferRequest_InferInputTensor) GetDatatype() string
- func (x *ModelInferRequest_InferInputTensor) GetName() string
- func (x *ModelInferRequest_InferInputTensor) GetParameters() map[string]*InferParameter
- func (x *ModelInferRequest_InferInputTensor) GetShape() []int64
- func (*ModelInferRequest_InferInputTensor) ProtoMessage()
- func (x *ModelInferRequest_InferInputTensor) ProtoReflect() protoreflect.Message
- func (x *ModelInferRequest_InferInputTensor) Reset()
- func (x *ModelInferRequest_InferInputTensor) String() string
- type ModelInferRequest_InferRequestedOutputTensor
- func (*ModelInferRequest_InferRequestedOutputTensor) Descriptor() ([]byte, []int) deprecated
- func (x *ModelInferRequest_InferRequestedOutputTensor) GetName() string
- func (x *ModelInferRequest_InferRequestedOutputTensor) GetParameters() map[string]*InferParameter
- func (*ModelInferRequest_InferRequestedOutputTensor) ProtoMessage()
- func (x *ModelInferRequest_InferRequestedOutputTensor) ProtoReflect() protoreflect.Message
- func (x *ModelInferRequest_InferRequestedOutputTensor) Reset()
- func (x *ModelInferRequest_InferRequestedOutputTensor) String() string
- type ModelInferResponse
- func (*ModelInferResponse) Descriptor() ([]byte, []int) deprecated
- func (x *ModelInferResponse) GetId() string
- func (x *ModelInferResponse) GetModelName() string
- func (x *ModelInferResponse) GetModelVersion() string
- func (x *ModelInferResponse) GetOutputs() []*ModelInferResponse_InferOutputTensor
- func (x *ModelInferResponse) GetParameters() map[string]*InferParameter
- func (x *ModelInferResponse) GetRawOutputContents() [][]byte
- func (*ModelInferResponse) ProtoMessage()
- func (x *ModelInferResponse) ProtoReflect() protoreflect.Message
- func (x *ModelInferResponse) Reset()
- func (x *ModelInferResponse) String() string
- type ModelInferResponse_InferOutputTensor
- func (*ModelInferResponse_InferOutputTensor) Descriptor() ([]byte, []int) deprecated
- func (x *ModelInferResponse_InferOutputTensor) GetContents() *InferTensorContents
- func (x *ModelInferResponse_InferOutputTensor) GetDatatype() string
- func (x *ModelInferResponse_InferOutputTensor) GetName() string
- func (x *ModelInferResponse_InferOutputTensor) GetParameters() map[string]*InferParameter
- func (x *ModelInferResponse_InferOutputTensor) GetShape() []int64
- func (*ModelInferResponse_InferOutputTensor) ProtoMessage()
- func (x *ModelInferResponse_InferOutputTensor) ProtoReflect() protoreflect.Message
- func (x *ModelInferResponse_InferOutputTensor) Reset()
- func (x *ModelInferResponse_InferOutputTensor) String() string
- type ModelMetadataRequest
- func (*ModelMetadataRequest) Descriptor() ([]byte, []int) deprecated
- func (x *ModelMetadataRequest) GetName() string
- func (x *ModelMetadataRequest) GetVersion() string
- func (*ModelMetadataRequest) ProtoMessage()
- func (x *ModelMetadataRequest) ProtoReflect() protoreflect.Message
- func (x *ModelMetadataRequest) Reset()
- func (x *ModelMetadataRequest) String() string
- type ModelMetadataResponse
- func (*ModelMetadataResponse) Descriptor() ([]byte, []int) deprecated
- func (x *ModelMetadataResponse) GetInputs() []*ModelMetadataResponse_TensorMetadata
- func (x *ModelMetadataResponse) GetName() string
- func (x *ModelMetadataResponse) GetOutputs() []*ModelMetadataResponse_TensorMetadata
- func (x *ModelMetadataResponse) GetPlatform() string
- func (x *ModelMetadataResponse) GetVersions() []string
- func (*ModelMetadataResponse) ProtoMessage()
- func (x *ModelMetadataResponse) ProtoReflect() protoreflect.Message
- func (x *ModelMetadataResponse) Reset()
- func (x *ModelMetadataResponse) String() string
- type ModelMetadataResponse_TensorMetadata
- func (*ModelMetadataResponse_TensorMetadata) Descriptor() ([]byte, []int) deprecated
- func (x *ModelMetadataResponse_TensorMetadata) GetDatatype() string
- func (x *ModelMetadataResponse_TensorMetadata) GetName() string
- func (x *ModelMetadataResponse_TensorMetadata) GetShape() []int64
- func (*ModelMetadataResponse_TensorMetadata) ProtoMessage()
- func (x *ModelMetadataResponse_TensorMetadata) ProtoReflect() protoreflect.Message
- func (x *ModelMetadataResponse_TensorMetadata) Reset()
- func (x *ModelMetadataResponse_TensorMetadata) String() string
- type ModelReadyRequest
- func (*ModelReadyRequest) Descriptor() ([]byte, []int) deprecated
- func (x *ModelReadyRequest) GetName() string
- func (x *ModelReadyRequest) GetVersion() string
- func (*ModelReadyRequest) ProtoMessage()
- func (x *ModelReadyRequest) ProtoReflect() protoreflect.Message
- func (x *ModelReadyRequest) Reset()
- func (x *ModelReadyRequest) String() string
- type ModelReadyResponse
- type ServerLiveRequest
- type ServerLiveResponse
- type ServerMetadataRequest
- type ServerMetadataResponse
- func (*ServerMetadataResponse) Descriptor() ([]byte, []int) deprecated
- func (x *ServerMetadataResponse) GetExtensions() []string
- func (x *ServerMetadataResponse) GetName() string
- func (x *ServerMetadataResponse) GetVersion() string
- func (*ServerMetadataResponse) ProtoMessage()
- func (x *ServerMetadataResponse) ProtoReflect() protoreflect.Message
- func (x *ServerMetadataResponse) Reset()
- func (x *ServerMetadataResponse) String() string
- type ServerReadyRequest
- type ServerReadyResponse
- func (*ServerReadyResponse) Descriptor() ([]byte, []int) deprecated
- func (x *ServerReadyResponse) GetReady() bool
- func (*ServerReadyResponse) ProtoMessage()
- func (x *ServerReadyResponse) ProtoReflect() protoreflect.Message
- func (x *ServerReadyResponse) Reset()
- func (x *ServerReadyResponse) String() string
- type UnimplementedGRPCInferenceServiceServer
- func (*UnimplementedGRPCInferenceServiceServer) ModelInfer(context.Context, *ModelInferRequest) (*ModelInferResponse, error)
- func (*UnimplementedGRPCInferenceServiceServer) ModelMetadata(context.Context, *ModelMetadataRequest) (*ModelMetadataResponse, error)
- func (*UnimplementedGRPCInferenceServiceServer) ModelReady(context.Context, *ModelReadyRequest) (*ModelReadyResponse, error)
- func (*UnimplementedGRPCInferenceServiceServer) ServerLive(context.Context, *ServerLiveRequest) (*ServerLiveResponse, error)
- func (*UnimplementedGRPCInferenceServiceServer) ServerMetadata(context.Context, *ServerMetadataRequest) (*ServerMetadataResponse, error)
- func (*UnimplementedGRPCInferenceServiceServer) ServerReady(context.Context, *ServerReadyRequest) (*ServerReadyResponse, error)
Constants ¶
This section is empty.
Variables ¶
var File_kfs_inference_v2_proto protoreflect.FileDescriptor
Functions ¶
func RegisterGRPCInferenceServiceServer ¶
func RegisterGRPCInferenceServiceServer(s *grpc.Server, srv GRPCInferenceServiceServer)
Types ¶
type GRPCInferenceServiceClient ¶
type GRPCInferenceServiceClient interface {
    // The ServerLive API indicates if the inference server is able to receive
    // and respond to metadata and inference requests.
    ServerLive(ctx context.Context, in *ServerLiveRequest, opts ...grpc.CallOption) (*ServerLiveResponse, error)
    // The ServerReady API indicates if the server is ready for inferencing.
    ServerReady(ctx context.Context, in *ServerReadyRequest, opts ...grpc.CallOption) (*ServerReadyResponse, error)
    // The ModelReady API indicates if a specific model is ready for inferencing.
    ModelReady(ctx context.Context, in *ModelReadyRequest, opts ...grpc.CallOption) (*ModelReadyResponse, error)
    // The ServerMetadata API provides information about the server. Errors are
    // indicated by the google.rpc.Status returned for the request. The OK code
    // indicates success and other codes indicate failure.
    ServerMetadata(ctx context.Context, in *ServerMetadataRequest, opts ...grpc.CallOption) (*ServerMetadataResponse, error)
    // The per-model metadata API provides information about a model. Errors are
    // indicated by the google.rpc.Status returned for the request. The OK code
    // indicates success and other codes indicate failure.
    ModelMetadata(ctx context.Context, in *ModelMetadataRequest, opts ...grpc.CallOption) (*ModelMetadataResponse, error)
    // The ModelInfer API performs inference using the specified model. Errors are
    // indicated by the google.rpc.Status returned for the request. The OK code
    // indicates success and other codes indicate failure.
    ModelInfer(ctx context.Context, in *ModelInferRequest, opts ...grpc.CallOption) (*ModelInferResponse, error)
}
GRPCInferenceServiceClient is the client API for GRPCInferenceService service.
For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
func NewGRPCInferenceServiceClient ¶
func NewGRPCInferenceServiceClient(cc grpc.ClientConnInterface) GRPCInferenceServiceClient
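A minimal sketch of constructing a client and checking server liveness. The import path, server address, and use of insecure transport credentials are assumptions for illustration; substitute the values used by your deployment.

package main

import (
    "context"
    "log"
    "time"

    "google.golang.org/grpc"
    "google.golang.org/grpc/credentials/insecure"

    pb "example.com/your/module/inference" // hypothetical import path for this package
)

func main() {
    // Dial the inference server; the address and credentials are placeholders.
    conn, err := grpc.Dial("localhost:8081", grpc.WithTransportCredentials(insecure.NewCredentials()))
    if err != nil {
        log.Fatalf("dial: %v", err)
    }
    defer conn.Close()

    client := pb.NewGRPCInferenceServiceClient(conn)

    ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
    defer cancel()

    // ServerLive reports whether the server can accept requests at all.
    live, err := client.ServerLive(ctx, &pb.ServerLiveRequest{})
    if err != nil {
        log.Fatalf("ServerLive: %v", err)
    }
    log.Printf("server live: %v", live.GetLive())
}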
type GRPCInferenceServiceServer ¶
type GRPCInferenceServiceServer interface {
    // The ServerLive API indicates if the inference server is able to receive
    // and respond to metadata and inference requests.
    ServerLive(context.Context, *ServerLiveRequest) (*ServerLiveResponse, error)
    // The ServerReady API indicates if the server is ready for inferencing.
    ServerReady(context.Context, *ServerReadyRequest) (*ServerReadyResponse, error)
    // The ModelReady API indicates if a specific model is ready for inferencing.
    ModelReady(context.Context, *ModelReadyRequest) (*ModelReadyResponse, error)
    // The ServerMetadata API provides information about the server. Errors are
    // indicated by the google.rpc.Status returned for the request. The OK code
    // indicates success and other codes indicate failure.
    ServerMetadata(context.Context, *ServerMetadataRequest) (*ServerMetadataResponse, error)
    // The per-model metadata API provides information about a model. Errors are
    // indicated by the google.rpc.Status returned for the request. The OK code
    // indicates success and other codes indicate failure.
    ModelMetadata(context.Context, *ModelMetadataRequest) (*ModelMetadataResponse, error)
    // The ModelInfer API performs inference using the specified model. Errors are
    // indicated by the google.rpc.Status returned for the request. The OK code
    // indicates success and other codes indicate failure.
    ModelInfer(context.Context, *ModelInferRequest) (*ModelInferResponse, error)
}
GRPCInferenceServiceServer is the server API for GRPCInferenceService service.
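A minimal sketch of an implementation, assuming the generated code is imported under a hypothetical path and served on an arbitrary port. Only ServerLive is overridden; the embedded UnimplementedGRPCInferenceServiceServer supplies the remaining methods until they are implemented. Registration goes through RegisterGRPCInferenceServiceServer.

package main

import (
    "context"
    "log"
    "net"

    "google.golang.org/grpc"

    pb "example.com/your/module/inference" // hypothetical import path for this package
)

// inferenceServer embeds the unimplemented stub so that any method it does
// not override still satisfies GRPCInferenceServiceServer.
type inferenceServer struct {
    pb.UnimplementedGRPCInferenceServiceServer
}

// ServerLive reports that the server can accept requests.
func (s *inferenceServer) ServerLive(ctx context.Context, in *pb.ServerLiveRequest) (*pb.ServerLiveResponse, error) {
    return &pb.ServerLiveResponse{Live: true}, nil
}

func main() {
    lis, err := net.Listen("tcp", ":8081") // placeholder listen address
    if err != nil {
        log.Fatalf("listen: %v", err)
    }
    srv := grpc.NewServer()
    pb.RegisterGRPCInferenceServiceServer(srv, &inferenceServer{})
    if err := srv.Serve(lis); err != nil {
        log.Fatalf("serve: %v", err)
    }
}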
type InferParameter ¶
type InferParameter struct {
    // The parameter value can be a string, an int64, a boolean
    // or a message specific to a predefined parameter.
    //
    // Types that are assignable to ParameterChoice:
    //  *InferParameter_BoolParam
    //  *InferParameter_Int64Param
    //  *InferParameter_StringParam
    ParameterChoice isInferParameter_ParameterChoice `protobuf_oneof:"parameter_choice"`
    // contains filtered or unexported fields
}
An inference parameter value. The Parameters message describes a “name”/“value” pair, where the “name” is the name of the parameter and the “value” is a boolean, integer, or string corresponding to the parameter.
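A minimal sketch of populating the parameter_choice oneof, assuming the snippet sits alongside the generated types; the parameter values shown are purely illustrative.

// exampleParameters sets each parameter_choice variant and reads it back.
func exampleParameters() {
    s := &InferParameter{ParameterChoice: &InferParameter_StringParam{StringParam: "classification"}}
    i := &InferParameter{ParameterChoice: &InferParameter_Int64Param{Int64Param: 5}}
    b := &InferParameter{ParameterChoice: &InferParameter_BoolParam{BoolParam: true}}

    // The typed getters return the stored value only when that variant is set;
    // otherwise they return the zero value for the type.
    _, _, _ = s.GetStringParam(), i.GetInt64Param(), b.GetBoolParam()
}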
func (*InferParameter) Descriptor ¶ deprecated
func (*InferParameter) Descriptor() ([]byte, []int)
Deprecated: Use InferParameter.ProtoReflect.Descriptor instead.
func (*InferParameter) GetBoolParam ¶
func (x *InferParameter) GetBoolParam() bool
func (*InferParameter) GetInt64Param ¶
func (x *InferParameter) GetInt64Param() int64
func (*InferParameter) GetParameterChoice ¶
func (m *InferParameter) GetParameterChoice() isInferParameter_ParameterChoice
func (*InferParameter) GetStringParam ¶
func (x *InferParameter) GetStringParam() string
func (*InferParameter) ProtoMessage ¶
func (*InferParameter) ProtoMessage()
func (*InferParameter) ProtoReflect ¶
func (x *InferParameter) ProtoReflect() protoreflect.Message
func (*InferParameter) Reset ¶
func (x *InferParameter) Reset()
func (*InferParameter) String ¶
func (x *InferParameter) String() string
type InferParameter_BoolParam ¶
type InferParameter_BoolParam struct {
    // A boolean parameter value.
    BoolParam bool `protobuf:"varint,1,opt,name=bool_param,json=boolParam,proto3,oneof"`
}
type InferParameter_Int64Param ¶
type InferParameter_Int64Param struct {
    // An int64 parameter value.
    Int64Param int64 `protobuf:"varint,2,opt,name=int64_param,json=int64Param,proto3,oneof"`
}
type InferParameter_StringParam ¶
type InferParameter_StringParam struct {
    // A string parameter value.
    StringParam string `protobuf:"bytes,3,opt,name=string_param,json=stringParam,proto3,oneof"`
}
type InferTensorContents ¶
type InferTensorContents struct {
    // Representation for BOOL data type. The size must match what is
    // expected by the tensor's shape. The contents must be the flattened,
    // one-dimensional, row-major order of the tensor elements.
    BoolContents []bool `protobuf:"varint,1,rep,packed,name=bool_contents,json=boolContents,proto3" json:"bool_contents,omitempty"`
    // Representation for INT8, INT16, and INT32 data types. The size
    // must match what is expected by the tensor's shape. The contents
    // must be the flattened, one-dimensional, row-major order of the
    // tensor elements.
    IntContents []int32 `protobuf:"varint,2,rep,packed,name=int_contents,json=intContents,proto3" json:"int_contents,omitempty"`
    // Representation for INT64 data types. The size must match what
    // is expected by the tensor's shape. The contents must be the
    // flattened, one-dimensional, row-major order of the tensor elements.
    Int64Contents []int64 `protobuf:"varint,3,rep,packed,name=int64_contents,json=int64Contents,proto3" json:"int64_contents,omitempty"`
    // Representation for UINT8, UINT16, and UINT32 data types. The size
    // must match what is expected by the tensor's shape. The contents
    // must be the flattened, one-dimensional, row-major order of the
    // tensor elements.
    UintContents []uint32 `protobuf:"varint,4,rep,packed,name=uint_contents,json=uintContents,proto3" json:"uint_contents,omitempty"`
    // Representation for UINT64 data types. The size must match what
    // is expected by the tensor's shape. The contents must be the
    // flattened, one-dimensional, row-major order of the tensor elements.
    Uint64Contents []uint64 `protobuf:"varint,5,rep,packed,name=uint64_contents,json=uint64Contents,proto3" json:"uint64_contents,omitempty"`
    // Representation for FP32 data type. The size must match what is
    // expected by the tensor's shape. The contents must be the flattened,
    // one-dimensional, row-major order of the tensor elements.
    Fp32Contents []float32 `protobuf:"fixed32,6,rep,packed,name=fp32_contents,json=fp32Contents,proto3" json:"fp32_contents,omitempty"`
    // Representation for FP64 data type. The size must match what is
    // expected by the tensor's shape. The contents must be the flattened,
    // one-dimensional, row-major order of the tensor elements.
    Fp64Contents []float64 `protobuf:"fixed64,7,rep,packed,name=fp64_contents,json=fp64Contents,proto3" json:"fp64_contents,omitempty"`
    // Representation for BYTES data type. The size must match what is
    // expected by the tensor's shape. The contents must be the flattened,
    // one-dimensional, row-major order of the tensor elements.
    BytesContents [][]byte `protobuf:"bytes,8,rep,name=bytes_contents,json=bytesContents,proto3" json:"bytes_contents,omitempty"`
    // contains filtered or unexported fields
}
The data contained in a tensor, represented by the repeated field that matches the tensor's data type. Protobuf oneof is not used because oneofs cannot contain repeated fields.
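A minimal sketch, assuming the snippet sits alongside the generated types: a 2x3 FP32 tensor carried in the typed contents field, with the six values given in flattened row-major order. The tensor name is a placeholder.

// exampleTypedContents builds an input tensor whose data travels in the
// typed Fp32Contents field rather than in raw bytes.
func exampleTypedContents() *ModelInferRequest_InferInputTensor {
    return &ModelInferRequest_InferInputTensor{
        Name:     "input_0", // hypothetical tensor name
        Datatype: "FP32",
        Shape:    []int64{2, 3},
        Contents: &InferTensorContents{
            Fp32Contents: []float32{1, 2, 3, 4, 5, 6}, // row-major flattening of the 2x3 matrix
        },
    }
}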
func (*InferTensorContents) Descriptor ¶ deprecated
func (*InferTensorContents) Descriptor() ([]byte, []int)
Deprecated: Use InferTensorContents.ProtoReflect.Descriptor instead.
func (*InferTensorContents) GetBoolContents ¶
func (x *InferTensorContents) GetBoolContents() []bool
func (*InferTensorContents) GetBytesContents ¶
func (x *InferTensorContents) GetBytesContents() [][]byte
func (*InferTensorContents) GetFp32Contents ¶
func (x *InferTensorContents) GetFp32Contents() []float32
func (*InferTensorContents) GetFp64Contents ¶
func (x *InferTensorContents) GetFp64Contents() []float64
func (*InferTensorContents) GetInt64Contents ¶
func (x *InferTensorContents) GetInt64Contents() []int64
func (*InferTensorContents) GetIntContents ¶
func (x *InferTensorContents) GetIntContents() []int32
func (*InferTensorContents) GetUint64Contents ¶
func (x *InferTensorContents) GetUint64Contents() []uint64
func (*InferTensorContents) GetUintContents ¶
func (x *InferTensorContents) GetUintContents() []uint32
func (*InferTensorContents) ProtoMessage ¶
func (*InferTensorContents) ProtoMessage()
func (*InferTensorContents) ProtoReflect ¶
func (x *InferTensorContents) ProtoReflect() protoreflect.Message
func (*InferTensorContents) Reset ¶
func (x *InferTensorContents) Reset()
func (*InferTensorContents) String ¶
func (x *InferTensorContents) String() string
type ModelInferRequest ¶
type ModelInferRequest struct {
    // The name of the model to use for inferencing.
    ModelName string `protobuf:"bytes,1,opt,name=model_name,json=modelName,proto3" json:"model_name,omitempty"`
    // The version of the model to use for inference. If not given the
    // server will choose a version based on the model and internal policy.
    ModelVersion string `protobuf:"bytes,2,opt,name=model_version,json=modelVersion,proto3" json:"model_version,omitempty"`
    // Optional identifier for the request. If specified will be
    // returned in the response.
    Id string `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"`
    // Optional inference parameters.
    Parameters map[string]*InferParameter `` /* 161-byte string literal not displayed */
    // The input tensors for the inference.
    Inputs []*ModelInferRequest_InferInputTensor `protobuf:"bytes,5,rep,name=inputs,proto3" json:"inputs,omitempty"`
    // The requested output tensors for the inference. Optional, if not
    // specified all outputs produced by the model will be returned.
    Outputs []*ModelInferRequest_InferRequestedOutputTensor `protobuf:"bytes,6,rep,name=outputs,proto3" json:"outputs,omitempty"`
    // The data contained in an input tensor can be represented in "raw"
    // bytes form or in the repeated type that matches the tensor's data
    // type. To use the raw representation 'raw_input_contents' must be
    // initialized with data for each tensor in the same order as
    // 'inputs'. For each tensor, the size of this content must match
    // what is expected by the tensor's shape and data type. The raw
    // data must be the flattened, one-dimensional, row-major order of
    // the tensor elements without any stride or padding between the
    // elements. Note that the FP16 data type must be represented as raw
    // content as there is no specific data type for a 16-bit float
    // type.
    //
    // If this field is specified then InferInputTensor::contents must
    // not be specified for any input tensor.
    RawInputContents [][]byte `protobuf:"bytes,7,rep,name=raw_input_contents,json=rawInputContents,proto3" json:"raw_input_contents,omitempty"`
    // contains filtered or unexported fields
}
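A minimal sketch of the raw representation described above, assuming the snippet sits in the same package as the generated types. The model name and tensor name are placeholders, and little-endian encoding is an assumption; use whatever byte order your serving runtime expects.

package inference // assumed: the package containing the generated types

import (
    "bytes"
    "encoding/binary"
    "fmt"
)

// buildRawRequest assembles a request whose single FP32 input travels in
// raw_input_contents; the tensor's Contents field is deliberately left nil.
func buildRawRequest() (*ModelInferRequest, error) {
    values := []float32{1, 2, 3, 4, 5, 6} // row-major 2x3 tensor
    buf := new(bytes.Buffer)
    if err := binary.Write(buf, binary.LittleEndian, values); err != nil {
        return nil, fmt.Errorf("encode raw tensor: %w", err)
    }
    return &ModelInferRequest{
        ModelName: "my-model", // hypothetical model name
        Inputs: []*ModelInferRequest_InferInputTensor{{
            Name:     "input_0", // hypothetical tensor name
            Datatype: "FP32",
            Shape:    []int64{2, 3},
        }},
        // One raw entry per input tensor, in the same order as Inputs.
        RawInputContents: [][]byte{buf.Bytes()},
    }, nil
}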
func (*ModelInferRequest) Descriptor ¶ deprecated
func (*ModelInferRequest) Descriptor() ([]byte, []int)
Deprecated: Use ModelInferRequest.ProtoReflect.Descriptor instead.
func (*ModelInferRequest) GetId ¶
func (x *ModelInferRequest) GetId() string
func (*ModelInferRequest) GetInputs ¶
func (x *ModelInferRequest) GetInputs() []*ModelInferRequest_InferInputTensor
func (*ModelInferRequest) GetModelName ¶
func (x *ModelInferRequest) GetModelName() string
func (*ModelInferRequest) GetModelVersion ¶
func (x *ModelInferRequest) GetModelVersion() string
func (*ModelInferRequest) GetOutputs ¶
func (x *ModelInferRequest) GetOutputs() []*ModelInferRequest_InferRequestedOutputTensor
func (*ModelInferRequest) GetParameters ¶
func (x *ModelInferRequest) GetParameters() map[string]*InferParameter
func (*ModelInferRequest) GetRawInputContents ¶
func (x *ModelInferRequest) GetRawInputContents() [][]byte
func (*ModelInferRequest) ProtoMessage ¶
func (*ModelInferRequest) ProtoMessage()
func (*ModelInferRequest) ProtoReflect ¶
func (x *ModelInferRequest) ProtoReflect() protoreflect.Message
func (*ModelInferRequest) Reset ¶
func (x *ModelInferRequest) Reset()
func (*ModelInferRequest) String ¶
func (x *ModelInferRequest) String() string
type ModelInferRequest_InferInputTensor ¶
type ModelInferRequest_InferInputTensor struct {
    // The tensor name.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // The tensor data type.
    Datatype string `protobuf:"bytes,2,opt,name=datatype,proto3" json:"datatype,omitempty"`
    // The tensor shape.
    Shape []int64 `protobuf:"varint,3,rep,packed,name=shape,proto3" json:"shape,omitempty"`
    // Optional inference input tensor parameters.
    Parameters map[string]*InferParameter `` /* 161-byte string literal not displayed */
    // The tensor contents using a data-type format. This field must
    // not be specified if "raw" tensor contents are being used for
    // the inference request.
    Contents *InferTensorContents `protobuf:"bytes,5,opt,name=contents,proto3" json:"contents,omitempty"`
    // contains filtered or unexported fields
}
An input tensor for an inference request.
func (*ModelInferRequest_InferInputTensor) Descriptor ¶ deprecated
func (*ModelInferRequest_InferInputTensor) Descriptor() ([]byte, []int)
Deprecated: Use ModelInferRequest_InferInputTensor.ProtoReflect.Descriptor instead.
func (*ModelInferRequest_InferInputTensor) GetContents ¶
func (x *ModelInferRequest_InferInputTensor) GetContents() *InferTensorContents
func (*ModelInferRequest_InferInputTensor) GetDatatype ¶
func (x *ModelInferRequest_InferInputTensor) GetDatatype() string
func (*ModelInferRequest_InferInputTensor) GetName ¶
func (x *ModelInferRequest_InferInputTensor) GetName() string
func (*ModelInferRequest_InferInputTensor) GetParameters ¶
func (x *ModelInferRequest_InferInputTensor) GetParameters() map[string]*InferParameter
func (*ModelInferRequest_InferInputTensor) GetShape ¶
func (x *ModelInferRequest_InferInputTensor) GetShape() []int64
func (*ModelInferRequest_InferInputTensor) ProtoMessage ¶
func (*ModelInferRequest_InferInputTensor) ProtoMessage()
func (*ModelInferRequest_InferInputTensor) ProtoReflect ¶
func (x *ModelInferRequest_InferInputTensor) ProtoReflect() protoreflect.Message
func (*ModelInferRequest_InferInputTensor) Reset ¶
func (x *ModelInferRequest_InferInputTensor) Reset()
func (*ModelInferRequest_InferInputTensor) String ¶
func (x *ModelInferRequest_InferInputTensor) String() string
type ModelInferRequest_InferRequestedOutputTensor ¶
type ModelInferRequest_InferRequestedOutputTensor struct {
    // The tensor name.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // Optional requested output tensor parameters.
    Parameters map[string]*InferParameter `` /* 161-byte string literal not displayed */
    // contains filtered or unexported fields
}
An output tensor requested for an inference request.
func (*ModelInferRequest_InferRequestedOutputTensor) Descriptor ¶ deprecated
func (*ModelInferRequest_InferRequestedOutputTensor) Descriptor() ([]byte, []int)
Deprecated: Use ModelInferRequest_InferRequestedOutputTensor.ProtoReflect.Descriptor instead.
func (*ModelInferRequest_InferRequestedOutputTensor) GetName ¶
func (x *ModelInferRequest_InferRequestedOutputTensor) GetName() string
func (*ModelInferRequest_InferRequestedOutputTensor) GetParameters ¶
func (x *ModelInferRequest_InferRequestedOutputTensor) GetParameters() map[string]*InferParameter
func (*ModelInferRequest_InferRequestedOutputTensor) ProtoMessage ¶
func (*ModelInferRequest_InferRequestedOutputTensor) ProtoMessage()
func (*ModelInferRequest_InferRequestedOutputTensor) ProtoReflect ¶
func (x *ModelInferRequest_InferRequestedOutputTensor) ProtoReflect() protoreflect.Message
func (*ModelInferRequest_InferRequestedOutputTensor) Reset ¶
func (x *ModelInferRequest_InferRequestedOutputTensor) Reset()
func (*ModelInferRequest_InferRequestedOutputTensor) String ¶
func (x *ModelInferRequest_InferRequestedOutputTensor) String() string
type ModelInferResponse ¶
type ModelInferResponse struct {
    // The name of the model used for inference.
    ModelName string `protobuf:"bytes,1,opt,name=model_name,json=modelName,proto3" json:"model_name,omitempty"`
    // The version of the model used for inference.
    ModelVersion string `protobuf:"bytes,2,opt,name=model_version,json=modelVersion,proto3" json:"model_version,omitempty"`
    // The id of the inference request if one was specified.
    Id string `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"`
    // Optional inference response parameters.
    Parameters map[string]*InferParameter `` /* 161-byte string literal not displayed */
    // The output tensors holding inference results.
    Outputs []*ModelInferResponse_InferOutputTensor `protobuf:"bytes,5,rep,name=outputs,proto3" json:"outputs,omitempty"`
    // The data contained in an output tensor can be represented in
    // "raw" bytes form or in the repeated type that matches the
    // tensor's data type. To use the raw representation 'raw_output_contents'
    // must be initialized with data for each tensor in the same order as
    // 'outputs'. For each tensor, the size of this content must match
    // what is expected by the tensor's shape and data type. The raw
    // data must be the flattened, one-dimensional, row-major order of
    // the tensor elements without any stride or padding between the
    // elements. Note that the FP16 data type must be represented as raw
    // content as there is no specific data type for a 16-bit float
    // type.
    //
    // If this field is specified then InferOutputTensor::contents must
    // not be specified for any output tensor.
    RawOutputContents [][]byte `protobuf:"bytes,6,rep,name=raw_output_contents,json=rawOutputContents,proto3" json:"raw_output_contents,omitempty"`
    // contains filtered or unexported fields
}
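A minimal sketch of the inverse operation on the response side, assuming the snippet sits in the same package as the generated types and that the server encoded FP32 outputs little-endian (an assumption; match your runtime).

package inference // assumed: the package containing the generated types

import (
    "encoding/binary"
    "fmt"
    "math"
)

// decodeFP32Output converts the raw bytes of the i-th output tensor back into
// float32 values. raw_output_contents is positional: entry i corresponds to
// Outputs[i].
func decodeFP32Output(resp *ModelInferResponse, i int) ([]float32, error) {
    raw := resp.GetRawOutputContents()[i]
    if len(raw)%4 != 0 {
        return nil, fmt.Errorf("raw output length %d is not a multiple of 4", len(raw))
    }
    out := make([]float32, len(raw)/4)
    for j := range out {
        out[j] = math.Float32frombits(binary.LittleEndian.Uint32(raw[j*4:]))
    }
    return out, nil
}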
func (*ModelInferResponse) Descriptor ¶ deprecated
func (*ModelInferResponse) Descriptor() ([]byte, []int)
Deprecated: Use ModelInferResponse.ProtoReflect.Descriptor instead.
func (*ModelInferResponse) GetId ¶
func (x *ModelInferResponse) GetId() string
func (*ModelInferResponse) GetModelName ¶
func (x *ModelInferResponse) GetModelName() string
func (*ModelInferResponse) GetModelVersion ¶
func (x *ModelInferResponse) GetModelVersion() string
func (*ModelInferResponse) GetOutputs ¶
func (x *ModelInferResponse) GetOutputs() []*ModelInferResponse_InferOutputTensor
func (*ModelInferResponse) GetParameters ¶
func (x *ModelInferResponse) GetParameters() map[string]*InferParameter
func (*ModelInferResponse) GetRawOutputContents ¶
func (x *ModelInferResponse) GetRawOutputContents() [][]byte
func (*ModelInferResponse) ProtoMessage ¶
func (*ModelInferResponse) ProtoMessage()
func (*ModelInferResponse) ProtoReflect ¶
func (x *ModelInferResponse) ProtoReflect() protoreflect.Message
func (*ModelInferResponse) Reset ¶
func (x *ModelInferResponse) Reset()
func (*ModelInferResponse) String ¶
func (x *ModelInferResponse) String() string
type ModelInferResponse_InferOutputTensor ¶
type ModelInferResponse_InferOutputTensor struct {
    // The tensor name.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // The tensor data type.
    Datatype string `protobuf:"bytes,2,opt,name=datatype,proto3" json:"datatype,omitempty"`
    // The tensor shape.
    Shape []int64 `protobuf:"varint,3,rep,packed,name=shape,proto3" json:"shape,omitempty"`
    // Optional output tensor parameters.
    Parameters map[string]*InferParameter `` /* 161-byte string literal not displayed */
    // The tensor contents using a data-type format. This field must
    // not be specified if "raw" tensor contents are being used for
    // the inference response.
    Contents *InferTensorContents `protobuf:"bytes,5,opt,name=contents,proto3" json:"contents,omitempty"`
    // contains filtered or unexported fields
}
An output tensor returned for an inference request.
func (*ModelInferResponse_InferOutputTensor) Descriptor ¶ deprecated
func (*ModelInferResponse_InferOutputTensor) Descriptor() ([]byte, []int)
Deprecated: Use ModelInferResponse_InferOutputTensor.ProtoReflect.Descriptor instead.
func (*ModelInferResponse_InferOutputTensor) GetContents ¶
func (x *ModelInferResponse_InferOutputTensor) GetContents() *InferTensorContents
func (*ModelInferResponse_InferOutputTensor) GetDatatype ¶
func (x *ModelInferResponse_InferOutputTensor) GetDatatype() string
func (*ModelInferResponse_InferOutputTensor) GetName ¶
func (x *ModelInferResponse_InferOutputTensor) GetName() string
func (*ModelInferResponse_InferOutputTensor) GetParameters ¶
func (x *ModelInferResponse_InferOutputTensor) GetParameters() map[string]*InferParameter
func (*ModelInferResponse_InferOutputTensor) GetShape ¶
func (x *ModelInferResponse_InferOutputTensor) GetShape() []int64
func (*ModelInferResponse_InferOutputTensor) ProtoMessage ¶
func (*ModelInferResponse_InferOutputTensor) ProtoMessage()
func (*ModelInferResponse_InferOutputTensor) ProtoReflect ¶
func (x *ModelInferResponse_InferOutputTensor) ProtoReflect() protoreflect.Message
func (*ModelInferResponse_InferOutputTensor) Reset ¶
func (x *ModelInferResponse_InferOutputTensor) Reset()
func (*ModelInferResponse_InferOutputTensor) String ¶
func (x *ModelInferResponse_InferOutputTensor) String() string
type ModelMetadataRequest ¶
type ModelMetadataRequest struct {
    // The name of the model.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // The version of the model to get metadata for. If not given the
    // server will choose a version based on the model and internal policy.
    Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"`
    // contains filtered or unexported fields
}
func (*ModelMetadataRequest) Descriptor ¶ deprecated
func (*ModelMetadataRequest) Descriptor() ([]byte, []int)
Deprecated: Use ModelMetadataRequest.ProtoReflect.Descriptor instead.
func (*ModelMetadataRequest) GetName ¶
func (x *ModelMetadataRequest) GetName() string
func (*ModelMetadataRequest) GetVersion ¶
func (x *ModelMetadataRequest) GetVersion() string
func (*ModelMetadataRequest) ProtoMessage ¶
func (*ModelMetadataRequest) ProtoMessage()
func (*ModelMetadataRequest) ProtoReflect ¶
func (x *ModelMetadataRequest) ProtoReflect() protoreflect.Message
func (*ModelMetadataRequest) Reset ¶
func (x *ModelMetadataRequest) Reset()
func (*ModelMetadataRequest) String ¶
func (x *ModelMetadataRequest) String() string
type ModelMetadataResponse ¶
type ModelMetadataResponse struct {
    // The model name.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // The versions of the model available on the server.
    Versions []string `protobuf:"bytes,2,rep,name=versions,proto3" json:"versions,omitempty"`
    // The model's platform. See Platforms.
    Platform string `protobuf:"bytes,3,opt,name=platform,proto3" json:"platform,omitempty"`
    // The model's inputs.
    Inputs []*ModelMetadataResponse_TensorMetadata `protobuf:"bytes,4,rep,name=inputs,proto3" json:"inputs,omitempty"`
    // The model's outputs.
    Outputs []*ModelMetadataResponse_TensorMetadata `protobuf:"bytes,5,rep,name=outputs,proto3" json:"outputs,omitempty"`
    // contains filtered or unexported fields
}
func (*ModelMetadataResponse) Descriptor ¶ deprecated
func (*ModelMetadataResponse) Descriptor() ([]byte, []int)
Deprecated: Use ModelMetadataResponse.ProtoReflect.Descriptor instead.
func (*ModelMetadataResponse) GetInputs ¶
func (x *ModelMetadataResponse) GetInputs() []*ModelMetadataResponse_TensorMetadata
func (*ModelMetadataResponse) GetName ¶
func (x *ModelMetadataResponse) GetName() string
func (*ModelMetadataResponse) GetOutputs ¶
func (x *ModelMetadataResponse) GetOutputs() []*ModelMetadataResponse_TensorMetadata
func (*ModelMetadataResponse) GetPlatform ¶
func (x *ModelMetadataResponse) GetPlatform() string
func (*ModelMetadataResponse) GetVersions ¶
func (x *ModelMetadataResponse) GetVersions() []string
func (*ModelMetadataResponse) ProtoMessage ¶
func (*ModelMetadataResponse) ProtoMessage()
func (*ModelMetadataResponse) ProtoReflect ¶
func (x *ModelMetadataResponse) ProtoReflect() protoreflect.Message
func (*ModelMetadataResponse) Reset ¶
func (x *ModelMetadataResponse) Reset()
func (*ModelMetadataResponse) String ¶
func (x *ModelMetadataResponse) String() string
type ModelMetadataResponse_TensorMetadata ¶
type ModelMetadataResponse_TensorMetadata struct {
    // The tensor name.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // The tensor data type.
    Datatype string `protobuf:"bytes,2,opt,name=datatype,proto3" json:"datatype,omitempty"`
    // The tensor shape. A variable-size dimension is represented
    // by a -1 value.
    Shape []int64 `protobuf:"varint,3,rep,packed,name=shape,proto3" json:"shape,omitempty"`
    // contains filtered or unexported fields
}
Metadata for a tensor.
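A minimal sketch, assuming the snippet sits alongside the generated types: finding which dimensions of a tensor are variable-size according to the -1 convention above.

// dynamicDims returns the indices of the dimensions reported as variable-size.
func dynamicDims(md *ModelMetadataResponse_TensorMetadata) []int {
    var dims []int
    for i, d := range md.GetShape() {
        if d == -1 {
            dims = append(dims, i)
        }
    }
    return dims
}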
func (*ModelMetadataResponse_TensorMetadata) Descriptor ¶ deprecated
func (*ModelMetadataResponse_TensorMetadata) Descriptor() ([]byte, []int)
Deprecated: Use ModelMetadataResponse_TensorMetadata.ProtoReflect.Descriptor instead.
func (*ModelMetadataResponse_TensorMetadata) GetDatatype ¶
func (x *ModelMetadataResponse_TensorMetadata) GetDatatype() string
func (*ModelMetadataResponse_TensorMetadata) GetName ¶
func (x *ModelMetadataResponse_TensorMetadata) GetName() string
func (*ModelMetadataResponse_TensorMetadata) GetShape ¶
func (x *ModelMetadataResponse_TensorMetadata) GetShape() []int64
func (*ModelMetadataResponse_TensorMetadata) ProtoMessage ¶
func (*ModelMetadataResponse_TensorMetadata) ProtoMessage()
func (*ModelMetadataResponse_TensorMetadata) ProtoReflect ¶
func (x *ModelMetadataResponse_TensorMetadata) ProtoReflect() protoreflect.Message
func (*ModelMetadataResponse_TensorMetadata) Reset ¶
func (x *ModelMetadataResponse_TensorMetadata) Reset()
func (*ModelMetadataResponse_TensorMetadata) String ¶
func (x *ModelMetadataResponse_TensorMetadata) String() string
type ModelReadyRequest ¶
type ModelReadyRequest struct {
    // The name of the model to check for readiness.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // The version of the model to check for readiness. If not given the
    // server will choose a version based on the model and internal policy.
    Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"`
    // contains filtered or unexported fields
}
func (*ModelReadyRequest) Descriptor ¶ deprecated
func (*ModelReadyRequest) Descriptor() ([]byte, []int)
Deprecated: Use ModelReadyRequest.ProtoReflect.Descriptor instead.
func (*ModelReadyRequest) GetName ¶
func (x *ModelReadyRequest) GetName() string
func (*ModelReadyRequest) GetVersion ¶
func (x *ModelReadyRequest) GetVersion() string
func (*ModelReadyRequest) ProtoMessage ¶
func (*ModelReadyRequest) ProtoMessage()
func (*ModelReadyRequest) ProtoReflect ¶
func (x *ModelReadyRequest) ProtoReflect() protoreflect.Message
func (*ModelReadyRequest) Reset ¶
func (x *ModelReadyRequest) Reset()
func (*ModelReadyRequest) String ¶
func (x *ModelReadyRequest) String() string
type ModelReadyResponse ¶
type ModelReadyResponse struct {
    // True if the model is ready, false if not ready.
    Ready bool `protobuf:"varint,1,opt,name=ready,proto3" json:"ready,omitempty"`
    // contains filtered or unexported fields
}
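A minimal sketch of waiting for a model to become ready, assuming the snippet sits in the same package as the generated types; the one-second poll interval and the error handling are arbitrary choices.

package inference // assumed: the package containing the generated types

import (
    "context"
    "time"
)

// waitForModel polls ModelReady once per second until the model reports ready
// or the context is cancelled.
func waitForModel(ctx context.Context, c GRPCInferenceServiceClient, name string) error {
    ticker := time.NewTicker(time.Second)
    defer ticker.Stop()
    for {
        resp, err := c.ModelReady(ctx, &ModelReadyRequest{Name: name})
        if err == nil && resp.GetReady() {
            return nil
        }
        select {
        case <-ctx.Done():
            return ctx.Err()
        case <-ticker.C:
        }
    }
}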
func (*ModelReadyResponse) Descriptor ¶ deprecated
func (*ModelReadyResponse) Descriptor() ([]byte, []int)
Deprecated: Use ModelReadyResponse.ProtoReflect.Descriptor instead.
func (*ModelReadyResponse) GetReady ¶
func (x *ModelReadyResponse) GetReady() bool
func (*ModelReadyResponse) ProtoMessage ¶
func (*ModelReadyResponse) ProtoMessage()
func (*ModelReadyResponse) ProtoReflect ¶
func (x *ModelReadyResponse) ProtoReflect() protoreflect.Message
func (*ModelReadyResponse) Reset ¶
func (x *ModelReadyResponse) Reset()
func (*ModelReadyResponse) String ¶
func (x *ModelReadyResponse) String() string
type ServerLiveRequest ¶
type ServerLiveRequest struct {
// contains filtered or unexported fields
}
func (*ServerLiveRequest) Descriptor ¶ deprecated
func (*ServerLiveRequest) Descriptor() ([]byte, []int)
Deprecated: Use ServerLiveRequest.ProtoReflect.Descriptor instead.
func (*ServerLiveRequest) ProtoMessage ¶
func (*ServerLiveRequest) ProtoMessage()
func (*ServerLiveRequest) ProtoReflect ¶
func (x *ServerLiveRequest) ProtoReflect() protoreflect.Message
func (*ServerLiveRequest) Reset ¶
func (x *ServerLiveRequest) Reset()
func (*ServerLiveRequest) String ¶
func (x *ServerLiveRequest) String() string
type ServerLiveResponse ¶
type ServerLiveResponse struct {
    // True if the inference server is live, false if not live.
    Live bool `protobuf:"varint,1,opt,name=live,proto3" json:"live,omitempty"`
    // contains filtered or unexported fields
}
func (*ServerLiveResponse) Descriptor ¶ deprecated
func (*ServerLiveResponse) Descriptor() ([]byte, []int)
Deprecated: Use ServerLiveResponse.ProtoReflect.Descriptor instead.
func (*ServerLiveResponse) GetLive ¶
func (x *ServerLiveResponse) GetLive() bool
func (*ServerLiveResponse) ProtoMessage ¶
func (*ServerLiveResponse) ProtoMessage()
func (*ServerLiveResponse) ProtoReflect ¶
func (x *ServerLiveResponse) ProtoReflect() protoreflect.Message
func (*ServerLiveResponse) Reset ¶
func (x *ServerLiveResponse) Reset()
func (*ServerLiveResponse) String ¶
func (x *ServerLiveResponse) String() string
type ServerMetadataRequest ¶
type ServerMetadataRequest struct {
// contains filtered or unexported fields
}
func (*ServerMetadataRequest) Descriptor ¶ deprecated
func (*ServerMetadataRequest) Descriptor() ([]byte, []int)
Deprecated: Use ServerMetadataRequest.ProtoReflect.Descriptor instead.
func (*ServerMetadataRequest) ProtoMessage ¶
func (*ServerMetadataRequest) ProtoMessage()
func (*ServerMetadataRequest) ProtoReflect ¶
func (x *ServerMetadataRequest) ProtoReflect() protoreflect.Message
func (*ServerMetadataRequest) Reset ¶
func (x *ServerMetadataRequest) Reset()
func (*ServerMetadataRequest) String ¶
func (x *ServerMetadataRequest) String() string
type ServerMetadataResponse ¶
type ServerMetadataResponse struct {
    // The server name.
    Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
    // The server version.
    Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"`
    // The extensions supported by the server.
    Extensions []string `protobuf:"bytes,3,rep,name=extensions,proto3" json:"extensions,omitempty"`
    // contains filtered or unexported fields
}
func (*ServerMetadataResponse) Descriptor ¶ deprecated
func (*ServerMetadataResponse) Descriptor() ([]byte, []int)
Deprecated: Use ServerMetadataResponse.ProtoReflect.Descriptor instead.
func (*ServerMetadataResponse) GetExtensions ¶
func (x *ServerMetadataResponse) GetExtensions() []string
func (*ServerMetadataResponse) GetName ¶
func (x *ServerMetadataResponse) GetName() string
func (*ServerMetadataResponse) GetVersion ¶
func (x *ServerMetadataResponse) GetVersion() string
func (*ServerMetadataResponse) ProtoMessage ¶
func (*ServerMetadataResponse) ProtoMessage()
func (*ServerMetadataResponse) ProtoReflect ¶
func (x *ServerMetadataResponse) ProtoReflect() protoreflect.Message
func (*ServerMetadataResponse) Reset ¶
func (x *ServerMetadataResponse) Reset()
func (*ServerMetadataResponse) String ¶
func (x *ServerMetadataResponse) String() string
type ServerReadyRequest ¶
type ServerReadyRequest struct {
// contains filtered or unexported fields
}
func (*ServerReadyRequest) Descriptor ¶ deprecated
func (*ServerReadyRequest) Descriptor() ([]byte, []int)
Deprecated: Use ServerReadyRequest.ProtoReflect.Descriptor instead.
func (*ServerReadyRequest) ProtoMessage ¶
func (*ServerReadyRequest) ProtoMessage()
func (*ServerReadyRequest) ProtoReflect ¶
func (x *ServerReadyRequest) ProtoReflect() protoreflect.Message
func (*ServerReadyRequest) Reset ¶
func (x *ServerReadyRequest) Reset()
func (*ServerReadyRequest) String ¶
func (x *ServerReadyRequest) String() string
type ServerReadyResponse ¶
type ServerReadyResponse struct {
    // True if the inference server is ready, false if not ready.
    Ready bool `protobuf:"varint,1,opt,name=ready,proto3" json:"ready,omitempty"`
    // contains filtered or unexported fields
}
func (*ServerReadyResponse) Descriptor ¶ deprecated
func (*ServerReadyResponse) Descriptor() ([]byte, []int)
Deprecated: Use ServerReadyResponse.ProtoReflect.Descriptor instead.
func (*ServerReadyResponse) GetReady ¶
func (x *ServerReadyResponse) GetReady() bool
func (*ServerReadyResponse) ProtoMessage ¶
func (*ServerReadyResponse) ProtoMessage()
func (*ServerReadyResponse) ProtoReflect ¶
func (x *ServerReadyResponse) ProtoReflect() protoreflect.Message
func (*ServerReadyResponse) Reset ¶
func (x *ServerReadyResponse) Reset()
func (*ServerReadyResponse) String ¶
func (x *ServerReadyResponse) String() string
type UnimplementedGRPCInferenceServiceServer ¶
type UnimplementedGRPCInferenceServiceServer struct { }
UnimplementedGRPCInferenceServiceServer can be embedded to have forward compatible implementations.
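A minimal sketch of the embedding pattern, assuming the snippet sits alongside the generated types (the "context" import is omitted for brevity): only ServerReady is overridden, and every other RPC falls through to the stub, which responds with a gRPC Unimplemented status.

// readinessOnlyServer compiles even though it implements a single RPC, because
// the embedded stub satisfies the rest of GRPCInferenceServiceServer.
type readinessOnlyServer struct {
    UnimplementedGRPCInferenceServiceServer
}

func (s *readinessOnlyServer) ServerReady(ctx context.Context, in *ServerReadyRequest) (*ServerReadyResponse, error) {
    return &ServerReadyResponse{Ready: true}, nil
}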
func (*UnimplementedGRPCInferenceServiceServer) ModelInfer ¶
func (*UnimplementedGRPCInferenceServiceServer) ModelInfer(context.Context, *ModelInferRequest) (*ModelInferResponse, error)
func (*UnimplementedGRPCInferenceServiceServer) ModelMetadata ¶
func (*UnimplementedGRPCInferenceServiceServer) ModelMetadata(context.Context, *ModelMetadataRequest) (*ModelMetadataResponse, error)
func (*UnimplementedGRPCInferenceServiceServer) ModelReady ¶
func (*UnimplementedGRPCInferenceServiceServer) ModelReady(context.Context, *ModelReadyRequest) (*ModelReadyResponse, error)
func (*UnimplementedGRPCInferenceServiceServer) ServerLive ¶
func (*UnimplementedGRPCInferenceServiceServer) ServerLive(context.Context, *ServerLiveRequest) (*ServerLiveResponse, error)
func (*UnimplementedGRPCInferenceServiceServer) ServerMetadata ¶
func (*UnimplementedGRPCInferenceServiceServer) ServerMetadata(context.Context, *ServerMetadataRequest) (*ServerMetadataResponse, error)
func (*UnimplementedGRPCInferenceServiceServer) ServerReady ¶
func (*UnimplementedGRPCInferenceServiceServer) ServerReady(context.Context, *ServerReadyRequest) (*ServerReadyResponse, error)