Package | Description |
---|---|
org.bytedeco.onnx | |
org.bytedeco.onnx.global | |
Modifier and Type | Method and Description |
---|---|
void |
OpSchema.CheckInputOutputType(InferenceContext arg0) |
Modifier and Type | Method and Description |
---|---|
static void |
onnx.appendSingleDimCopiedFromInputTypeToOutputType(InferenceContext ctx,
long inputIndex,
long outputIndex,
long fromDimIndex) |
static void |
onnx.checkInputRank(InferenceContext ctx,
long input_index,
int expected_rank) |
static void |
onnx.dummyInferenceFunction(InferenceContext arg0) |
static BytePointer |
onnx.getAttribute(InferenceContext ctx,
BytePointer attributeName,
BytePointer defaultValue) |
static long |
onnx.getAttribute(InferenceContext ctx,
BytePointer attributeName,
long defaultValue) |
static long |
onnx.getAttribute(InferenceContext ctx,
String attributeName,
long defaultValue) |
static String |
onnx.getAttribute(InferenceContext ctx,
String attributeName,
String defaultValue) |
static TensorShapeProto |
onnx.getInputShape(InferenceContext ctx,
long n) |
static TensorShapeProto |
onnx.getOptionalInputShape(InferenceContext ctx,
long n) |
static TensorShapeProto |
onnx.getOutputShape(InferenceContext ctx,
long n) |
static TensorShapeProto |
onnx.getOutputShape(InferenceContext ctx,
long n,
int default_type) |
static TensorShapeProto |
onnx.getShapeInput(InferenceContext ctx,
long input_index,
boolean[] found) |
static TensorShapeProto |
onnx.getShapeInput(InferenceContext ctx,
long input_index,
BoolPointer found) |
static void |
onnx.InferShapeForFunctionNode(FunctionProto func,
ISchemaRegistry schema_registry,
InferenceContext ctx) |
static void |
onnx.InferShapeForFunctionNode(FunctionProto func,
ISchemaRegistry schema_registry,
InferenceContext ctx,
ShapeInferenceOptions options,
SizeTStringMap model_local_functions_map,
SymbolTable symbolTable,
StringTensorShapeProtoMap generated_shape_data_by_name)
ModelLocalFunctionsMap is a map of function id -> model local function proto.
All the ONNX helper utilities expect the function id == `<function_domain>:<function_name>`.
|
static void |
onnx.InferShapeForFunctionNode(FunctionProto func_proto,
StringIntMap func_opset_imports,
ISchemaRegistry schema_registry,
InferenceContext ctx) |
static void |
onnx.InferShapeForFunctionNode(FunctionProto func_proto,
StringIntMap func_opset_imports,
ISchemaRegistry schema_registry,
InferenceContext ctx,
ShapeInferenceOptions options,
SizeTStringMap model_local_functions_map,
SymbolTable symbolTable,
StringTensorShapeProtoMap generated_shape_data_by_name)
ModelLocalFunctionsMap is a map of function id -> model local function proto.
All the ONNX helper utilities expect the function id == `<function_domain>:<function_name>`.
|
static void |
onnx.propagateElemTypeFromAttributeToOutput(InferenceContext ctx,
BytePointer attributeName,
long outputIndex) |
static void |
onnx.propagateElemTypeFromAttributeToOutput(InferenceContext ctx,
BytePointer attributeName,
long outputIndex,
int expected_type) |
static void |
onnx.propagateElemTypeFromAttributeToOutput(InferenceContext ctx,
BytePointer attributeName,
long outputIndex,
int expected_type,
int default_value) |
static void |
onnx.propagateElemTypeFromAttributeToOutput(InferenceContext ctx,
String attributeName,
long outputIndex) |
static void |
onnx.propagateElemTypeFromAttributeToOutput(InferenceContext ctx,
String attributeName,
long outputIndex,
int expected_type) |
static void |
onnx.propagateElemTypeFromAttributeToOutput(InferenceContext ctx,
String attributeName,
long outputIndex,
int expected_type,
int default_value) |
static void |
onnx.propagateElemTypeFromDtypeToOutput(InferenceContext ctx,
AttributeProto attr,
long outputIndex) |
static void |
onnx.propagateElemTypeFromDtypeToOutput(InferenceContext ctx,
int data_type,
long outputIndex) |
static void |
onnx.propagateElemTypeFromDtypeToOutput(InferenceContext ctx,
int data_type,
long outputIndex,
int expected_value_case) |
static void |
onnx.propagateElemTypeFromInputToOutput(InferenceContext ctx,
long inputIndex,
long outputIndex) |
static void |
onnx.propagateElemTypeFromTensorInputToOutput(InferenceContext ctx,
long inputIndex,
long outputIndex) |
static void |
onnx.propagateShapeAndTypeFromFirstInput(InferenceContext ctx) |
static void |
onnx.propagateShapeFromAttributeToOutput(InferenceContext ctx,
BytePointer attributeName,
long outputIndex) |
static void |
onnx.propagateShapeFromAttributeToOutput(InferenceContext ctx,
BytePointer attributeName,
long outputIndex,
int default_type) |
static void |
onnx.propagateShapeFromAttributeToOutput(InferenceContext ctx,
String attributeName,
long outputIndex) |
static void |
onnx.propagateShapeFromAttributeToOutput(InferenceContext ctx,
String attributeName,
long outputIndex,
int default_type) |
static void |
onnx.propagateShapeFromInputToOutput(InferenceContext ctx,
long inputIndex,
long outputIndex) |
static void |
onnx.unifyInputDim(InferenceContext ctx,
long input_index,
int dim_index,
Dimension dim) |
static void |
onnx.updateOutputElemType(InferenceContext ctx,
long outputIndex,
int elemType) |
static void |
onnx.updateOutputElemType(InferenceContext ctx,
long outputIndex,
int elemType,
int expected_type) |
static void |
onnx.updateOutputShape(InferenceContext ctx,
long outputIndex,
TensorProto tensorProto) |
static void |
onnx.updateOutputShape(InferenceContext ctx,
long outputIndex,
TensorProto tensorProto,
int default_type) |
static void |
onnx.updateOutputShape(InferenceContext ctx,
long outputIndex,
TensorShapeProto shape) |
static void |
onnx.updateOutputShape(InferenceContext ctx,
long outputIndex,
TensorShapeProto shape,
int default_type) |
Copyright © 2024. All rights reserved.