@Name(value="caffe::Net<float>") @NoOffset @Properties(inherit=caffe.class) public class FloatNet extends Pointer
Modifier and Type | Class and Description |
---|---|
static class |
FloatNet.Callback |
Pointer.CustomDeallocator, Pointer.Deallocator, Pointer.NativeDeallocator, Pointer.ReferenceCounter
Constructor and Description |
---|
FloatNet(BytePointer param_file,
int phase) |
FloatNet(BytePointer param_file,
int phase,
int level,
StringVector stages) |
FloatNet(NetParameter param) |
FloatNet(Pointer p)
Pointer cast constructor.
|
FloatNet(String param_file,
int phase) |
FloatNet(String param_file,
int phase,
int level,
StringVector stages) |
Modifier and Type | Method and Description |
---|---|
void |
add_after_backward(FloatNet.Callback value) |
void |
add_after_forward(FloatNet.Callback value) |
void |
add_before_backward(FloatNet.Callback value) |
void |
add_before_forward(FloatNet.Callback value) |
PointerPointer |
after_backward() |
PointerPointer |
after_forward() |
void |
Backward()
The network backward should take no input and output, since it solely
computes the gradient w.r.t. the parameters, and the data has already been
provided during the forward pass.
|
void |
BackwardFrom(int start) |
void |
BackwardFromTo(int start,
int end) |
void |
BackwardTo(int end) |
PointerPointer |
before_backward() |
PointerPointer |
before_forward() |
FloatBlob |
blob_by_name(BytePointer blob_name) |
FloatBlob |
blob_by_name(String blob_name) |
FloatPointer |
blob_loss_weights() |
StringVector |
blob_names()
\brief returns the blob names
|
FloatBlobSharedVector |
blobs()
\brief returns the blobs
|
IntPointer |
bottom_ids(int i)
\brief returns the ids of the bottom blobs of layer i
|
BoolVectorVector |
bottom_need_backward() |
FloatBlobVectorVector |
bottom_vecs()
\brief returns the bottom vecs for each layer -- usually you won't
need this unless you do per-layer checks such as gradients.
|
void |
ClearParamDiffs()
\brief Zeroes out the diffs of all net parameters.
|
void |
CopyTrainedLayersFrom(BytePointer trained_filename) |
void |
CopyTrainedLayersFrom(NetParameter param)
\brief For an already initialized net, copies the pre-trained layers from
another Net.
|
void |
CopyTrainedLayersFrom(String trained_filename) |
void |
CopyTrainedLayersFromBinaryProto(BytePointer trained_filename) |
void |
CopyTrainedLayersFromBinaryProto(String trained_filename) |
void |
CopyTrainedLayersFromHDF5(BytePointer trained_filename) |
void |
CopyTrainedLayersFromHDF5(String trained_filename) |
static void |
FilterNet(NetParameter param,
NetParameter param_filtered)
\brief Remove layers that the user specified should be excluded given the current
phase, level, and stage.
|
FloatBlobVector |
Forward() |
FloatBlobVector |
Forward(float[] loss) |
FloatBlobVector |
Forward(FloatBlobVector bottom) |
FloatBlobVector |
Forward(FloatBlobVector bottom,
float[] loss) |
FloatBlobVector |
Forward(FloatBlobVector bottom,
FloatBuffer loss) |
FloatBlobVector |
Forward(FloatBlobVector bottom,
FloatPointer loss)
\brief DEPRECATED; set input blobs then use Forward() instead.
|
FloatBlobVector |
Forward(FloatBuffer loss) |
FloatBlobVector |
Forward(FloatPointer loss)
\brief Run Forward and return the result.
|
float |
ForwardBackward() |
float |
ForwardFrom(int start) |
float |
ForwardFromTo(int start,
int end)
The From and To variants of Forward and Backward operate on the
(topological) ordering by which the net is specified.
|
FloatBlobVector |
ForwardPrefilled() |
FloatBlobVector |
ForwardPrefilled(float[] loss) |
FloatBlobVector |
ForwardPrefilled(FloatBuffer loss) |
FloatBlobVector |
ForwardPrefilled(FloatPointer loss)
\brief DEPRECATED; use Forward() instead.
|
float |
ForwardTo(int end) |
boolean |
has_blob(BytePointer blob_name) |
boolean |
has_blob(String blob_name) |
boolean |
has_layer(BytePointer layer_name) |
boolean |
has_layer(String layer_name) |
BoolVector |
has_params_decay() |
BoolVector |
has_params_lr() |
void |
Init(NetParameter param)
\brief Initialize a network with a NetParameter.
|
IntPointer |
input_blob_indices() |
FloatBlobVector |
input_blobs() |
FloatLayer |
layer_by_name(BytePointer layer_name) |
<L extends FloatLayer> |
layer_by_name(Class<L> cls,
BytePointer layer_name) |
<L extends FloatLayer> |
layer_by_name(Class<L> cls,
String layer_name) |
FloatLayer |
layer_by_name(String layer_name) |
StringVector |
layer_names()
\brief returns the layer names
|
BoolVector |
layer_need_backward() |
FloatLayerSharedVector |
layers()
\brief returns the layers
|
FloatBlobVector |
learnable_params() |
BytePointer |
name()
\brief returns the network name.
|
int |
num_inputs()
\brief Input and output blob numbers
|
int |
num_outputs() |
IntPointer |
output_blob_indices() |
FloatBlobVector |
output_blobs() |
StringVector |
param_display_names() |
StringIntMap |
param_names_index() |
IntPointer |
param_owners() |
FloatPointer |
params_lr()
\brief returns the learnable parameter learning rate multipliers
|
FloatPointer |
params_weight_decay()
\brief returns the learnable parameter decay multipliers
|
FloatBlobSharedVector |
params()
\brief returns the parameters
|
int |
phase()
\brief returns the phase: TRAIN or TEST
|
void |
Reshape()
\brief Reshape all layers from bottom to top.
|
void |
set_debug_info(boolean value) |
void |
ShareTrainedLayersWith(FloatNet other)
\brief For an already initialized net, implicitly copies (i.e., using no
additional memory) the pre-trained layers from another Net.
|
void |
ShareWeights()
\brief Shares weight data of owner blobs with shared blobs.
|
static boolean |
StateMeetsRule(NetState state,
NetStateRule rule,
BytePointer layer_name)
\brief return whether NetState state meets NetStateRule rule
|
static boolean |
StateMeetsRule(NetState state,
NetStateRule rule,
String layer_name) |
void |
ToHDF5(BytePointer filename) |
void |
ToHDF5(BytePointer filename,
boolean write_diff)
\brief Writes the net to an HDF5 file.
|
void |
ToHDF5(String filename) |
void |
ToHDF5(String filename,
boolean write_diff) |
IntPointer |
top_ids(int i)
\brief returns the ids of the top blobs of layer i
|
FloatBlobVectorVector |
top_vecs()
\brief returns the top vecs for each layer -- usually you won't
need this unless you do per-layer checks such as gradients.
|
void |
ToProto(NetParameter param) |
void |
ToProto(NetParameter param,
boolean write_diff)
\brief Writes the net to a proto.
|
void |
Update()
\brief Updates the network weights based on the diff values computed.
|
address, asBuffer, asByteBuffer, availablePhysicalBytes, calloc, capacity, capacity, close, deallocate, deallocate, deallocateReferences, deallocator, deallocator, equals, fill, formatBytes, free, getDirectBufferAddress, getPointer, getPointer, getPointer, getPointer, hashCode, interruptDeallocatorThread, isNull, isNull, limit, limit, malloc, maxBytes, maxPhysicalBytes, memchr, memcmp, memcpy, memmove, memset, offsetAddress, offsetof, offsetof, parseBytes, physicalBytes, physicalBytesInaccurate, position, position, put, realloc, referenceCount, releaseReference, retainReference, setNull, sizeof, sizeof, toString, totalBytes, totalCount, totalPhysicalBytes, withDeallocator, zero
public FloatNet(Pointer p)
Pointer(Pointer)
public FloatNet(@Const @ByRef NetParameter param)
public FloatNet(@StdString BytePointer param_file, @Cast(value="caffe::Phase") int phase, int level, @Const StringVector stages)
public FloatNet(@StdString BytePointer param_file, @Cast(value="caffe::Phase") int phase)
public FloatNet(@StdString String param_file, @Cast(value="caffe::Phase") int phase, int level, @Const StringVector stages)
public FloatNet(@StdString String param_file, @Cast(value="caffe::Phase") int phase)
public void Init(@Const @ByRef NetParameter param)
@Const @ByRef public FloatBlobVector Forward(FloatPointer loss)
@Const @ByRef public FloatBlobVector Forward()
@Const @ByRef public FloatBlobVector Forward(FloatBuffer loss)
@Const @ByRef public FloatBlobVector Forward(float[] loss)
@Const @ByRef public FloatBlobVector ForwardPrefilled(FloatPointer loss)
@Const @ByRef public FloatBlobVector ForwardPrefilled()
@Const @ByRef public FloatBlobVector ForwardPrefilled(FloatBuffer loss)
@Const @ByRef public FloatBlobVector ForwardPrefilled(float[] loss)
public float ForwardFromTo(int start, int end)
public float ForwardFrom(int start)
public float ForwardTo(int end)
@Const @ByRef public FloatBlobVector Forward(@Const @ByRef FloatBlobVector bottom, FloatPointer loss)
@Const @ByRef public FloatBlobVector Forward(@Const @ByRef FloatBlobVector bottom)
@Const @ByRef public FloatBlobVector Forward(@Const @ByRef FloatBlobVector bottom, FloatBuffer loss)
@Const @ByRef public FloatBlobVector Forward(@Const @ByRef FloatBlobVector bottom, float[] loss)
public void ClearParamDiffs()
public void Backward()
public void BackwardFromTo(int start, int end)
public void BackwardFrom(int start)
public void BackwardTo(int end)
public void Reshape()
public float ForwardBackward()
public void Update()
public void ShareWeights()
public void ShareTrainedLayersWith(@Const FloatNet other)
public void CopyTrainedLayersFrom(@Const @ByRef NetParameter param)
public void CopyTrainedLayersFrom(@StdString BytePointer trained_filename)
public void CopyTrainedLayersFrom(@StdString String trained_filename)
public void CopyTrainedLayersFromBinaryProto(@StdString BytePointer trained_filename)
public void CopyTrainedLayersFromBinaryProto(@StdString String trained_filename)
public void CopyTrainedLayersFromHDF5(@StdString BytePointer trained_filename)
public void CopyTrainedLayersFromHDF5(@StdString String trained_filename)
public void ToProto(NetParameter param, @Cast(value="bool") boolean write_diff)
public void ToProto(NetParameter param)
public void ToHDF5(@StdString BytePointer filename, @Cast(value="bool") boolean write_diff)
public void ToHDF5(@StdString BytePointer filename)
public void ToHDF5(@StdString String filename, @Cast(value="bool") boolean write_diff)
public void ToHDF5(@StdString String filename)
@StdString public BytePointer name()
@Const @ByRef public StringVector layer_names()
@Const @ByRef public StringVector blob_names()
@Const @ByRef public FloatBlobSharedVector blobs()
@Const @ByRef public FloatLayerSharedVector layers()
@Const @ByRef public FloatBlobVectorVector bottom_vecs()
@Const @ByRef public FloatBlobVectorVector top_vecs()
@StdVector public IntPointer top_ids(int i)
@StdVector public IntPointer bottom_ids(int i)
@Const @ByRef public BoolVectorVector bottom_need_backward()
@StdVector public FloatPointer blob_loss_weights()
@Const @ByRef public BoolVector layer_need_backward()
@Const @ByRef public FloatBlobSharedVector params()
@Const @ByRef public FloatBlobVector learnable_params()
@StdVector public FloatPointer params_lr()
@Const @ByRef public BoolVector has_params_lr()
@StdVector public FloatPointer params_weight_decay()
@Const @ByRef public BoolVector has_params_decay()
@Const @ByRef public StringIntMap param_names_index()
@StdVector public IntPointer param_owners()
@Const @ByRef public StringVector param_display_names()
public int num_inputs()
public int num_outputs()
@Const @ByRef public FloatBlobVector input_blobs()
@Const @ByRef public FloatBlobVector output_blobs()
@StdVector public IntPointer input_blob_indices()
@StdVector public IntPointer output_blob_indices()
@Cast(value="bool") public boolean has_blob(@StdString BytePointer blob_name)
@Cast(value="bool") public boolean has_blob(@StdString String blob_name)
@Const @SharedPtr @ByVal public FloatBlob blob_by_name(@StdString BytePointer blob_name)
@Const @SharedPtr @ByVal public FloatBlob blob_by_name(@StdString String blob_name)
@Cast(value="bool") public boolean has_layer(@StdString BytePointer layer_name)
@Cast(value="bool") public boolean has_layer(@StdString String layer_name)
public FloatLayer layer_by_name(BytePointer layer_name)
public FloatLayer layer_by_name(String layer_name)
@Const @Cast(value={"","boost::shared_ptr<caffe::Layer<float> >"}) @SharedPtr @ByVal public <L extends FloatLayer> L layer_by_name(Class<L> cls, @StdString BytePointer layer_name)
@Const @Cast(value={"","boost::shared_ptr<caffe::Layer<float> >"}) @SharedPtr @ByVal public <L extends FloatLayer> L layer_by_name(Class<L> cls, @StdString String layer_name)
public static void FilterNet(@Const @ByRef NetParameter param, NetParameter param_filtered)
@Cast(value="bool") public static boolean StateMeetsRule(@Const @ByRef NetState state, @Const @ByRef NetStateRule rule, @StdString BytePointer layer_name)
@Cast(value="bool") public static boolean StateMeetsRule(@Const @ByRef NetState state, @Const @ByRef NetStateRule rule, @StdString String layer_name)
@Cast(value="caffe::Net<float>::Callback**") @StdVector public PointerPointer before_forward()
public void add_before_forward(FloatNet.Callback value)
@Cast(value="caffe::Net<float>::Callback**") @StdVector public PointerPointer after_forward()
public void add_after_forward(FloatNet.Callback value)
@Cast(value="caffe::Net<float>::Callback**") @StdVector public PointerPointer before_backward()
public void add_before_backward(FloatNet.Callback value)
@Cast(value="caffe::Net<float>::Callback**") @StdVector public PointerPointer after_backward()
public void add_after_backward(FloatNet.Callback value)
Copyright © 2022. All rights reserved.