@Name(value="caffe::Net<double>") @NoOffset @Properties(inherit=caffe.class) public class DoubleNet extends Pointer
Modifier and Type | Class and Description |
---|---|
static class |
DoubleNet.Callback |
Pointer.CustomDeallocator, Pointer.Deallocator, Pointer.NativeDeallocator, Pointer.ReferenceCounter
Constructor and Description |
---|
DoubleNet(BytePointer param_file,
int phase) |
DoubleNet(BytePointer param_file,
int phase,
int level,
StringVector stages) |
DoubleNet(NetParameter param) |
DoubleNet(Pointer p)
Pointer cast constructor.
|
DoubleNet(String param_file,
int phase) |
DoubleNet(String param_file,
int phase,
int level,
StringVector stages) |
Modifier and Type | Method and Description |
---|---|
void |
add_after_backward(DoubleNet.Callback value) |
void |
add_after_forward(DoubleNet.Callback value) |
void |
add_before_backward(DoubleNet.Callback value) |
void |
add_before_forward(DoubleNet.Callback value) |
PointerPointer |
after_backward() |
PointerPointer |
after_forward() |
void |
Backward()
The network backward should take no input and output, since it solely
computes the gradient w.r.t. the parameters, and the data has already been
provided during the forward pass.
|
void |
BackwardFrom(int start) |
void |
BackwardFromTo(int start,
int end) |
void |
BackwardTo(int end) |
PointerPointer |
before_backward() |
PointerPointer |
before_forward() |
DoubleBlob |
blob_by_name(BytePointer blob_name) |
DoubleBlob |
blob_by_name(String blob_name) |
DoublePointer |
blob_loss_weights() |
StringVector |
blob_names()
\brief returns the blob names
|
DoubleBlobSharedVector |
blobs()
\brief returns the blobs
|
IntPointer |
bottom_ids(int i)
\brief returns the ids of the bottom blobs of layer i
|
BoolVectorVector |
bottom_need_backward() |
DoubleBlobVectorVector |
bottom_vecs()
\brief returns the bottom vecs for each layer -- usually you won't
need this unless you do per-layer checks such as gradients.
|
void |
ClearParamDiffs()
\brief Zeroes out the diffs of all net parameters.
|
void |
CopyTrainedLayersFrom(BytePointer trained_filename) |
void |
CopyTrainedLayersFrom(NetParameter param)
\brief For an already initialized net, copies the pre-trained layers from
another Net.
|
void |
CopyTrainedLayersFrom(String trained_filename) |
void |
CopyTrainedLayersFromBinaryProto(BytePointer trained_filename) |
void |
CopyTrainedLayersFromBinaryProto(String trained_filename) |
void |
CopyTrainedLayersFromHDF5(BytePointer trained_filename) |
void |
CopyTrainedLayersFromHDF5(String trained_filename) |
static void |
FilterNet(NetParameter param,
NetParameter param_filtered)
\brief Remove layers that the user specified should be excluded given the current
phase, level, and stage.
|
DoubleBlobVector |
Forward() |
DoubleBlobVector |
Forward(double[] loss) |
DoubleBlobVector |
Forward(DoubleBlobVector bottom) |
DoubleBlobVector |
Forward(DoubleBlobVector bottom,
double[] loss) |
DoubleBlobVector |
Forward(DoubleBlobVector bottom,
DoubleBuffer loss) |
DoubleBlobVector |
Forward(DoubleBlobVector bottom,
DoublePointer loss)
\brief DEPRECATED; set input blobs then use Forward() instead.
|
DoubleBlobVector |
Forward(DoubleBuffer loss) |
DoubleBlobVector |
Forward(DoublePointer loss)
\brief Run Forward and return the result.
|
double |
ForwardBackward() |
double |
ForwardFrom(int start) |
double |
ForwardFromTo(int start,
int end)
The From and To variants of Forward and Backward operate on the
(topological) ordering by which the net is specified.
|
DoubleBlobVector |
ForwardPrefilled() |
DoubleBlobVector |
ForwardPrefilled(double[] loss) |
DoubleBlobVector |
ForwardPrefilled(DoubleBuffer loss) |
DoubleBlobVector |
ForwardPrefilled(DoublePointer loss)
\brief DEPRECATED; use Forward() instead.
|
double |
ForwardTo(int end) |
boolean |
has_blob(BytePointer blob_name) |
boolean |
has_blob(String blob_name) |
boolean |
has_layer(BytePointer layer_name) |
boolean |
has_layer(String layer_name) |
BoolVector |
has_params_decay() |
BoolVector |
has_params_lr() |
void |
Init(NetParameter param)
\brief Initialize a network with a NetParameter.
|
IntPointer |
input_blob_indices() |
DoubleBlobVector |
input_blobs() |
DoubleLayer |
layer_by_name(BytePointer layer_name) |
<L extends DoubleLayer> |
layer_by_name(Class<L> cls,
BytePointer layer_name) |
<L extends DoubleLayer> |
layer_by_name(Class<L> cls,
String layer_name) |
DoubleLayer |
layer_by_name(String layer_name) |
StringVector |
layer_names()
\brief returns the layer names
|
BoolVector |
layer_need_backward() |
DoubleLayerSharedVector |
layers()
\brief returns the layers
|
DoubleBlobVector |
learnable_params() |
BytePointer |
name()
\brief returns the network name.
|
int |
num_inputs()
\brief Input and output blob numbers
|
int |
num_outputs() |
IntPointer |
output_blob_indices() |
DoubleBlobVector |
output_blobs() |
StringVector |
param_display_names() |
StringIntMap |
param_names_index() |
IntPointer |
param_owners() |
FloatPointer |
params_lr()
\brief returns the learnable parameter learning rate multipliers
|
FloatPointer |
params_weight_decay()
\brief returns the learnable parameter decay multipliers
|
DoubleBlobSharedVector |
params()
\brief returns the parameters
|
int |
phase()
\brief returns the phase: TRAIN or TEST
|
void |
Reshape()
\brief Reshape all layers from bottom to top.
|
void |
set_debug_info(boolean value) |
void |
ShareTrainedLayersWith(DoubleNet other)
\brief For an already initialized net, implicitly copies (i.e., using no
additional memory) the pre-trained layers from another Net.
|
void |
ShareWeights()
\brief Shares weight data of owner blobs with shared blobs.
|
static boolean |
StateMeetsRule(NetState state,
NetStateRule rule,
BytePointer layer_name)
\brief returns whether NetState state meets NetStateRule rule
|
static boolean |
StateMeetsRule(NetState state,
NetStateRule rule,
String layer_name) |
void |
ToHDF5(BytePointer filename) |
void |
ToHDF5(BytePointer filename,
boolean write_diff)
\brief Writes the net to an HDF5 file.
|
void |
ToHDF5(String filename) |
void |
ToHDF5(String filename,
boolean write_diff) |
IntPointer |
top_ids(int i)
\brief returns the ids of the top blobs of layer i
|
DoubleBlobVectorVector |
top_vecs()
\brief returns the top vecs for each layer -- usually you won't
need this unless you do per-layer checks such as gradients.
|
void |
ToProto(NetParameter param) |
void |
ToProto(NetParameter param,
boolean write_diff)
\brief Writes the net to a proto.
|
void |
Update()
\brief Updates the network weights based on the diff values computed.
|
address, asBuffer, asByteBuffer, availablePhysicalBytes, calloc, capacity, capacity, close, deallocate, deallocate, deallocateReferences, deallocator, deallocator, equals, fill, formatBytes, free, getDirectBufferAddress, getPointer, getPointer, getPointer, getPointer, hashCode, interruptDeallocatorThread, isNull, isNull, limit, limit, malloc, maxBytes, maxPhysicalBytes, memchr, memcmp, memcpy, memmove, memset, offsetAddress, offsetof, offsetof, parseBytes, physicalBytes, physicalBytesInaccurate, position, position, put, realloc, referenceCount, releaseReference, retainReference, setNull, sizeof, sizeof, toString, totalBytes, totalCount, totalPhysicalBytes, withDeallocator, zero
public DoubleNet(Pointer p)
Pointer(Pointer)
public DoubleNet(@Const @ByRef NetParameter param)
public DoubleNet(@StdString BytePointer param_file, @Cast(value="caffe::Phase") int phase, int level, @Const StringVector stages)
public DoubleNet(@StdString BytePointer param_file, @Cast(value="caffe::Phase") int phase)
public DoubleNet(@StdString String param_file, @Cast(value="caffe::Phase") int phase, int level, @Const StringVector stages)
public DoubleNet(@StdString String param_file, @Cast(value="caffe::Phase") int phase)
public void Init(@Const @ByRef NetParameter param)
@Const @ByRef public DoubleBlobVector Forward(DoublePointer loss)
@Const @ByRef public DoubleBlobVector Forward()
@Const @ByRef public DoubleBlobVector Forward(DoubleBuffer loss)
@Const @ByRef public DoubleBlobVector Forward(double[] loss)
@Const @ByRef public DoubleBlobVector ForwardPrefilled(DoublePointer loss)
@Const @ByRef public DoubleBlobVector ForwardPrefilled()
@Const @ByRef public DoubleBlobVector ForwardPrefilled(DoubleBuffer loss)
@Const @ByRef public DoubleBlobVector ForwardPrefilled(double[] loss)
public double ForwardFromTo(int start, int end)
public double ForwardFrom(int start)
public double ForwardTo(int end)
@Const @ByRef public DoubleBlobVector Forward(@Const @ByRef DoubleBlobVector bottom, DoublePointer loss)
@Const @ByRef public DoubleBlobVector Forward(@Const @ByRef DoubleBlobVector bottom)
@Const @ByRef public DoubleBlobVector Forward(@Const @ByRef DoubleBlobVector bottom, DoubleBuffer loss)
@Const @ByRef public DoubleBlobVector Forward(@Const @ByRef DoubleBlobVector bottom, double[] loss)
public void ClearParamDiffs()
public void Backward()
public void BackwardFromTo(int start, int end)
public void BackwardFrom(int start)
public void BackwardTo(int end)
public void Reshape()
public double ForwardBackward()
public void Update()
public void ShareWeights()
public void ShareTrainedLayersWith(@Const DoubleNet other)
public void CopyTrainedLayersFrom(@Const @ByRef NetParameter param)
public void CopyTrainedLayersFrom(@StdString BytePointer trained_filename)
public void CopyTrainedLayersFrom(@StdString String trained_filename)
public void CopyTrainedLayersFromBinaryProto(@StdString BytePointer trained_filename)
public void CopyTrainedLayersFromBinaryProto(@StdString String trained_filename)
public void CopyTrainedLayersFromHDF5(@StdString BytePointer trained_filename)
public void CopyTrainedLayersFromHDF5(@StdString String trained_filename)
public void ToProto(NetParameter param, @Cast(value="bool") boolean write_diff)
public void ToProto(NetParameter param)
public void ToHDF5(@StdString BytePointer filename, @Cast(value="bool") boolean write_diff)
public void ToHDF5(@StdString BytePointer filename)
public void ToHDF5(@StdString String filename, @Cast(value="bool") boolean write_diff)
public void ToHDF5(@StdString String filename)
@StdString public BytePointer name()
@Const @ByRef public StringVector layer_names()
@Const @ByRef public StringVector blob_names()
@Const @ByRef public DoubleBlobSharedVector blobs()
@Const @ByRef public DoubleLayerSharedVector layers()
@Const @ByRef public DoubleBlobVectorVector bottom_vecs()
@Const @ByRef public DoubleBlobVectorVector top_vecs()
@StdVector public IntPointer top_ids(int i)
@StdVector public IntPointer bottom_ids(int i)
@Const @ByRef public BoolVectorVector bottom_need_backward()
@StdVector public DoublePointer blob_loss_weights()
@Const @ByRef public BoolVector layer_need_backward()
@Const @ByRef public DoubleBlobSharedVector params()
@Const @ByRef public DoubleBlobVector learnable_params()
@StdVector public FloatPointer params_lr()
@Const @ByRef public BoolVector has_params_lr()
@StdVector public FloatPointer params_weight_decay()
@Const @ByRef public BoolVector has_params_decay()
@Const @ByRef public StringIntMap param_names_index()
@StdVector public IntPointer param_owners()
@Const @ByRef public StringVector param_display_names()
public int num_inputs()
public int num_outputs()
@Const @ByRef public DoubleBlobVector input_blobs()
@Const @ByRef public DoubleBlobVector output_blobs()
@StdVector public IntPointer input_blob_indices()
@StdVector public IntPointer output_blob_indices()
@Cast(value="bool") public boolean has_blob(@StdString BytePointer blob_name)
@Cast(value="bool") public boolean has_blob(@StdString String blob_name)
@Const @SharedPtr @ByVal public DoubleBlob blob_by_name(@StdString BytePointer blob_name)
@Const @SharedPtr @ByVal public DoubleBlob blob_by_name(@StdString String blob_name)
@Cast(value="bool") public boolean has_layer(@StdString BytePointer layer_name)
@Cast(value="bool") public boolean has_layer(@StdString String layer_name)
public DoubleLayer layer_by_name(BytePointer layer_name)
public DoubleLayer layer_by_name(String layer_name)
@Const @Cast(value={"","boost::shared_ptr<caffe::Layer<double> >"}) @SharedPtr @ByVal public <L extends DoubleLayer> L layer_by_name(Class<L> cls, @StdString BytePointer layer_name)
@Const @Cast(value={"","boost::shared_ptr<caffe::Layer<double> >"}) @SharedPtr @ByVal public <L extends DoubleLayer> L layer_by_name(Class<L> cls, @StdString String layer_name)
public static void FilterNet(@Const @ByRef NetParameter param, NetParameter param_filtered)
@Cast(value="bool") public static boolean StateMeetsRule(@Const @ByRef NetState state, @Const @ByRef NetStateRule rule, @StdString BytePointer layer_name)
@Cast(value="bool") public static boolean StateMeetsRule(@Const @ByRef NetState state, @Const @ByRef NetStateRule rule, @StdString String layer_name)
@Cast(value="caffe::Net<double>::Callback**") @StdVector public PointerPointer before_forward()
public void add_before_forward(DoubleNet.Callback value)
@Cast(value="caffe::Net<double>::Callback**") @StdVector public PointerPointer after_forward()
public void add_after_forward(DoubleNet.Callback value)
@Cast(value="caffe::Net<double>::Callback**") @StdVector public PointerPointer before_backward()
public void add_before_backward(DoubleNet.Callback value)
@Cast(value="caffe::Net<double>::Callback**") @StdVector public PointerPointer after_backward()
public void add_after_backward(DoubleNet.Callback value)
Copyright © 2022. All rights reserved.