@Name(value="caffe::Layer<float>") @NoOffset @Properties(inherit=caffe.class) public class FloatLayer extends Pointer
Pointer.CustomDeallocator, Pointer.Deallocator, Pointer.NativeDeallocator, Pointer.ReferenceCounter
Constructor and Description |
---|
FloatLayer(LayerParameter param)
You should not implement your own constructor.
|
FloatLayer(Pointer p)
Pointer cast constructor.
|
Modifier and Type | Method and Description |
---|---|
boolean |
AllowForceBackward(int bottom_index)
\brief Return whether to allow force_backward for a given bottom blob
index.
|
boolean |
AutoTopBlobs()
\brief Return whether "anonymous" top blobs are created automatically
by the layer.
|
protected void |
Backward_cpu(FloatBlobVector top,
BoolVector propagate_down,
FloatBlobVector bottom) |
protected void |
Backward_gpu(FloatBlobVector top,
BoolVector propagate_down,
FloatBlobVector bottom) |
void |
Backward(FloatBlobVector top,
BoolVector propagate_down,
FloatBlobVector bottom)
\brief Given the top blob error gradients, compute the bottom blob error
gradients.
|
FloatBlobSharedVector |
blobs()
\brief Returns the vector of learnable parameter blobs.
|
protected void |
CheckBlobCounts(FloatBlobVector bottom,
FloatBlobVector top) |
boolean |
EqualNumBottomTopBlobs()
\brief Returns true if the layer requires an equal number of bottom and
top blobs.
|
int |
ExactNumBottomBlobs()
\brief Returns the exact number of bottom blobs required by the layer,
or -1 if no exact number is required.
|
int |
ExactNumTopBlobs()
\brief Returns the exact number of top blobs required by the layer,
or -1 if no exact number is required.
|
protected void |
Forward_cpu(FloatBlobVector bottom,
FloatBlobVector top) |
protected void |
Forward_gpu(FloatBlobVector bottom,
FloatBlobVector top) |
float |
Forward(FloatBlobVector bottom,
FloatBlobVector top)
\brief Given the bottom blobs, compute the top blobs and the loss.
|
LayerParameter |
layer_param()
\brief Returns the layer parameter.
|
void |
LayerSetUp(FloatBlobVector bottom,
FloatBlobVector top)
\brief Does layer-specific setup: your layer should implement this function
as well as Reshape.
|
float |
loss(int top_index)
\brief Returns the scalar loss associated with a top blob at a given index.
|
int |
MaxBottomBlobs()
\brief Returns the maximum number of bottom blobs required by the layer,
or -1 if no maximum number is required.
|
int |
MaxTopBlobs()
\brief Returns the maximum number of top blobs required by the layer,
or -1 if no maximum number is required.
|
int |
MinBottomBlobs()
\brief Returns the minimum number of bottom blobs required by the layer,
or -1 if no minimum number is required.
|
int |
MinTopBlobs()
\brief Returns the minimum number of top blobs required by the layer,
or -1 if no minimum number is required.
|
boolean |
param_propagate_down(int param_id)
\brief Specifies whether the layer should compute gradients w.r.t.
|
void |
Reshape(FloatBlobVector bottom,
FloatBlobVector top)
\brief Adjust the shapes of top blobs and internal buffers to accommodate
the shapes of the bottom blobs.
|
void |
set_loss(int top_index,
float value)
\brief Sets the loss associated with a top blob at a given index.
|
void |
set_param_propagate_down(int param_id,
boolean value)
\brief Sets whether the layer should compute gradients w.r.t.
|
void |
SetUp(FloatBlobVector bottom,
FloatBlobVector top)
\brief Implements common layer setup functionality.
|
void |
ToProto(LayerParameter param,
boolean write_diff)
\brief Writes the layer parameter to a protocol buffer.
|
BytePointer |
type()
\brief Returns the layer type.
|
address, asBuffer, asByteBuffer, availablePhysicalBytes, calloc, capacity, capacity, close, deallocate, deallocate, deallocateReferences, deallocator, deallocator, equals, fill, formatBytes, free, getDirectBufferAddress, getPointer, getPointer, getPointer, getPointer, hashCode, interruptDeallocatorThread, isNull, isNull, limit, limit, malloc, maxBytes, maxPhysicalBytes, memchr, memcmp, memcpy, memmove, memset, offsetAddress, offsetof, offsetof, parseBytes, physicalBytes, physicalBytesInaccurate, position, position, put, realloc, referenceCount, releaseReference, retainReference, setNull, sizeof, sizeof, toString, totalBytes, totalCount, totalPhysicalBytes, withDeallocator, zero
public FloatLayer(Pointer p)
Pointer(Pointer)
public FloatLayer(@Const @ByRef LayerParameter param)
public void SetUp(@Const @ByRef FloatBlobVector bottom, @Const @ByRef FloatBlobVector top)
bottom
- the preshaped input blobs
top
- the allocated but unshaped output blobs, to be shaped by Reshape
Checks that the number of bottom and top blobs is correct.
Calls LayerSetUp to do special layer setup for individual layer types,
followed by Reshape to set up sizes of top blobs and internal buffers.
Sets up the loss weight multiplier blobs for any non-zero loss weights.
This method may not be overridden.
@Virtual public void LayerSetUp(@Const @ByRef FloatBlobVector bottom, @Const @ByRef FloatBlobVector top)
bottom
- the preshaped input blobs, whose data fields store the input data for
this layer
top
- the allocated but unshaped output blobs
This method should do one-time layer specific setup. This includes reading
and processing relevant parameters from the layer_param_
.
Setting up the shapes of top blobs and internal buffers should be done in
Reshape
, which will be called before the forward pass to
adjust the top blob sizes.
@Virtual(value=true) public void Reshape(@Const @ByRef FloatBlobVector bottom, @Const @ByRef FloatBlobVector top)
bottom
- the input blobs, with the requested input shapes
top
- the top blobs, which should be reshaped as needed
This method should reshape top blobs as needed according to the shapes
of the bottom (input) blobs, as well as reshaping any internal buffers
and making any other necessary adjustments so that the layer can
accommodate the bottom blobs.
public float Forward(@Const @ByRef FloatBlobVector bottom, @Const @ByRef FloatBlobVector top)
bottom
- the input blobs, whose data fields store the input data for this layer
top
- the preshaped output blobs, whose data fields will store this layer's
outputs
public void Backward(@Const @ByRef FloatBlobVector top, @Const @ByRef BoolVector propagate_down, @Const @ByRef FloatBlobVector bottom)
top
- the output blobs, whose diff fields store the gradient of the error
with respect to themselves
propagate_down
- a vector with equal length to bottom, with each index indicating
whether to propagate the error gradients down to the bottom blob at
the corresponding index
bottom
- the input blobs, whose diff fields will store the gradient of the error
with respect to themselves after Backward is run
The Backward wrapper calls the relevant device wrapper function
(Backward_cpu or Backward_gpu) to compute the bottom blob diffs given the
top blob diffs.
Your layer should implement Backward_cpu and (optionally) Backward_gpu.
@ByRef public FloatBlobSharedVector blobs()
@Const @ByRef public LayerParameter layer_param()
@Virtual public void ToProto(LayerParameter param, @Cast(value="bool") boolean write_diff)
public float loss(@Const int top_index)
public void set_loss(@Const int top_index, @Const float value)
@Virtual @Const(value={false,false,true}) @Cast(value="const char*") public BytePointer type()
@Virtual @Const(value={false,false,true}) public int ExactNumBottomBlobs()
@Virtual @Const(value={false,false,true}) public int MinBottomBlobs()
@Virtual @Const(value={false,false,true}) public int MaxBottomBlobs()
@Virtual @Const(value={false,false,true}) public int ExactNumTopBlobs()
@Virtual @Const(value={false,false,true}) public int MinTopBlobs()
@Virtual @Const(value={false,false,true}) public int MaxTopBlobs()
@Virtual @Cast(value="bool") @Const(value={false,false,true}) public boolean EqualNumBottomTopBlobs()
@Virtual @Cast(value="bool") @Const(value={false,false,true}) public boolean AutoTopBlobs()
@Virtual @Cast(value="bool") @Const(value={false,false,true}) public boolean AllowForceBackward(@Const int bottom_index)
@Cast(value="bool") public boolean param_propagate_down(@Const int param_id)
public void set_param_propagate_down(@Const int param_id, @Cast(value="const bool") boolean value)
@Virtual(value=true) protected void Forward_cpu(@Const @ByRef FloatBlobVector bottom, @Const @ByRef FloatBlobVector top)
@Virtual protected void Forward_gpu(@Const @ByRef FloatBlobVector bottom, @Const @ByRef FloatBlobVector top)
@Virtual(value=true) protected void Backward_cpu(@Const @ByRef FloatBlobVector top, @Const @ByRef BoolVector propagate_down, @Const @ByRef FloatBlobVector bottom)
@Virtual protected void Backward_gpu(@Const @ByRef FloatBlobVector top, @Const @ByRef BoolVector propagate_down, @Const @ByRef FloatBlobVector bottom)
@Virtual protected void CheckBlobCounts(@Const @ByRef FloatBlobVector bottom, @Const @ByRef FloatBlobVector top)
Copyright © 2022. All rights reserved.