@Namespace(value="torch::nn") @NoOffset @Properties(inherit=torch.class) public class MultiheadAttentionImpl extends MultiheadAttentionImplCloneable
See the documentation for the torch::nn::MultiheadAttentionOptions
class to learn what constructor arguments are supported for this module.
Example:
MultiheadAttention model(MultiheadAttentionOptions(20, 10).bias(false));
Pointer.CustomDeallocator, Pointer.Deallocator, Pointer.NativeDeallocator, Pointer.ReferenceCounter
Constructor and Description
---
MultiheadAttentionImpl(long embed_dim,
long num_heads) |
MultiheadAttentionImpl(Module pointer)
Downcast constructor.
|
MultiheadAttentionImpl(MultiheadAttentionOptions options_) |
MultiheadAttentionImpl(Pointer p)
Pointer cast constructor.
|
asModule, asModule, clone, clone
apply, apply, apply, apply, apply, apply, apply, apply, buffers, buffers, children, eval, is_serializable, is_training, load, modules, modules, name, named_buffers, named_buffers, named_children, named_modules, named_modules, named_modules, named_parameters, named_parameters, parameters, parameters, pretty_print, register_buffer, register_buffer, register_module, register_module, register_parameter, register_parameter, register_parameter, register_parameter, save, shiftLeft, to, to, to, train, unregister_module, unregister_module, zero_grad
address, asBuffer, asByteBuffer, availablePhysicalBytes, calloc, capacity, capacity, close, deallocate, deallocate, deallocateReferences, deallocator, deallocator, equals, fill, formatBytes, free, getDirectBufferAddress, getPointer, getPointer, getPointer, getPointer, hashCode, interruptDeallocatorThread, isNull, isNull, limit, limit, malloc, maxBytes, maxPhysicalBytes, memchr, memcmp, memcpy, memmove, memset, offsetAddress, offsetof, offsetof, parseBytes, physicalBytes, physicalBytesInaccurate, position, position, put, realloc, referenceCount, releaseReference, retainReference, setNull, sizeof, sizeof, toString, totalBytes, totalCount, totalPhysicalBytes, withDeallocator, zero
public MultiheadAttentionImpl(Pointer p)
Pointer(Pointer)
public MultiheadAttentionImpl(Module pointer)
public MultiheadAttentionImpl(@Cast(value="int64_t") long embed_dim, @Cast(value="int64_t") long num_heads)
public MultiheadAttentionImpl(@Const @ByRef MultiheadAttentionOptions options_)
@ByVal public T_TensorTensor_T forward(@Const @ByRef Tensor query, @Const @ByRef Tensor key, @Const @ByRef Tensor value, @Const @ByRef(nullValue="torch::Tensor{}") Tensor key_padding_mask, @Cast(value="bool") boolean need_weights, @Const @ByRef(nullValue="torch::Tensor{}") Tensor attn_mask, @Cast(value="bool") boolean average_attn_weights)
@ByVal public T_TensorTensor_T forward(@Const @ByRef Tensor query, @Const @ByRef Tensor key, @Const @ByRef Tensor value)
public void reset()
MultiheadAttentionImplCloneable
reset()
must perform initialization of all members with reference
semantics, most importantly parameters, buffers and submodules.
Specified by: reset in class MultiheadAttentionImplCloneable
public void _reset_parameters()
@ByRef public MultiheadAttentionOptions options()
Returns the options with which this Module was constructed.
public MultiheadAttentionImpl options(MultiheadAttentionOptions setter)
public MultiheadAttentionImpl _qkv_same_embed_dim(boolean setter)
public MultiheadAttentionImpl in_proj_weight(Tensor setter)
public MultiheadAttentionImpl in_proj_bias(Tensor setter)
public MultiheadAttentionImpl bias_k(Tensor setter)
public MultiheadAttentionImpl bias_v(Tensor setter)
public MultiheadAttentionImpl q_proj_weight(Tensor setter)
public MultiheadAttentionImpl k_proj_weight(Tensor setter)
public MultiheadAttentionImpl v_proj_weight(Tensor setter)
public MultiheadAttentionImpl head_dim(long setter)
Copyright © 2024. All rights reserved.