Package | Description |
---|---|
org.bytedeco.pytorch | |
org.bytedeco.pytorch.cuda | |
org.bytedeco.pytorch.global | |
Modifier and Type | Method and Description |
---|---|
DataPtr |
Allocator._allocate(long n) |
DataPtr |
DataPtrVector.back() |
DataPtr |
PlacementDeleteContext.data_ptr_() |
DataPtr |
StorageImpl.data_ptr() |
DataPtr |
Storage.data_ptr() |
DataPtr |
DataPtrVector.front() |
DataPtr |
DataPtrVector.Iterator.get() |
DataPtr |
DataPtrVector.get(long i) |
DataPtr |
T_DataPtrSizeT_T.get0() |
static DataPtr |
T_DataPtrSizeT_T.get0(T_DataPtrSizeT_T container) |
DataPtr |
DataPtr.getPointer(long i) |
static DataPtr |
PlacementDeleteContext.makeDataPtr(DataPtr data_ptr,
PlacementConsumer placement_dtor,
long size,
Device device) |
DataPtr |
StorageImpl.mutable_data_ptr() |
DataPtr |
Storage.mutable_data_ptr() |
DataPtr |
DataPtr.position(long position) |
DataPtr |
StorageImpl.set_data_ptr(DataPtr data_ptr) |
DataPtr |
Storage.set_data_ptr(DataPtr data_ptr) |
Modifier and Type | Method and Description |
---|---|
PlacementDeleteContext |
PlacementDeleteContext.data_ptr_(DataPtr setter) |
static DataPtr |
PlacementDeleteContext.makeDataPtr(DataPtr data_ptr,
PlacementConsumer placement_dtor,
long size,
Device device) |
void |
DeviceGuardImplInterface.recordDataPtrOnStream(DataPtr arg0,
Stream arg1)
Ensure the caching allocator (if any) is aware that the given DataPtr is
being used on the given stream, and that it should thus avoid recycling the
DataPtr until all work on that stream is done.
|
void |
StorageImpl.set_data_ptr_noswap(DataPtr data_ptr) |
void |
Storage.set_data_ptr_noswap(DataPtr data_ptr) |
DataPtr |
StorageImpl.set_data_ptr(DataPtr data_ptr) |
DataPtr |
Storage.set_data_ptr(DataPtr data_ptr) |
void |
TensorImpl.ShareExternalPointer(DataPtr data_ptr,
TypeMeta data_type,
long size_bytes) |
void |
StorageImpl.UniqueStorageShareExternalPointer(DataPtr data_ptr,
long size_bytes)
Can only be called when use_count is 1
|
void |
Storage.UniqueStorageShareExternalPointer(DataPtr data_ptr,
long _capacity) |
Constructor and Description |
---|
PlacementDeleteContext(DataPtr data_ptr,
PlacementConsumer placement_dtor,
long size) |
Storage(Storage.use_byte_size_t arg0,
long size_bytes,
DataPtr data_ptr) |
Storage(Storage.use_byte_size_t arg0,
long size_bytes,
DataPtr data_ptr,
Allocator allocator,
boolean resizable) |
StorageImpl(StorageImpl.use_byte_size_t arg0,
SymInt size_bytes,
DataPtr data_ptr,
Allocator allocator,
boolean resizable) |
Modifier and Type | Method and Description |
---|---|
void |
CUDAAllocator.recordStream(DataPtr arg0,
CUDAStream stream) |
Modifier and Type | Method and Description |
---|---|
static boolean |
torch.equals(DataPtr dp,
PointerPointer arg1) |
static boolean |
torch.equals(PointerPointer arg0,
DataPtr dp) |
static boolean |
torch.notEquals(DataPtr dp,
PointerPointer arg1) |
static boolean |
torch.notEquals(PointerPointer arg0,
DataPtr dp) |
Copyright © 2024. All rights reserved.