|
| void | Save (const std::string &fname) const |
| | Save current Tensor to file.
|
| |
| void | Save (const char *fname) const |
| |
| void | Tofile (const std::string &fname) const |
| |
| void | Tofile (const char *fname) const |
| |
| void | Tofile (std::fstream &f) const |
| |
| void | Init (const std::vector< cytnx_uint64 > &shape, const unsigned int &dtype=Type.Double, const int &device=-1) |
| | initialize a Tensor
|
| |
| | Tensor (const std::vector< cytnx_uint64 > &shape, const unsigned int &dtype=Type.Double, const int &device=-1) |
| |
| unsigned int | dtype () const |
| | the dtype-id of the Tensor
|
| |
| int | device () const |
| | the device-id of the Tensor
|
| |
| std::string | dtype_str () const |
| | the dtype (in string) of the Tensor
|
| |
| std::string | device_str () const |
| | the device (in string) of the Tensor
|
| |
| const std::vector< cytnx_uint64 > & | shape () const |
| | the shape of the Tensor
|
| |
| cytnx_uint64 | rank () const |
| | the rank of the Tensor
|
| |
| Tensor | clone () const |
| | return a clone of the current Tensor.
|
| |
| Tensor | to (const int &device) const |
| | copy a tensor to new device
|
| |
| void | to_ (const int &device) |
| | move the current Tensor to the device.
|
| |
| const bool & | is_contiguous () const |
| |
| Tensor | permute_ (const std::vector< cytnx_uint64 > &rnks) |
| |
| Tensor | permute (const std::vector< cytnx_uint64 > &rnks) const |
| | perform tensor permute on the cytnx::Tensor and return a new instance.
|
| |
| Tensor | contiguous () const |
| | Make the Tensor contiguous by coalescing the memory (storage).
|
| |
| Tensor | contiguous_ () |
| | Make the Tensor contiguous by coalescing the memory (storage), inplacely.
|
| |
| void | reshape_ (const std::vector< cytnx_int64 > &new_shape) |
| | reshape the Tensor, inplacely
|
| |
| Tensor | reshape (const std::vector< cytnx_int64 > &new_shape) const |
| | return a new Tensor that is reshaped.
|
| |
| Tensor | astype (const int &new_type) const |
| | return a new Tensor that cast to different dtype.
|
| |
| template<class T > |
| T & | at (const std::vector< cytnx_uint64 > &locator) |
| | [C++ only] get an element at specific location.
|
| |
| template<class T > |
| const T & | at (const std::vector< cytnx_uint64 > &locator) const |
| |
| template<class T > |
| T & | item () |
| | get an element from a rank-0 Tensor
|
| |
| Tensor | get (const std::vector< cytnx::Accessor > &accessors) const |
| | get elements using Accessor (C++ API) / slices (python API)
|
| |
| void | set (const std::vector< cytnx::Accessor > &accessors, const Tensor &rhs) |
| | set elements with the input Tensor using Accessor (C++ API) / slices (python API)
|
| |
| template<class T > |
| void | set (const std::vector< cytnx::Accessor > &accessors, const T &rc) |
| | set elements with the input constant using Accessor (C++ API) / slices (python API)
|
| |
| Storage & | storage () const |
| | return the storage of current Tensor.
|
| |
| template<class T > |
| void | fill (const T &val) |
| | fill all the element of current Tensor with the value.
|
| |
| bool | equiv (const Tensor &rhs) |
| |
| Tensor | real () |
| |
| Tensor | imag () |
| |
| template<class T > |
| Tensor & | operator+= (const T &rc) |
| |
| template<class T > |
| Tensor & | operator-= (const T &rc) |
| |
| template<class T > |
| Tensor & | operator*= (const T &rc) |
| |
| template<class T > |
| Tensor & | operator/= (const T &rc) |
| |
| template<class T > |
| Tensor | Add (const T &rhs) |
| |
| template<class T > |
| Tensor & | Add_ (const T &rhs) |
| |
| template<class T > |
| Tensor | Sub (const T &rhs) |
| |
| template<class T > |
| Tensor & | Sub_ (const T &rhs) |
| |
| template<class T > |
| Tensor | Mul (const T &rhs) |
| |
| template<class T > |
| Tensor & | Mul_ (const T &rhs) |
| |
| template<class T > |
| Tensor | Div (const T &rhs) |
| |
| template<class T > |
| Tensor & | Div_ (const T &rhs) |
| |
| template<class T > |
| Tensor | Cpr (const T &rhs) |
| |
| template<class T > |
| Tensor | Mod (const T &rhs) |
| |
| Tensor | operator- () |
| |
| Tensor | flatten () const |
| |
| void | flatten_ () |
| |
| void | append (const Tensor &rhs) |
| |
| void | append (const Storage &srhs) |
| |
| template<class T > |
| void | append (const T &rhs) |
| |
| bool | same_data (const Tensor &rhs) const |
| |
| std::vector< Tensor > | Svd (const bool &is_U=true, const bool &is_vT=true) const |
| |
| std::vector< Tensor > | Eigh (const bool &is_V=true, const bool &row_v=false) const |
| |
| Tensor & | InvM_ () |
| |
| Tensor | InvM () const |
| |
| Tensor & | Inv_ (const double &clip) |
| |
| Tensor | Inv (const double &clip) const |
| |
| Tensor & | Conj_ () |
| |
| Tensor | Conj () const |
| |
| Tensor & | Exp_ () |
| |
| Tensor | Exp () const |
| |
| Tensor | Norm () const |
| |
| Tensor | Pow (const cytnx_double &p) const |
| |
| Tensor & | Pow_ (const cytnx_double &p) |
| |
| Tensor | Trace (const cytnx_uint64 &a, const cytnx_uint64 &b) const |
| |
| Tensor | Abs () const |
| |
| Tensor & | Abs_ () |
| |
| Tensor | Max () const |
| |
| Tensor | Min () const |
| |
| template<> |
| Tensor & | operator+= (const Tensor &rc) |
| |
| template<> |
| Tensor & | operator+= (const cytnx_complex128 &rc) |
| |
| template<> |
| Tensor & | operator+= (const cytnx_complex64 &rc) |
| |
| template<> |
| Tensor & | operator+= (const cytnx_double &rc) |
| |
| template<> |
| Tensor & | operator+= (const cytnx_float &rc) |
| |
| template<> |
| Tensor & | operator+= (const cytnx_int64 &rc) |
| |
| template<> |
| Tensor & | operator+= (const cytnx_uint64 &rc) |
| |
| template<> |
| Tensor & | operator+= (const cytnx_int32 &rc) |
| |
| template<> |
| Tensor & | operator+= (const cytnx_uint32 &rc) |
| |
| template<> |
| Tensor & | operator+= (const cytnx_int16 &rc) |
| |
| template<> |
| Tensor & | operator+= (const cytnx_uint16 &rc) |
| |
| template<> |
| Tensor & | operator+= (const cytnx_bool &rc) |
| |
| template<> |
| Tensor & | operator+= (const Scalar &rc) |
| |
| template<> |
| Tensor & | operator-= (const Tensor &rc) |
| |
| template<> |
| Tensor & | operator-= (const cytnx_complex128 &rc) |
| |
| template<> |
| Tensor & | operator-= (const cytnx_complex64 &rc) |
| |
| template<> |
| Tensor & | operator-= (const cytnx_double &rc) |
| |
| template<> |
| Tensor & | operator-= (const cytnx_float &rc) |
| |
| template<> |
| Tensor & | operator-= (const cytnx_int64 &rc) |
| |
| template<> |
| Tensor & | operator-= (const cytnx_uint64 &rc) |
| |
| template<> |
| Tensor & | operator-= (const cytnx_int32 &rc) |
| |
| template<> |
| Tensor & | operator-= (const cytnx_uint32 &rc) |
| |
| template<> |
| Tensor & | operator-= (const cytnx_int16 &rc) |
| |
| template<> |
| Tensor & | operator-= (const cytnx_uint16 &rc) |
| |
| template<> |
| Tensor & | operator-= (const cytnx_bool &rc) |
| |
| template<> |
| Tensor & | operator-= (const Scalar &rc) |
| |
| template<> |
| Tensor & | operator*= (const Tensor &rc) |
| |
| template<> |
| Tensor & | operator*= (const cytnx_complex128 &rc) |
| |
| template<> |
| Tensor & | operator*= (const cytnx_complex64 &rc) |
| |
| template<> |
| Tensor & | operator*= (const cytnx_double &rc) |
| |
| template<> |
| Tensor & | operator*= (const cytnx_float &rc) |
| |
| template<> |
| Tensor & | operator*= (const cytnx_int64 &rc) |
| |
| template<> |
| Tensor & | operator*= (const cytnx_uint64 &rc) |
| |
| template<> |
| Tensor & | operator*= (const cytnx_int32 &rc) |
| |
| template<> |
| Tensor & | operator*= (const cytnx_uint32 &rc) |
| |
| template<> |
| Tensor & | operator*= (const cytnx_int16 &rc) |
| |
| template<> |
| Tensor & | operator*= (const cytnx_uint16 &rc) |
| |
| template<> |
| Tensor & | operator*= (const cytnx_bool &rc) |
| |
| template<> |
| Tensor & | operator*= (const Scalar &rc) |
| |
| template<> |
| Tensor & | operator/= (const Tensor &rc) |
| |
| template<> |
| Tensor & | operator/= (const cytnx_complex128 &rc) |
| |
| template<> |
| Tensor & | operator/= (const cytnx_complex64 &rc) |
| |
| template<> |
| Tensor & | operator/= (const cytnx_double &rc) |
| |
| template<> |
| Tensor & | operator/= (const cytnx_float &rc) |
| |
| template<> |
| Tensor & | operator/= (const cytnx_int64 &rc) |
| |
| template<> |
| Tensor & | operator/= (const cytnx_uint64 &rc) |
| |
| template<> |
| Tensor & | operator/= (const cytnx_int32 &rc) |
| |
| template<> |
| Tensor & | operator/= (const cytnx_uint32 &rc) |
| |
| template<> |
| Tensor & | operator/= (const cytnx_int16 &rc) |
| |
| template<> |
| Tensor & | operator/= (const cytnx_uint16 &rc) |
| |
| template<> |
| Tensor & | operator/= (const cytnx_bool &rc) |
| |
| template<> |
| Tensor & | operator/= (const Scalar &rc) |
| |
a tensor (multi-dimensional array)
return a new Tensor that cast to different dtype.
- Parameters
-
| new_type | the new dtype. It can be any type defined in cytnx::Type |
- Returns
- [Tensor]
Note:
If the new_type is the same as dtype of the current Tensor, return self.
Example:
c++ API:
#include <iostream>
using namespace std;
return 0;
}
a tensor (multi-dimensional array)
Definition Tensor.hpp:289
Tensor astype(const int &new_type) const
return a new Tensor that cast to different dtype.
Definition Tensor.hpp:944
int device() const
the device-id of the Tensor
Definition Tensor.hpp:668
Definition Accessor.hpp:12
Tensor zeros(const cytnx_uint64 &Nelem, const unsigned int &dtype=Type.Double, const int &device=Device.cpu)
create an rank-1 Tensor with all the elements are initialized with zero.
Definition Generator.cpp:10
Type_class Type
Definition Type.cpp:137
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]
[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]
[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
Total elem: 60
type : Uint64
cytnx device: CPU
Shape : (3,4,5)
[[[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]]
[[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]]
[[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]]]
1
python API:
from cytnx import *
A = zeros([3,4,5],dtype=Type.Double)
print(A)
B = A.astype(Type.Uint64)
print(B)
C = A.astype(Type.Double)
print(C is A)
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]
[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]
[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
Total elem: 60
type : Uint64
cytnx device: CPU
Shape : (3,4,5)
[[[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]]
[[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]]
[[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]]]
True
| Tensor cytnx::Tensor::contiguous |
( |
| ) |
const |
|
inline |
Make the Tensor contiguous by coalescing the memory (storage).
- Returns
- [Tensor] a new Tensor that is with contiguous memory (storage).
See also Tensor::contiguous_()
Example:
c++ API:
#include <iostream>
using namespace std;
return 0;
}
Tensor contiguous() const
Make the Tensor contiguous by coalescing the memory (storage).
Definition Tensor.hpp:834
const bool & is_contiguous() const
Definition Tensor.hpp:772
Tensor permute(const std::vector< cytnx_uint64 > &rnks) const
perform tensor permute on the cytnx::Tensor and return a new instance.
Definition Tensor.hpp:804
const std::vector< cytnx_uint64 > & shape() const
the shape of the Tensor
Definition Tensor.hpp:689
output>
Vector Print:
Total Elements:3
[3, 4, 5]
Vector Print:
Total Elements:3
[3, 5, 4]
0
1
Vector Print:
Total Elements:3
[3, 5, 4]
0
1
python API:
from cytnx import *
A = Tensor([3,4,5])
print(A.shape())
B = A.permute(0,2,1)
print(B.shape())
C = B.contiguous()
print(B.is_contiguous())
print(C.is_contiguous())
print(C.shape())
output>
[3, 4, 5]
[3, 5, 4]
False
True
[3, 5, 4]
| Tensor cytnx::Tensor::contiguous_ |
( |
| ) |
|
|
inline |
Make the Tensor contiguous by coalescing the memory (storage), inplacely.
See also Tensor::contiguous()
Example:
c++ API:
#include <iostream>
using namespace std;
return 0;
}
output>
Vector Print:
Total Elements:3
[3, 4, 5]
Vector Print:
Total Elements:3
[3, 5, 4]
0
1
python API:
from cytnx import *
A = Tensor([3,4,5])
print(A.shape())
B = A.permute(0,2,1)
print(B.shape())
print(B.is_contiguous())
B.contiguous_()
print(B.is_contiguous())
output>
[3, 4, 5]
[3, 5, 4]
False
True
fill all the element of current Tensor with the value.
- Parameters
-
Example:
c++ API:
#include <iostream>
using namespace std;
return 0;
}
void fill(const T &val)
fill all the element of current Tensor with the value.
Definition Tensor.hpp:1159
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 ]
[4.50000e+01 4.60000e+01 4.70000e+01 4.80000e+01 4.90000e+01 ]
[5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 ]
[5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]]
[[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]]
[[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]]]
python API:
from cytnx import *
A = arange(60).reshape(3,4,5)
print(A)
A.fill(999)
print(A)
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 ]
[4.50000e+01 4.60000e+01 4.70000e+01 4.80000e+01 4.90000e+01 ]
[5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 ]
[5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]]
[[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]]
[[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]
[9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 9.99000e+02 ]]]
get elements using Accessor (C++ API) / slices (python API)
- Returns
- [Tensor]
See also Accessor for specifying coordinates with Accessor in the C++ API.
Note:
- the return will be a new Tensor instance, which does not share memory with the current Tensor.
Equivalently:
One can also use a more intuitive way to get the slice, using the [] operator.
Example:
c++ API:
#include <iostream>
using namespace std;
return 0;
}
object that mimic the python slice to access elements in C++ [this is for c++ API only].
Definition Accessor.hpp:16
static Accessor all()
access the whole rank, this is similar to [:] in python
Definition Accessor.hpp:125
static Accessor range(const cytnx_int64 &min, const cytnx_int64 &max, const cytnx_int64 &step=1)
access the range at assigned rank, this is similar to min:max:step in python
Definition Accessor.hpp:147
Accessor ac
Definition SparseUniTensor.cpp:11
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 ]
[4.50000e+01 4.60000e+01 4.70000e+01 4.80000e+01 4.90000e+01 ]
[5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 ]
[5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]]
Total elem: 12
type : Double (Float64)
cytnx device: CPU
Shape : (4,3)
[[4.20000e+01 4.30000e+01 4.40000e+01 ]
[4.70000e+01 4.80000e+01 4.90000e+01 ]
[5.20000e+01 5.30000e+01 5.40000e+01 ]
[5.70000e+01 5.80000e+01 5.90000e+01 ]]
Total elem: 12
type : Double (Float64)
cytnx device: CPU
Shape : (4,3)
[[4.20000e+01 4.30000e+01 4.40000e+01 ]
[4.70000e+01 4.80000e+01 4.90000e+01 ]
[5.20000e+01 5.30000e+01 5.40000e+01 ]
[5.70000e+01 5.80000e+01 5.90000e+01 ]]
python API:
from cytnx import *
A = arange(60).reshape(3,4,5)
print(A)
B = A[2,:,2:5:1]
print(B)
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 ]
[4.50000e+01 4.60000e+01 4.70000e+01 4.80000e+01 4.90000e+01 ]
[5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 ]
[5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]]
Total elem: 12
type : Double (Float64)
cytnx device: CPU
Shape : (4,3)
[[4.20000e+01 4.30000e+01 4.40000e+01 ]
[4.70000e+01 4.80000e+01 4.90000e+01 ]
[5.20000e+01 5.30000e+01 5.40000e+01 ]
[5.70000e+01 5.80000e+01 5.90000e+01 ]]
initialize a Tensor
- Parameters
-
| shape | the shape of tensor. |
| dtype | the dtype of tensor. This can be any of type defined in cytnx::Type
|
| device | the device that tensor to be created. This can be cytnx::Device.cpu or cytnx::Device.cuda+<gpuid> |
[Note]
- the content of the Tensor created will be uninitialized! See zeros(), ones() or arange() for generating an initialized Tensor.
Example:
c++ API:
#include <iostream>
using namespace std;
return 0;
}
void Init(const std::vector< cytnx_uint64 > &shape, const unsigned int &dtype=Type.Double, const int &device=-1)
initialize a Tensor
Definition Tensor.hpp:637
Device_class Device
Definition Device.cpp:105
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]
[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]
[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
Total elem: 60
type : Uint64
cytnx device: CPU
Shape : (3,4,5)
[[[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]]
[[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]]
[[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]]]
Total elem: 60
type : Double (Float64)
cytnx device: CUDA/GPU-id:0
Shape : (3,4,5)
[[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]
[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]
[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
python API:
from cytnx import *
A = Tensor([3,4,5])
print(A)
B = Tensor([3,4,5],dtype=Type.Uint64)
print(B)
C = Tensor([3,4,5],dtype=Type.Double,device=Device.cuda+0);
print(C)
D = Tensor()
D.Init([3,4,5],dtype=Type.Double,device=Device.cpu);
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]
[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]
[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
Total elem: 60
type : Uint64
cytnx device: CPU
Shape : (3,4,5)
[[[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]]
[[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]]
[[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]
[ 0 0 0 0 0 ]]]
Total elem: 60
type : Double (Float64)
cytnx device: CUDA/GPU-id:0
Shape : (3,4,5)
[[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]
[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]
[[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
return a new Tensor that is reshaped.
- Parameters
-
| new_shape | the new shape of the Tensor. |
- Returns
- [Tensor]
See also Tensor::reshape_()
Example:
c++ API:
#include <iostream>
using namespace std;
return 0;
}
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (5,12)
[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 1.00000e+01 1.10000e+01 ]
[1.20000e+01 1.30000e+01 1.40000e+01 1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 ]
[2.40000e+01 2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 3.50000e+01 ]
[3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 4.50000e+01 4.60000e+01 4.70000e+01 ]
[4.80000e+01 4.90000e+01 5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (5,12)
[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 1.00000e+01 1.10000e+01 ]
[1.20000e+01 1.30000e+01 1.40000e+01 1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 ]
[2.40000e+01 2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 3.50000e+01 ]
[3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 4.50000e+01 4.60000e+01 4.70000e+01 ]
[4.80000e+01 4.90000e+01 5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]
python API:
from cytnx import *
A = arange(60)
B = A.reshape(5,12)
print(A)
print(B)
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (5,12)
[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 1.00000e+01 1.10000e+01 ]
[1.20000e+01 1.30000e+01 1.40000e+01 1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 ]
[2.40000e+01 2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 3.50000e+01 ]
[3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 4.50000e+01 4.60000e+01 4.70000e+01 ]
[4.80000e+01 4.90000e+01 5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (5,12)
[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 1.00000e+01 1.10000e+01 ]
[1.20000e+01 1.30000e+01 1.40000e+01 1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 ]
[2.40000e+01 2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 3.50000e+01 ]
[3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 4.50000e+01 4.60000e+01 4.70000e+01 ]
[4.80000e+01 4.90000e+01 5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]
reshape the Tensor, inplacely
- Parameters
-
| new_shape | the new shape of the Tensor. |
See also Tensor::reshape()
Example:
c++ API:
#include <iostream>
using namespace std;
return 0;
}
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (60)
[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 4.50000e+01 4.60000e+01 4.70000e+01 4.80000e+01 4.90000e+01 5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (5,12)
[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 1.00000e+01 1.10000e+01 ]
[1.20000e+01 1.30000e+01 1.40000e+01 1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 ]
[2.40000e+01 2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 3.50000e+01 ]
[3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 4.50000e+01 4.60000e+01 4.70000e+01 ]
[4.80000e+01 4.90000e+01 5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (5,4,3)
[[[0.00000e+00 1.00000e+00 2.00000e+00 ]
[3.00000e+00 4.00000e+00 5.00000e+00 ]
[6.00000e+00 7.00000e+00 8.00000e+00 ]
[9.00000e+00 1.00000e+01 1.10000e+01 ]]
[[1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 ]
[1.80000e+01 1.90000e+01 2.00000e+01 ]
[2.10000e+01 2.20000e+01 2.30000e+01 ]]
[[2.40000e+01 2.50000e+01 2.60000e+01 ]
[2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 ]
[3.30000e+01 3.40000e+01 3.50000e+01 ]]
[[3.60000e+01 3.70000e+01 3.80000e+01 ]
[3.90000e+01 4.00000e+01 4.10000e+01 ]
[4.20000e+01 4.30000e+01 4.40000e+01 ]
[4.50000e+01 4.60000e+01 4.70000e+01 ]]
[[4.80000e+01 4.90000e+01 5.00000e+01 ]
[5.10000e+01 5.20000e+01 5.30000e+01 ]
[5.40000e+01 5.50000e+01 5.60000e+01 ]
[5.70000e+01 5.80000e+01 5.90000e+01 ]]]
python API:
from cytnx import *
A = arange(60)
print(A)
A.reshape_(5,12)
print(A)
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (60)
[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 4.50000e+01 4.60000e+01 4.70000e+01 4.80000e+01 4.90000e+01 5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (5,12)
[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 1.00000e+01 1.10000e+01 ]
[1.20000e+01 1.30000e+01 1.40000e+01 1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 ]
[2.40000e+01 2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 3.50000e+01 ]
[3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 4.50000e+01 4.60000e+01 4.70000e+01 ]
[4.80000e+01 4.90000e+01 5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]
Set the selected elements to the input constant, using Accessors (C++ API) / slices (Python API) to select them.
- Parameters
-
| accessors | the list(vector) of accessors. |
| rc | [Const] the constant value assigned to the selected elements. |
See also Tensor::fill for filling all elements with an assigned constant.
Example:
c++ API:
#include <iostream>
using namespace std;
return 0;
}
void set(const std::vector< cytnx::Accessor > &accessors, const Tensor &rhs)
set elements with the input Tensor using Accessor (C++ API) / slices (python API)
Definition Tensor.hpp:1096
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 ]
[4.50000e+01 4.60000e+01 4.70000e+01 4.80000e+01 4.90000e+01 ]
[5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 ]
[5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]]
Total elem: 12
type : Double (Float64)
cytnx device: CPU
Shape : (4,3)
[[0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 ]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[4.50000e+01 4.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.00000e+01 5.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.50000e+01 5.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 9.99000e+02 9.99000e+02 9.99000e+02 ]
[4.50000e+01 4.60000e+01 9.99000e+02 9.99000e+02 9.99000e+02 ]
[5.00000e+01 5.10000e+01 9.99000e+02 9.99000e+02 9.99000e+02 ]
[5.50000e+01 5.60000e+01 9.99000e+02 9.99000e+02 9.99000e+02 ]]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[4.50000e+01 4.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.00000e+01 5.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.50000e+01 5.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[9.99000e+02 9.99000e+02 0.00000e+00 0.00000e+00 0.00000e+00 ]
[9.99000e+02 9.99000e+02 0.00000e+00 0.00000e+00 0.00000e+00 ]
[9.99000e+02 9.99000e+02 0.00000e+00 0.00000e+00 0.00000e+00 ]
[9.99000e+02 9.99000e+02 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
python API:
from cytnx import *
A = arange(60).reshape(3,4,5)
print(A)
B = zeros([4,3])
print(B)
A[2,:,2:5:1] = B
print(A)
A[0,:,0:2:1] = 999
print(A)
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 ]
[4.50000e+01 4.60000e+01 4.70000e+01 4.80000e+01 4.90000e+01 ]
[5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 ]
[5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]]
Total elem: 12
type : Double (Float64)
cytnx device: CPU
Shape : (4,3)
[[0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 ]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[4.50000e+01 4.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.00000e+01 5.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.50000e+01 5.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[9.99000e+02 9.99000e+02 2.00000e+00 3.00000e+00 4.00000e+00 ]
[9.99000e+02 9.99000e+02 7.00000e+00 8.00000e+00 9.00000e+00 ]
[9.99000e+02 9.99000e+02 1.20000e+01 1.30000e+01 1.40000e+01 ]
[9.99000e+02 9.99000e+02 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[4.50000e+01 4.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.00000e+01 5.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.50000e+01 5.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
Set the selected elements from the input Tensor, using Accessors (C++ API) / slices (Python API) to select them.
- Parameters
-
| accessors | the list(vector) of accessors. |
| rhs | [Tensor] the Tensor whose elements are copied into the selected region. |
Note:
the shape of the input Tensor must match the shape of the region indicated by the Accessors. The memory is not shared with the input Tensor (the elements are copied).
Example:
c++ API:
#include <iostream>
using namespace std;
return 0;
}
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 ]
[4.50000e+01 4.60000e+01 4.70000e+01 4.80000e+01 4.90000e+01 ]
[5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 ]
[5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]]
Total elem: 12
type : Double (Float64)
cytnx device: CPU
Shape : (4,3)
[[0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 ]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[4.50000e+01 4.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.00000e+01 5.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.50000e+01 5.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 9.99000e+02 9.99000e+02 9.99000e+02 ]
[4.50000e+01 4.60000e+01 9.99000e+02 9.99000e+02 9.99000e+02 ]
[5.00000e+01 5.10000e+01 9.99000e+02 9.99000e+02 9.99000e+02 ]
[5.50000e+01 5.60000e+01 9.99000e+02 9.99000e+02 9.99000e+02 ]]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[4.50000e+01 4.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.00000e+01 5.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.50000e+01 5.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[9.99000e+02 9.99000e+02 0.00000e+00 0.00000e+00 0.00000e+00 ]
[9.99000e+02 9.99000e+02 0.00000e+00 0.00000e+00 0.00000e+00 ]
[9.99000e+02 9.99000e+02 0.00000e+00 0.00000e+00 0.00000e+00 ]
[9.99000e+02 9.99000e+02 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
python API:
from cytnx import *
A = arange(60).reshape(3,4,5)
print(A)
B = zeros([4,3])
print(B)
A[2,:,2:5:1] = B
print(A)
A[0,:,0:2:1] = 999
print(A)
output>
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 4.20000e+01 4.30000e+01 4.40000e+01 ]
[4.50000e+01 4.60000e+01 4.70000e+01 4.80000e+01 4.90000e+01 ]
[5.00000e+01 5.10000e+01 5.20000e+01 5.30000e+01 5.40000e+01 ]
[5.50000e+01 5.60000e+01 5.70000e+01 5.80000e+01 5.90000e+01 ]]]
Total elem: 12
type : Double (Float64)
cytnx device: CPU
Shape : (4,3)
[[0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 ]
[0.00000e+00 0.00000e+00 0.00000e+00 ]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[0.00000e+00 1.00000e+00 2.00000e+00 3.00000e+00 4.00000e+00 ]
[5.00000e+00 6.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00 ]
[1.00000e+01 1.10000e+01 1.20000e+01 1.30000e+01 1.40000e+01 ]
[1.50000e+01 1.60000e+01 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[4.50000e+01 4.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.00000e+01 5.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.50000e+01 5.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]]]
Total elem: 60
type : Double (Float64)
cytnx device: CPU
Shape : (3,4,5)
[[[9.99000e+02 9.99000e+02 2.00000e+00 3.00000e+00 4.00000e+00 ]
[9.99000e+02 9.99000e+02 7.00000e+00 8.00000e+00 9.00000e+00 ]
[9.99000e+02 9.99000e+02 1.20000e+01 1.30000e+01 1.40000e+01 ]
[9.99000e+02 9.99000e+02 1.70000e+01 1.80000e+01 1.90000e+01 ]]
[[2.00000e+01 2.10000e+01 2.20000e+01 2.30000e+01 2.40000e+01 ]
[2.50000e+01 2.60000e+01 2.70000e+01 2.80000e+01 2.90000e+01 ]
[3.00000e+01 3.10000e+01 3.20000e+01 3.30000e+01 3.40000e+01 ]
[3.50000e+01 3.60000e+01 3.70000e+01 3.80000e+01 3.90000e+01 ]]
[[4.00000e+01 4.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[4.50000e+01 4.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.00000e+01 5.10000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]
[5.50000e+01 5.60000e+01 0.00000e+00 0.00000e+00 0.00000e+00 ]]]