Tensor initialization
import torch

my_tensor = torch.tensor([[1, 2, 3], [4, 5, 6]])
print(my_tensor)
tensor([[1, 2, 3],
[4, 5, 6]])
my_tensor = torch.tensor([[1, 2, 3], [4, 5, 6]], dtype=torch.float32)
print(my_tensor)
tensor([[1., 2., 3.],
[4., 5., 6.]])
my_tensor = torch.tensor([[1, 2, 3], [4, 5, 6]], dtype=torch.float32, device='cuda')
print(my_tensor)
tensor([[1., 2., 3.],
[4., 5., 6.]], device='cuda:0')
device = "cuda" if torch.cuda.is_available() else "cpu"
print(device)
cuda
my_tensor = torch.tensor([[1, 2, 3], [4, 5, 6]], dtype=torch.float32, device='cuda', requires_grad=True)
print(my_tensor)
tensor([[1., 2., 3.],
[4., 5., 6.]], device='cuda:0', requires_grad=True)
print(my_tensor.device)
print(my_tensor.requires_grad)
print(my_tensor.dtype)
print(my_tensor.shape)
cuda:0
True
torch.float32
torch.Size([2, 3])
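An existing tensor can also be moved between devices after creation with .to(). A minimal sketch reusing the device string computed above (not part of the original snippets):

my_tensor = my_tensor.to(device)   # move to GPU if available, otherwise stay on CPU
cpu_tensor = my_tensor.cpu()       # copy back to CPU memory
print(cpu_tensor.device)           # cpu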
x = torch.empty(size=(3, 3))   # uninitialized memory: the values are whatever happens to be in the buffer
print(x)
tensor([[0., 0., 0.],
[0., 0., 0.],
[0., 0., 0.]])
x = torch.zeros((3, 3))
print(x)

tensor([[0., 0., 0.],
[0., 0., 0.],
[0., 0., 0.]])
x = torch.ones((3, 3))
print(x)
tensor([[1., 1., 1.],
[1., 1., 1.],
[1., 1., 1.]])
x = torch.rand((3, 3))
print(x)
tensor([[0.4704, 0.6278, 0.2294],
[0.1838, 0.4951, 0.8452],
[0.7331, 0.8264, 0.9475]])
x = torch.randint(10, (2, 2))   # random integers in [0, 10)
print(x)
tensor([[7, 8],
[9, 3]])
x = torch.randn(2, 3)   # samples from the standard normal distribution
print(x)
tensor([[ 0.7711, -1.2354, -0.6476],
[ 1.2426, -0.8406, -1.1450]])
x = torch.eye(5, 5)
print(x)
tensor([[1., 0., 0., 0., 0.],
[0., 1., 0., 0., 0.],
[0., 0., 1., 0., 0.],
[0., 0., 0., 1., 0.],
[0., 0., 0., 0., 1.]])
x = torch.arange(start=0, end=5, step=1)
print(x)
tensor([0, 1, 2, 3, 4])
x = torch.linspace(start=0.1, end=1, steps=10)
print(x)
tensor([0.1000, 0.2000, 0.3000, 0.4000, 0.5000, 0.6000, 0.7000, 0.8000, 0.9000,
1.0000])
x = torch.empty(size=(1, 5)).normal_(mean=0, std=1)
print(x)
tensor([[ 0.3601, -1.2583, -0.1399, 0.9521, 1.1094]])
x = torch.empty(size=(1, 5)).uniform_(0, 1)
print(x)
tensor([[0.7726, 0.3465, 0.3890, 0.4156, 0.4586]])
x = torch.diag(torch.ones(3))      # 1-D input: build a diagonal matrix
print(x)
xx = torch.diag(torch.ones(3, 6))  # 2-D input: extract the diagonal
print(xx)
tensor([[1., 0., 0.],
[0., 1., 0.],
[0., 0., 1.]])
tensor([1., 1., 1.])
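So torch.diag is overloaded: a 1-D input produces a diagonal matrix, while a 2-D input returns its diagonal, which is why the (3, 6) matrix above comes back as a length-3 vector. As a small illustrative sketch (not from the original post), the second argument selects which diagonal to use:

print(torch.diag(torch.ones(3), 1))      # 4x4 matrix with ones on the first super-diagonal
print(torch.diag(torch.ones(3, 6), 1))   # the first super-diagonal of the matrix, length 3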
Tensor data type conversion
tensor = torch.arange(4)
print('tensor:', tensor)
print('tensor.bool:', tensor.bool())
print('tensor.short():', tensor.short())
print('tensor.long():', tensor.long())
print('tensor.long().dtype:', tensor.long().dtype)
print('tensor.half():', tensor.half())
print('tensor.float():', tensor.float())
print('tensor.float().dtype:', tensor.float().dtype)
print('tensor.double():', tensor.double())
tensor: tensor([0, 1, 2, 3])
tensor.bool: tensor([False, True, True, True])
tensor.short(): tensor([0, 1, 2, 3], dtype=torch.int16)
tensor.long(): tensor([0, 1, 2, 3])
tensor.long().dtype: torch.int64
tensor.half(): tensor([0., 1., 2., 3.], dtype=torch.float16)
tensor.float(): tensor([0., 1., 2., 3.])
tensor.float().dtype: torch.float32
tensor.double(): tensor([0., 1., 2., 3.], dtype=torch.float64)
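The same conversions can also be written with .to(), which accepts a target dtype. A small sketch, not part of the original:

print(tensor.to(torch.float32).dtype)   # torch.float32, equivalent to tensor.float()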
Converting between tensors and NumPy arrays
import numpy as np

np_array = np.zeros((5, 5))
tensor = torch.from_numpy(np_array)
print(tensor)
tensor([[0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0.]], dtype=torch.float64)
np_array_back = tensor.numpy()
print(np_array_back)
[[0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0.]
[0. 0. 0. 0. 0.]]
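Note that torch.from_numpy and Tensor.numpy share the underlying memory rather than copying it, so in-place changes on one side show up on the other. A minimal sketch reusing the arrays above:

np_array[0, 0] = 1.0
print(tensor[0, 0])          # tensor(1., dtype=torch.float64) -- same buffer
print(np_array_back[0, 0])   # 1.0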
Tensor math operations
x = torch.tensor([1, 2, 3])
y = torch.tensor([9, 8, 7])
z1 = torch.empty(3)
z1 = torch.add(x, y, out=z1)   # out= writes into the pre-allocated float tensor, hence the float output
print(z1)
z2 = torch.add(x, y)
print(z2)
z = x + y
print(z)
z = x - y
print(z)
tensor([10., 10., 10.])
tensor([10, 10, 10])
tensor([10, 10, 10])
tensor([-8, -6, -4])
z = torch.true_divide(x, y)   # element-wise division, promoting to float
print(z)
tensor([0.1111, 0.2500, 0.4286])
t = torch.zeros(3)
t.add_(x)   # methods ending in "_" operate in place
print(t)
tensor([1., 2., 3.])
x = torch.tensor([1, 2, 3])
z = x.pow(2)
print(z)
z = x ** 2
print(z)
tensor([1, 4, 9])
tensor([1, 4, 9])
x = torch.tensor([1, 0, 3])
z = x > 0
print(z)
tensor([ True, False, True])
x1 = torch.rand((2, 5))
x2 = torch.rand((5, 3))
x3 = torch.mm(x1, x2)   # matrix multiplication: (2, 5) x (5, 3) -> (2, 3)
print(x3.shape)
torch.Size([2, 3])
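For 2-D tensors the same product can also be written with torch.matmul or the @ operator. A small sketch, not in the original:

print((x1 @ x2).shape)              # torch.Size([2, 3])
print(torch.matmul(x1, x2).shape)   # torch.Size([2, 3])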
matrix_exp = torch.tensor([[1, 2], [3, 4]], dtype=torch.float)
print(matrix_exp.matrix_power(3))
print(torch.mm(matrix_exp, matrix_exp).mm(matrix_exp))   # the same result computed step by step
tensor([[ 37., 54.],
[ 81., 118.]])
tensor([[ 37., 54.],
[ 81., 118.]])
x = torch.tensor([1, 2, 3])
y = torch.tensor([9, 8, 7])
z = torch.dot(x, y)   # 1*9 + 2*8 + 3*7 = 46
print(z)
tensor(46)
x = torch.tensor([1, 2, 3])
y = torch.tensor([9, 8, 7])
z = x * y   # element-wise multiplication
print(z)
tensor([ 9, 16, 21])
batch = 32
n = 10
m = 20
p = 30
tensor1 = torch.rand((batch, n, m))
tensor2 = torch.rand(batch, m, n)
out_bmm = torch.bmm(tensor1, tensor2)   # batch matrix multiplication: (batch, n, m) x (batch, m, n) -> (batch, n, n)
print(out_bmm.shape)
torch.Size([32, 10, 10])
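torch.matmul also performs batched matrix multiplication when given 3-D inputs, so it can be used interchangeably here. A small sketch, not in the original:

out_matmul = torch.matmul(tensor1, tensor2)
print(out_matmul.shape)   # torch.Size([32, 10, 10])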
x1 = torch.tensor([[1, 2, 3], [4, 5, 6]])
x2 = torch.tensor([2, 3, 1])
z = x1 - x2    # x2 is broadcast across the rows of x1
print(z)
zz = x1 ** x2
print(zz)
tensor([[-1, -1, 2],
[ 2, 2, 5]])
tensor([[ 1, 8, 3],
[ 16, 125, 6]])
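The broadcasting rule is the usual one: trailing dimensions are compared, and a dimension of size 1 (or a missing dimension) is stretched to match. A minimal sketch with made-up shapes, not from the original:

a = torch.arange(3).reshape(3, 1)   # shape (3, 1)
b = torch.arange(2).reshape(1, 2)   # shape (1, 2)
print((a + b).shape)                # torch.Size([3, 2])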
x = torch.tensor([[5, 2, 3], [4, 5, -1]])
y = torch.tensor([[5, 2, 3], [4, 7, 6]])
su_x = torch.sum(x, dim=0)
print('su_x:', su_x)
values, indices = torch.max(x, dim=0)
print('values:{},indices:{}'.format(values, indices))
abs_x = torch.abs(x)
print('abs_x:', abs_x)
argmax = torch.argmax(x, dim=0)
print('argmax:', argmax)
argmin = torch.argmin(x, dim=0)
print('argmin:', argmin)
mean_x = torch.mean(x.float(), dim=0)
print('mean_x:', mean_x)
eq = torch.eq(x, y)
print('eq:', eq)
print('sort:', torch.sort(y, dim=0, descending=False))
print('x after clamp:', torch.clamp(x, min=0, max=10))
su_x: tensor([9, 7, 2])
values:tensor([5, 5, 3]),indices:tensor([0, 1, 0])
abs_x: tensor([[5, 2, 3],
[4, 5, 1]])
argmax: tensor([0, 1, 0])
argmin: tensor([1, 0, 1])
mean_x: tensor([4.5000, 3.5000, 1.0000])
eq: tensor([[ True, True, True],
[ True, False, False]])
sort: torch.return_types.sort(
values=tensor([[4, 2, 3],
[5, 7, 6]]),
indices=tensor([[1, 0, 0],
[0, 1, 1]]))
x after clamp: tensor([[5, 2, 3],
[4, 5, 0]])
x = torch.tensor([1, 0, 1, 1, 1], dtype=torch.bool)
z = torch.any(x)
print(z)
zz = torch.all(x)
print(zz)
tensor(True)
tensor(False)
Tensor indexing
batch_size = 10
features = 25
x = torch.rand((batch_size, features))
print(x.shape)
print(x[0].shape)        # first row, i.e. x[0, :]
print(x[:, 0].shape)     # first column
print(x[2, 0:10].shape)  # first ten features of the third example
x[0, 0] = 10
print(x[0, 0])
torch.Size([10, 25])
torch.Size([25])
torch.Size([10])
torch.Size([10])
tensor(10.)
x = torch.arange(10)
print(x)
indices = [2, 5, 8]
print(x[indices])   # fancy indexing with a list of positions
tensor([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
tensor([2, 5, 8])
x = torch.tensor([[1, 2, 3, 4], [5, 6, 7, 8], [9, 8, 7, 6]])
rows = torch.tensor([1, 0])
cols = torch.tensor([2, 0])
print(x[rows, cols])   # picks x[1, 2] and x[0, 0]
tensor([7, 1])
x = torch.arange(10)
print(x[(x < 2) | (x > 8)])
print(x[(x < 2) & (x > 8)])      # empty: no element satisfies both conditions
print(x[x.remainder(2) == 0])    # even elements
tensor([0, 1, 9])
tensor([], dtype=torch.int64)
tensor([0, 2, 4, 6, 8])
x = torch.arange(10)
print(torch.where(x > 5, x, x * 2))   # keep x where x > 5, otherwise use x * 2
tensor([ 0, 2, 4, 6, 8, 10, 6, 7, 8, 9])
print(torch.tensor([0, 0, 1, 2, 2, 3]).unique())
tensor([0, 1, 2, 3])
x = torch.rand((4, 5, 3))
print(x.ndimension())   # number of dimensions
print(x.numel())        # total number of elements
3
60
Tensor reshape
x = torch.arange(9)
x1 = x.view(3, 3)
x2 = x.reshape(3, 3)
print(x1)
print(x2)
tensor([[0, 1, 2],
[3, 4, 5],
[6, 7, 8]])
tensor([[0, 1, 2],
[3, 4, 5],
[6, 7, 8]])
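Both calls above produce the same 3x3 matrix. The practical difference is that view requires contiguous memory and never copies, while reshape falls back to a copy when it has to. A minimal sketch of that difference, not from the original post:

t = torch.arange(9).reshape(3, 3).t()   # transposing makes the tensor non-contiguous
print(t.is_contiguous())                # False
print(t.reshape(9).shape)               # works: reshape copies if necessary
print(t.contiguous().view(9).shape)     # view needs contiguous memory first
# t.view(9) would raise a RuntimeError here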
y = x1.t()
print(y)
yy = x2.t()
print(yy)
tensor([[0, 3, 6],
[1, 4, 7],
[2, 5, 8]])
tensor([[0, 3, 6],
[1, 4, 7],
[2, 5, 8]])
x1 = torch.rand(2, 5)
x2 = torch.rand(2, 5)
print(torch.cat((x1, x2), dim=0).shape)
print(torch.cat((x1, x2), dim=1).shape)
torch.Size([4, 5])
torch.Size([2, 10])
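torch.cat joins tensors along an existing dimension; torch.stack instead creates a new one. A small sketch, not in the original:

print(torch.stack((x1, x2), dim=0).shape)   # torch.Size([2, 2, 5])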
z = x1.view(-1)   # flatten: -1 lets PyTorch infer the size
print(z.shape)
torch.Size([10])
batch = 64
x = torch.rand((batch, 2, 5))
z = x.view(batch, -1)   # flatten everything except the batch dimension
print(z.shape)
torch.Size([64, 10])
z = x.permute(0, 2, 1)   # swap the last two dimensions
print(z.shape)
torch.Size([64, 5, 2])
x = torch.arange(10)
print(x.unsqueeze(0).shape)   # add a new dimension at position 0
print(x.unsqueeze(1).shape)   # add a new dimension at position 1
print(x.shape)                # the original tensor is unchanged
torch.Size([1, 10])
torch.Size([10, 1])
torch.Size([10])
x = torch.arange(10).unsqueeze(0).unsqueeze(1)
print(x.shape)
z = x.squeeze(1)   # remove the size-1 dimension at position 1
print(z.shape)
torch.Size([1, 1, 10])
torch.Size([1, 10])
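Called without an argument, squeeze removes every dimension of size 1 at once. A small sketch, not in the original:

print(x.squeeze().shape)   # torch.Size([10])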
Most of the content above is summarized from a YouTube video, which is well worth watching:
https://www.youtube.com/watch?v=x9JiIFvlUwk
Of course, for friends who can't access YouTube, I've also re-uploaded it to the ever-reliable Bilibili:
https://www.bilibili.com/video/BV19v411j7Xd/
Many years later,
on a calm and sunny afternoon,
an old man is lying in a rocking chair, basking in the sun,
when a child shouts: "Grandpa, Grandpa, someone liked your post!"