torch.tensor(data, dtype=None, device=None, requires_grad=False, pin_memory=False) # 从data创建tensor torch.from_numpy(ndarray) # 从numpy创建tensor data:数据,可以是list,numpy dtype:数据类型,默认与data一致 device:所在设备 gpu/cpu requires_grad:是否需要梯度 pin_memory:是否存于锁页内存
mat = np.array([[1,2,3], [2,3,4], [3,4,5]]) torch.tensor(mat)tensor([[1, 2, 3], [2, 3, 4], [3, 4, 5]], dtype=torch.int32)
lt = [1,2,3,4,5,6,7,8,9] torch.tensor(lt)tensor([1, 2, 3, 4, 5, 6, 7, 8, 9])
2.1 torch.zeros(*size, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False) # 按size创建全0张量
size:张量的形状 如(3,3) (3,5,5) out:输出的张量 dtype:数据的类型 layout:内存中布局形式,有strided,sparse_coo等 device:所在设备 gpu/cpu requires_grad:是否需要梯度 torch.zeros(3,3) # 也可以写成torch.zeros((3,3))效果一样,也是创建一个(3,3)的全0张量tensor([[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]])
2.2 torch.zeros_like(input, dtype=None, layout=None, device=None, requires_grad=False) # 依据input形状创建全0张量
input:input是一个张量,依据input的形状创建一个全零张量 dtype:数据类型 layout:内存中布局形式 device:所在设备,gpu/cpu requires_grad:是否需要梯度 t1 = torch.tensor([[1,2,3], [2,3,4], [3,4,5]]) # 创建一个3x3的张量 t2 = torch.zeros_like(t1) print('t1:{},t1.shape:{}\nt2:{},t2.shape:{}'.format(t1,t1.shape,t2,t2.shape))t1:tensor([[1, 2, 3], [2, 3, 4], [3, 4, 5]]),t1.shape:torch.Size([3, 3]) t2:tensor([[0, 0, 0], [0, 0, 0], [0, 0, 0]]),t2.shape:torch.Size([3, 3])
2.3 torch.ones(*size, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False) # 依size创建全1张量 2.4 torch.ones_like(input, dtype=None, layout=torch.strided, device=None, requires_grad=False) # 依据input形状创建全1张量
size:张量的形状 如(3,3) (3,255,255) input:输入的张量,依据input张量的形状创建全1张量 dtype:数据类型 layout:内存中的布局形式 device:所在设备 GPU/CPU requires_grad:是否需要梯度 torch.ones(3,3) # 也可以写成torch.ones((3,3))tensor([[1., 1., 1.], [1., 1., 1.], [1., 1., 1.]])
t1 = torch.tensor([[1,2,3], [2,3,4], [3,4,5]]) # 创建一个3x3的张量 t2 = torch.ones_like(t1) print('t1:{},t1.shape:{}\nt2:{},t2.shape:{}'.format(t1,t1.shape,t2,t2.shape))t1:tensor([[1, 2, 3], [2, 3, 4], [3, 4, 5]]),t1.shape:torch.Size([3, 3]) t2:tensor([[1, 1, 1], [1, 1, 1], [1, 1, 1]]),t2.shape:torch.Size([3, 3])
2.5 torch.full(size, fill_value, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False) # 依size创建一个全fill_value值的张量 2.6 torch.full_like(input, fill_value, dtype=None, layout=torch.strided, device=None, requires_grad=False)
size:张量的形状 input:输入的张量,依据input张量的形状创建全fill_value的张量 fill_value:张量的值 torch.full((3,2), 3, dtype=torch.long) # 即使fill_value是整型,旧版本中函数默认也返回浮点类型的张量数据 若是想要全整型的张量,需要指定dtype的数据类型tensor([[3, 3], [3, 3], [3, 3]])
t1 = torch.tensor([[1,2,3], [2,3,4], [3,4,5]]) # 创建一个3x3的张量 t2 = torch.full_like(t1, 3.14, dtype=torch.float) # torch.full_like() 返回的数据类型默认与input的dtype一致(此处t1为整型),若想创建torch.float类型的张量,需要指定dtype的数据类型 print('t1:{},t1.shape:{}\nt2:{},t2.shape:{}'.format(t1,t1.shape,t2,t2.shape))t1:tensor([[1, 2, 3], [2, 3, 4], [3, 4, 5]]),t1.shape:torch.Size([3, 3]) t2:tensor([[3.1400, 3.1400, 3.1400], [3.1400, 3.1400, 3.1400], [3.1400, 3.1400, 3.1400]]),t2.shape:torch.Size([3, 3])
2.7 torch.arange(start=0, end, step=1, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False) # 创建等差的1维张量
注意事项,数值区间为[start, end) start:数列起始值 end:数列结束值 step:数列公差,默认为1 torch.arange(0,5,1)tensor([0, 1, 2, 3, 4])
2.8 torch.linspace(start, end, steps=100, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False) # 创建均分的1维张量
注意事项,数值区间为[start, end],包含end start:数列起始值 end:数列结束值 steps:数列长度,默认为100 torch.linspace(0,1,10)tensor([0.0000, 0.1111, 0.2222, 0.3333, 0.4444, 0.5556, 0.6667, 0.7778, 0.8889, 1.0000])
2.9 torch.logspace(start, end, steps=100, base=10, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False) # 创建对数均分的1维张量
注意事项,长度为steps,底为base start:数列起始值 end:数列结束值 steps:数列长度,默认为100 base:对数函数的底,默认为10 torch.logspace(0,1,steps=10,base=2)tensor([1.0000, 1.0801, 1.1665, 1.2599, 1.3608, 1.4697, 1.5874, 1.7145, 1.8517, 2.0000])
2.10 torch.eye(n, m=None, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False) # 创建单位对角矩阵
n:矩阵行数 m:矩阵列数 torch.eye(3) # 一般只写行数即可tensor([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
3.1 torch.normal(mean,std) # 生成正态分布(高斯分布)
mean:均值 std: 标准差 # mean:张量 std:张量 mean = torch.arange(1,5, dtype=torch.float) std = torch.arange(1,5, dtype=torch.float) t1 = torch.normal(mean, std) print('mean:{},std:{}\n{}'.format(mean,std,t1))mean:tensor([1., 2., 3., 4.]),std:tensor([1., 2., 3., 4.]) tensor([1.5224, 2.3009, 4.9276, 1.0232])
# mean:标量 std: 标量 mean = 0. std = 1. t2 = torch.normal(mean, std, size=(4,)) # mean和std都是标量时要求确认size的形状 print('mean:{},std:{}\n{}'.format(mean,std,t2))mean:0.0,std:1.0 tensor([1.5224, 2.3009, 4.9276, 1.0232])
# mean:张量 std: 标量 mean = torch.arange(1, 5, dtype=torch.float) std = 1 t3 = torch.normal(mean, std) print("mean:{}\nstd:{}\n{}".format(mean, std, t3))mean:tensor([1., 2., 3., 4.]) std:1 tensor([0.0230, 2.6073, 3.4054, 3.8147])
# mean:标量 std:张量 mean = 1 std = torch.arange(1, 5, dtype=torch.float) t4 = torch.normal(mean, std) print("mean:{}\nstd:{}\n{}".format(mean, std, t4))mean:1 std:tensor([1., 2., 3., 4.]) tensor([0.0230, 2.6073, 3.4054, 3.8147])
3.2 torch.randn(*size, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False) 3.3 torch.randn_like(input,out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False)
功能:生成标准正态分布 torch.randn(3,3) # 也可以写成torch.randn((3,3))tensor([[ 0.9868, 1.9955, 1.8769], [ 0.0447, -0.4348, 0.3323], [ 0.7515, 1.1565, -1.3405]])
t1 = torch.tensor([[1,2,3], [2,3,4], [3,4,5]],dtype=torch.float) # 创建一个3x3的张量 t2 = torch.randn_like(t1) print('t1:{},t1.shape:{}\nt2:{},t2.shape:{}'.format(t1,t1.shape,t2,t2.shape))t1:tensor([[1., 2., 3.], [2., 3., 4.], [3., 4., 5.]]),t1.shape:torch.Size([3, 3]) t2:tensor([[ 0.5603, -0.3141, -1.2211], [ 0.8488, -1.2941, -1.4203], [-0.0407, -0.4976, -1.8628]]),t2.shape:torch.Size([3, 3])
3.4 torch.rand(*size, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False) 3.5 torch.rand_like(input, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False)
功能:在区间[0,1)上生成均匀分布 torch.rand(3,3) # 也可以写成torch.rand((3,3))tensor([[0.5838, 0.7109, 0.1751], [0.9796, 0.7830, 0.4504], [0.5398, 0.1588, 0.9341]])
t1 = torch.tensor([[1,2,3], [2,3,4], [3,4,5]],dtype=torch.float) # 创建一个3x3的张量 t2 = torch.rand_like(t1) print('t1:{},t1.shape:{}\nt2:{},t2.shape:{}'.format(t1,t1.shape,t2,t2.shape))t1:tensor([[1., 2., 3.], [2., 3., 4.], [3., 4., 5.]]),t1.shape:torch.Size([3, 3]) t2:tensor([[0.6894, 0.2823, 0.2676], [0.3813, 0.6077, 0.7439], [0.1812, 0.1561, 0.6160]]),t2.shape:torch.Size([3, 3])
3.6 torch.randint(low=0, high, size, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False) 3.7 torch.randint_like()
功能:在区间[low,high)生成整数均匀分布 torch.randint(0,2,(3,3))tensor([[1, 0, 0], [1, 1, 1], [0, 0, 0]])
t1 = torch.tensor([[1,2,3], [2,3,4], [3,4,5]],dtype=torch.float) # 创建一个3x3的张量 t2 = torch.randint_like(t1,0,2) print('t1:{},t1.shape:{}\nt2:{},t2.shape:{}'.format(t1,t1.shape,t2,t2.shape))t1:tensor([[1., 2., 3.], [2., 3., 4.], [3., 4., 5.]]),t1.shape:torch.Size([3, 3]) t2:tensor([[0., 0., 0.], [1., 1., 1.], [1., 0., 0.]]),t2.shape:torch.Size([3, 3])
3.8 torch.randperm(n, out=None, dtype=None, layout=torch.strided, device=None, requires_grad=False) # 生成从0到n-1的随机排列
n:张量的长度 torch.randperm(5)tensor([1, 4, 2, 0, 3])
3.9 torch.bernoulli(input, *, generator=None, out=None) # 以input为概率,生成伯努利分布(0-1分布,两点分布)
input:概率值 inputs = torch.rand(3,3) t = torch.bernoulli(inputs) print('inputs:{}\nt:{}'.format(inputs, t))inputs:tensor([[0.4601, 0.1059, 0.8558], [0.6784, 0.4692, 0.1454], [0.3964, 0.7172, 0.0361]]) t:tensor([[0., 0., 1.], [1., 0., 0.], [1., 1., 0.]])
