Pytorch使用介绍

张量(tensor)的创建
# 创建张量
>>> data=torch.tensor([[3,5,5]])
>>> data
tensor([[3, 5, 5]])
# 通过numpy创建张量
>>> import numpy as np
>>> data=torch.tensor(np.array([3,6]))
>>> data
tensor([3, 6])

**创建固定大小的张量**


#创建一个3维全为1的张量
>>> data=torch.ones(2,2,3)
>>> data
tensor([[[1., 1., 1.],
         [1., 1., 1.]],

        [[1., 1., 1.],
         [1., 1., 1.]]])
# 创建一个2维2行3列全为1的张量
>>> data=torch.ones([2,3])
>>> data
tensor([[1., 1., 1.],
        [1., 1., 1.]])
        
# 创建一个3维的全为0的张量
>>> data=torch.zeros(2,2,3)
>>> data
tensor([[[0., 0., 0.],
         [0., 0., 0.]],

        [[0., 0., 0.],
         [0., 0., 0.]]])

#创建一个2维的全为0的张量
>>> data=torch.zeros([2,3])
>>> data
tensor([[0., 0., 0.],
        [0., 0., 0.]])

# 创建一个等差的张量: 在1到10之间均匀取5个数
>>> data=torch.linspace(1,10,5)
>>> data
tensor([ 1.0000,  3.2500,  5.5000,  7.7500, 10.0000])
# 创建从开始位置到结束位置指定步长的张量
>>> data=torch.arange(1,6,2)
>>> data
tensor([1, 3, 5])

**创建随机值的张量**

#创建一个3维(形状为2×3×2)的随机值tensor,随机值的区间是[0, 1)
>>> data=torch.rand(2,3,2)
>>> data
tensor([[[0.7116, 0.5658],
         [0.3859, 0.8925],
         [0.1764, 0.3736]],

        [[0.4448, 0.9351],
         [0.4528, 0.7924],
         [0.2277, 0.9085]]])


#创建2行3列的随机数的tensor,随机值服从均值为0、方差为1的标准正态分布
>>> data=torch.randn([2,3])
>>> data
tensor([[ 0.9683, -1.7999, -0.3686],
        [ 0.4960,  1.2822, -0.2676]])
>>> data=torch.randn(2,3,4)
>>> data
tensor([[[-0.8879,  0.3256, -0.4455,  1.0120],
         [-0.0365, -1.2527, -1.1812,  0.7545],
         [-1.6055, -0.5800,  0.6748, -1.9849]],

        [[-1.3195,  0.4951,  1.0666,  0.3471],
         [-0.3996, -0.2203,  0.2846,  0.0356],
         [ 0.0829, -0.9803,  0.6054,  0.0689]]])
# 创建2行3列的随机整数的tensor,随机值的区间是[1, 11)
>>> data=torch.randint(1,11,[2,3])
>>> data
tensor([[4, 8, 2],
        [2, 7, 4]])
张量属性的查询
>>> data=torch.randn(2,3,4)
>>> data
tensor([[[ 0.2599, -0.2836, -1.5110,  1.9127],
         [ 0.4096, -0.2133, -0.2346,  0.3524],
         [-0.4831,  1.5383, -0.2450, -0.0594]],

        [[-1.1935, -1.0393, -0.0275, -1.6303],
         [ 0.5875,  0.1766,  0.4808,  0.2339],
         [ 0.4558,  1.2427,  0.1718, -1.0303]]])

# 查询张量的数据类型
>>> data.dtype
torch.float32
# 查询张量的阶(维度数)
>>> data.dim()
3


# 查询张量的形状
>>> data.size()
torch.Size([2, 3, 4])

#转化为numpy方式查询数据
>>> data.numpy()
array([[[ 0.25989547, -0.28362632, -1.5109968 ,  1.9127252 ],
        [ 0.4096491 , -0.21327065, -0.2346155 ,  0.3524418 ],
        [-0.48313147,  1.5383261 , -0.24496083, -0.05937861]],

       [[-1.1935025 , -1.039305  , -0.02747562, -1.63026   ],
        [ 0.5874608 ,  0.1766219 ,  0.48080933,  0.23386581],
        [ 0.45576093,  1.242688  ,  0.17179173, -1.0302773 ]]],
      dtype=float32)

张量的相关操作
>>> data
tensor([[[ 0.2599, -0.2836, -1.5110,  1.9127],
         [ 0.4096, -0.2133, -0.2346,  0.3524],
         [-0.4831,  1.5383, -0.2450, -0.0594]],

        [[-1.1935, -1.0393, -0.0275, -1.6303],
         [ 0.5875,  0.1766,  0.4808,  0.2339],
         [ 0.4558,  1.2427,  0.1718, -1.0303]]])

# 张量形状变换 (view)
>>> data.view(2,2,6)
tensor([[[ 0.2599, -0.2836, -1.5110,  1.9127,  0.4096, -0.2133],
         [-0.2346,  0.3524, -0.4831,  1.5383, -0.2450, -0.0594]],

        [[-1.1935, -1.0393, -0.0275, -1.6303,  0.5875,  0.1766],
         [ 0.4808,  0.2339,  0.4558,  1.2427,  0.1718, -1.0303]]])

# 数据类型变换
>>> data.type(torch.int64)
tensor([[[ 0,  0, -1,  1],
         [ 0,  0,  0,  0],
         [ 0,  1,  0,  0]],

        [[-1, -1,  0, -1],
         [ 0,  0,  0,  0],
         [ 0,  1,  0, -1]]])


# 张量切片操作
>>> data
tensor([[[ 0.2599, -0.2836, -1.5110,  1.9127],
         [ 0.4096, -0.2133, -0.2346,  0.3524],
         [-0.4831,  1.5383, -0.2450, -0.0594]],

        [[-1.1935, -1.0393, -0.0275, -1.6303],
         [ 0.5875,  0.1766,  0.4808,  0.2339],
         [ 0.4558,  1.2427,  0.1718, -1.0303]]])
>>> data[0]
tensor([[ 0.2599, -0.2836, -1.5110,  1.9127],
        [ 0.4096, -0.2133, -0.2346,  0.3524],
        [-0.4831,  1.5383, -0.2450, -0.0594]])
>>> data[0][1]
tensor([ 0.4096, -0.2133, -0.2346,  0.3524])

# 张量转置
>>> data2
tensor([[-0.1970,  0.6145, -1.4317],
        [-0.9910, -1.1225, -1.4334]])
>>> data2.t()
tensor([[-0.1970, -0.9910],
        [ 0.6145, -1.1225],
        [-1.4317, -1.4334]])

# 张量轴变换  permute  transpose
>>> data
tensor([[[ 0.2599, -0.2836, -1.5110,  1.9127],
         [ 0.4096, -0.2133, -0.2346,  0.3524],
         [-0.4831,  1.5383, -0.2450, -0.0594]],

        [[-1.1935, -1.0393, -0.0275, -1.6303],
         [ 0.5875,  0.1766,  0.4808,  0.2339],
         [ 0.4558,  1.2427,  0.1718, -1.0303]]])
>>> data.size()
torch.Size([2, 3, 4])

# 按照指定下标的方式互换
>>> data2=data.permute(2,0,1)

>>> data2
tensor([[[ 0.2599,  0.4096, -0.4831],
         [-1.1935,  0.5875,  0.4558]],

        [[-0.2836, -0.2133,  1.5383],
         [-1.0393,  0.1766,  1.2427]],

        [[-1.5110, -0.2346, -0.2450],
         [-0.0275,  0.4808,  0.1718]],

        [[ 1.9127,  0.3524, -0.0594],
         [-1.6303,  0.2339, -1.0303]]])
>>> data2.size()
torch.Size([4, 2, 3])

# 将下标索引0 1互换
>>> data.transpose(0,1)
tensor([[[ 0.2599, -0.2836, -1.5110,  1.9127],
         [-1.1935, -1.0393, -0.0275, -1.6303]],

        [[ 0.4096, -0.2133, -0.2346,  0.3524],
         [ 0.5875,  0.1766,  0.4808,  0.2339]],

        [[-0.4831,  1.5383, -0.2450, -0.0594],
         [ 0.4558,  1.2427,  0.1718, -1.0303]]])

**维度的扩增  unsqueeze**

>>> data
tensor([[-0.3785, -0.7493,  0.0425],
        [-0.3215,  0.2367, -0.4611]])
>>> data.size()
torch.Size([2, 3])
>>> data3=data.unsqueeze(0)
>>> data3
tensor([[[-0.3785, -0.7493,  0.0425],
         [-0.3215,  0.2367, -0.4611]]])
>>> data3.size()
torch.Size([1, 2, 3])

# 在下标1的维度扩增一个维度
>>> data6=data.unsqueeze(1)
>>> data6
tensor([[[-0.3785, -0.7493,  0.0425]],

        [[-0.3215,  0.2367, -0.4611]]])
>>> data6.size()
torch.Size([2, 1, 3])

#在最后一个维度扩增一个维度
>>> data8=data.unsqueeze(-1)
>>> data8
tensor([[[-0.3785],
         [-0.7493],
         [ 0.0425]],

        [[-0.3215],
         [ 0.2367],
         [-0.4611]]])

>>> data8.size()
torch.Size([2, 3, 1])


维度的扩增与压缩
unsqueeze / squeeze

>>> data=torch.randn([2,3])
>>> data
tensor([[-0.7107,  0.2864,  1.3584],
        [ 0.1806,  0.8982,  1.0520]])
>>> data.size()
torch.Size([2, 3])

# 在指定下标位置0扩增一个维度
>>> data3=torch.unsqueeze(data,0)
>>> data3
tensor([[[-0.7107,  0.2864,  1.3584],
         [ 0.1806,  0.8982,  1.0520]]])
>>> data.unsqueeze(0)
tensor([[[-0.7107,  0.2864,  1.3584],
         [ 0.1806,  0.8982,  1.0520]]])

>>> data3.size()
torch.Size([1, 2, 3])

#在data中指定位置N去掉一个维数为1的维度
>>> data.size()
torch.Size([2, 3])

>>> data.squeeze(0)
tensor([[-0.7107,  0.2864,  1.3584],
        [ 0.1806,  0.8982,  1.0520]])

>>> data6=torch.squeeze(data,0)
>>> data6
tensor([[-0.7107,  0.2864,  1.3584],
        [ 0.1806,  0.8982,  1.0520]])
>>> data6.size()
torch.Size([2, 3])

张量的运算

统计操作 :tensor.max,tensor.min, tensor.mean,tensor.median ,tensor.argmax

>>> data
tensor([[[ 0.0508,  0.5197,  2.2676, -0.5157],
         [-0.9169, -1.7118,  1.1145,  0.2173],
         [-1.1510,  1.5473,  0.1611,  0.3918]],

        [[ 0.3562, -0.0962,  0.8505,  0.0344],
         [-0.5413, -0.1265,  1.4267, -1.1346],
         [ 1.5966,  0.1997, -0.5586,  0.2565]]])

#求最大值
>>> data.max()
tensor(2.2676)
>>> data=torch.randint(1,50,[4,10])
>>> data
tensor([[19,  1, 33, 43,  7, 17, 32, 36, 16, 17],
        [28, 11, 34, 49, 14, 47, 13, 31, 28, 17],
        [31,  4, 46, 36, 34, 11, 44, 12, 16,  3],
        [ 5, 32, 31, 49, 42, 17,  6, 28, 42, 31]])

>>> data.max(dim=1)
torch.return_types.max(
values=tensor([43, 49, 46, 49]),
indices=tensor([3, 3, 2, 3]))

#取最小值(以下min/mean/median的结果基于前面的randn张量)
>>> data.min()
tensor(-1.7118)

#取平均值
>>> data.mean()
tensor(0.1766)

#取中位数
>>> data.median()
tensor(0.1611)

#取最大值的下标
>>> data.argmax()
tensor(2)

>>> data.argmax(dim=1)
tensor([3, 3, 2, 3])


常用数学计算(tensor之间元素级别的数学运算同样适用广播机制)
tensor.add tensor.sub tensor.mm tensor.abs

指数与三角函数运算
torch.exp torch.sin torch.cos

in-place 原地操作 tensor.add_ tensor.sub_ tensor.abs_

>>> a
tensor([2, 3, 4])
>>> b=torch.tensor([2,4,4])
>>> a.add_(b)
tensor([4, 7, 8])
>>> a
tensor([4, 7, 8])

发布了66 篇原创文章 · 获赞 1 · 访问量 7015

猜你喜欢

转载自blog.csdn.net/qq_41128383/article/details/105541207