3D and 4D matrix operations in torch, and multi-dimensional softmax

3D matrix * 3D matrix


import torch

# A batch of 3 matrices, each of shape 3x2 -> overall shape (3, 3, 2)
tensors = torch.tensor([[[1, 2], [1, 2], [1, 2]],
                        [[1, 2], [1, 2], [1, 2]],
                        [[1, 2], [1, 2], [1, 2]]])
print(tensors.data)
print(tensors.data.shape)                    # torch.Size([3, 3, 2])

# Batch matrix multiply: (3, 3, 2) @ (3, 2, 3) -> (3, 3, 3)
mul_result = torch.matmul(tensors, tensors.transpose(1, 2))
print(tensors.transpose(1, 2))
print(tensors.transpose(1, 2).data.shape)    # torch.Size([3, 2, 3])
print(mul_result.data)
print(mul_result.data.shape)                 # torch.Size([3, 3, 3])

Experimental results:

tensor([[[1, 2],
         [1, 2],
         [1, 2]],

        [[1, 2],
         [1, 2],
         [1, 2]],

        [[1, 2],
         [1, 2],
         [1, 2]]])
torch.Size([3, 3, 2])
tensor([[[1, 1, 1],
         [2, 2, 2]],

        [[1, 1, 1],
         [2, 2, 2]],

        [[1, 1, 1],
         [2, 2, 2]]])
torch.Size([3, 2, 3])
tensor([[[5, 5, 5],
         [5, 5, 5],
         [5, 5, 5]],

        [[5, 5, 5],
         [5, 5, 5],
         [5, 5, 5]],

        [[5, 5, 5],
         [5, 5, 5],
         [5, 5, 5]]])
torch.Size([3, 3, 3])

Multiplying a 3x3x2 tensor by a 3x2x3 tensor gives a 3x3x3 tensor: the first dimension is treated as a batch, and each 3x2 matrix is multiplied by the corresponding 2x3 matrix.
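
The title also mentions 4D tensors. The original code only shows the 3D case, but the same rule extends to any number of dimensions: torch.matmul multiplies the last two dimensions and treats all leading dimensions as (broadcastable) batch dimensions. A minimal sketch, with shapes chosen purely for illustration (e.g. (batch, heads, seq, dim) as in attention):

import torch

q = torch.randn(2, 4, 5, 8)                    # (batch=2, heads=4, seq=5, dim=8)
k = torch.randn(2, 4, 5, 8)

# (2, 4, 5, 8) @ (2, 4, 8, 5) -> (2, 4, 5, 5): the last two dims are multiplied,
# the leading (2, 4) dims are treated as batch dimensions.
scores = torch.matmul(q, k.transpose(-2, -1))
print(scores.shape)                            # torch.Size([2, 4, 5, 5])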

softmax on a 3D tensor

# -*- coding: utf-8 -*-
# @Time : 2020/3/13 10:53
# @Author : liusen
import torch
import torch.nn.functional as F

input = torch.randn(3, 4)
print(input)

b = F.softmax(input, dim=0)  # softmax along dim=0 (down each column); each column sums to 1
print(b)

c = F.softmax(input, dim=1)  # softmax along dim=1 (across each row); each row sums to 1
print(c)

d = torch.max(input, dim=0)  # max along dim=0 (per column); returns (values, indices)
print(d)

e = torch.max(input, dim=1)  # max along dim=1 (per row); returns (values, indices)
print(e)

a = torch.rand(2, 3, 4)
print(a)

b = F.softmax(a, dim=0)  # normalize over dim 0: b[0] + b[1] == 1 elementwise
print(b)
c = F.softmax(a, dim=1)  # normalize over dim 1: each column of every 3x4 slice sums to 1
print(c)
d = F.softmax(a, dim=2)  # normalize over dim 2: each row of every 3x4 slice sums to 1
print(d)

Results:

tensor([[-1.8506,  2.2976, -1.8057,  0.6527],
        [-0.1433,  0.7569,  1.2549, -1.2423],
        [-0.4881,  0.5712, -0.1003,  0.2191]])
tensor([[0.0960, 0.7183, 0.0359, 0.5560],
        [0.5292, 0.1539, 0.7664, 0.0836],
        [0.3748, 0.1278, 0.1977, 0.3604]])
tensor([[0.0129, 0.8161, 0.0135, 0.1575],
        [0.1275, 0.3137, 0.5162, 0.0425],
        [0.1354, 0.3905, 0.1995, 0.2746]])
(tensor([-0.1433,  2.2976,  1.2549,  0.6527]), tensor([1, 0, 1, 0]))
(tensor([2.2976, 1.2549, 0.5712]), tensor([1, 2, 1]))
tensor([[[2.4603e-01, 2.1787e-01, 2.3161e-01, 4.3792e-04],
         [6.3455e-01, 1.1633e-02, 5.2575e-01, 7.3938e-01],
         [5.6934e-03, 6.4597e-02, 3.1555e-02, 1.2646e-01]],

        [[5.3523e-01, 1.5827e-01, 1.9162e-01, 9.0702e-01],
         [6.5452e-01, 7.4950e-01, 3.8926e-01, 7.0512e-01],
         [7.6378e-01, 5.1268e-01, 7.0699e-01, 1.1787e-02]]])
tensor([[[0.4282, 0.5149, 0.5100, 0.2877],
         [0.4950, 0.3235, 0.5341, 0.5086],
         [0.3191, 0.3898, 0.3373, 0.5286]],

        [[0.5718, 0.4851, 0.4900, 0.7123],
         [0.5050, 0.6765, 0.4659, 0.4914],
         [0.6809, 0.6102, 0.6627, 0.4714]]])
tensor([[[0.3066, 0.3743, 0.3164, 0.2365],
         [0.4522, 0.3046, 0.4246, 0.4952],
         [0.2411, 0.3211, 0.2590, 0.2683]],

        [[0.2956, 0.2363, 0.2569, 0.4493],
         [0.3330, 0.4268, 0.3130, 0.3672],
         [0.3714, 0.3368, 0.4301, 0.1835]]])
tensor([[[0.2674, 0.2599, 0.2635, 0.2091],
         [0.2822, 0.1514, 0.2531, 0.3134],
         [0.2372, 0.2516, 0.2435, 0.2677]],

        [[0.2600, 0.1784, 0.1844, 0.3772],
         [0.2552, 0.2806, 0.1957, 0.2684],
         [0.3131, 0.2436, 0.2958, 0.1476]]])

For a 2D array, dim=0 means the softmax is taken along dimension 0, i.e. down each column. In general, F.softmax normalizes along whichever dimension you pass as dim, so summing the output along that dimension gives 1.
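
A quick way to confirm which dimension is being normalized is to sum the output along that dimension and check that the result is all ones. The sketch below is not from the original post; the tensor is random and only the shapes matter:

import torch
import torch.nn.functional as F

a = torch.rand(2, 3, 4)
for dim in range(a.dim()):
    out = F.softmax(a, dim=dim)
    sums = out.sum(dim=dim)                 # should be a tensor of ones
    print(dim, torch.allclose(sums, torch.ones_like(sums)))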
