# 总结 (Summary)
# -*- coding: utf-8 -*-
"""
Created on Mon May 27 11:09:52 2019
@author: jiangshan
"""
import math

import numpy
import torch
import torch.nn.functional as F
# A 3x4 demo matrix. torch.tensor with explicit float32 matches what
# torch.Tensor(...) produced: a float32 tensor of the same values.
rows = [[1, 2, 3, 4],
        [1, 3, 4, 5],
        [3, 4, 5, 6]]
x1 = torch.tensor(rows, dtype=torch.float32)
print(x1)
>>
tensor([[1., 2., 3., 4.],
[1., 3., 4., 5.],
[3., 4., 5., 6.]])
# Convert the torch.Tensor to a numpy array.
# NOTE: .numpy() shares memory with the tensor — mutating one mutates the other.
# (The `import math` that used to sit here was moved to the top-of-file imports.)
x1_num = x1.numpy()
print(x1_num)
>>
[[1. 2. 3. 4.]
[1. 3. 4. 5.]
[3. 4. 5. 6.]]
r, c = x1_num.shape
# numpy.empty is the documented way to allocate an uninitialized array;
# calling the low-level numpy.ndarray constructor directly is discouraged
# (both arrays are fully overwritten by the loops below, so empty is safe).
Row_softmax = numpy.empty((r, c), dtype=numpy.float64)
# NOTE(review): "Clo_softmax" looks like a typo for "Col_softmax", but the
# name is kept unchanged because later code refers to it.
Clo_softmax = numpy.empty((r, c), dtype=numpy.float64)
# Compute softmax over each ROW of x1_num by hand.
# NOTE(review): no max-subtraction here, so this is not numerically
# stabilized — fine for this small demo, overflow-prone for large inputs.
for i in range(r):
    # First pass: exponentiate every entry of row i and accumulate the sum
    # (same left-to-right order as before, so the floats are identical).
    row_exps = []
    denom = 0.0
    for j in range(c):
        e = math.exp(x1_num[i, j])
        row_exps.append(e)
        denom += e
    # Second pass: normalize, store, and echo each probability.
    for j, e in enumerate(row_exps):
        prob = e / denom
        Row_softmax[i, j] = prob
        print(prob)
    print('=====row-%d-end=====' % (i + 1))
print(Row_softmax)
>>
0.03205860328008499
0.08714431874203257
0.23688281808991013
0.6439142598879722
=====row-1-end=====
0.01203764271193945
0.0889468172974043
0.24178251715880075
0.6572330228318555
=====row-2-end=====
0.03205860328008499
0.08714431874203256
0.23688281808991013
0.6439142598879724
=====row-3-end=====
[[0.0320586 0.08714432 0.23688282 0.64391426]
[0.01203764 0.08894682 0.24178252 0.65723302]
[0.0320586 0.08714432 0.23688282 0.64391426]]
# Row-wise softmax via torch. For a 2-D tensor, dim=1 and dim=-1 both
# refer to the last axis, so the two calls print the same result.
y12 = F.softmax(x1, dim=1)    # softmax over each row (axis 1)
print(y12)
y120 = F.softmax(x1, dim=-1)  # -1 = last axis, identical here
print(y120)
>>
tensor([[0.0321, 0.0871, 0.2369, 0.6439],
[0.0120, 0.0889, 0.2418, 0.6572],
[0.0321, 0.0871, 0.2369, 0.6439]])
tensor([[0.0321, 0.0871, 0.2369, 0.6439],
[0.0120, 0.0889, 0.2418, 0.6572],
[0.0321, 0.0871, 0.2369, 0.6439]])
# Compute softmax over each COLUMN of x1_num by hand.
# NOTE(review): like the row version, this has no max-subtraction, so it
# is not numerically stabilized — acceptable for this small demo.
for j in range(c):
    # First pass: exponentiate column j top-to-bottom and accumulate the
    # sum (same order as before, so the floats are identical).
    col_exps = []
    denom = 0.0
    for i in range(r):
        e = math.exp(x1_num[i, j])
        col_exps.append(e)
        denom += e
    # Second pass: normalize, store, and echo each probability.
    for i, e in enumerate(col_exps):
        prob = e / denom
        Clo_softmax[i, j] = prob
        print(prob)
    print('=====col-%d-end=====' % (j + 1))
print(Clo_softmax)
>>
0.10650697891920075
0.10650697891920075
0.7869860421615985
=====col-1-end=====
0.09003057317038046
0.24472847105479767
0.6652409557748219
=====col-2-end=====
0.09003057317038046
0.24472847105479764
0.6652409557748219
=====col-3-end=====
0.09003057317038045
0.24472847105479764
0.6652409557748219
=====col-4-end=====
[[0.10650698 0.09003057 0.09003057 0.09003057]
[0.10650698 0.24472847 0.24472847 0.24472847]
[0.78698604 0.66524096 0.66524096 0.66524096]]
# Column-wise softmax via torch: dim=0 normalizes down each column,
# matching the manual column loop above.
y11 = F.softmax(x1, dim=0)
print(y11)
print('=================================================')
>>
tensor([[0.1065, 0.0900, 0.0900, 0.0900],
[0.1065, 0.2447, 0.2447, 0.2447],
[0.7870, 0.6652, 0.6652, 0.6652]])
=================================================
# 1-D tensor: dim=0 and dim=-1 name the same (only) axis, so both
# softmax calls produce identical output.
x2 = torch.tensor([1.0, 2.0, 3.0, 4.0])
print(x2)
y2 = F.softmax(x2, dim=0)    # softmax along the single axis
print(y2)
y20 = F.softmax(x2, dim=-1)  # identical: -1 refers to that same axis
print(y20)
print('=================================================')
>>
tensor([1., 2., 3., 4.])
tensor([0.0321, 0.0871, 0.2369, 0.6439])
tensor([0.0321, 0.0871, 0.2369, 0.6439])
=================================================
# 2-D tensor shaped as a column vector (3 rows, 1 column), float32 like
# the original torch.Tensor(...) construction.
x3 = torch.tensor([[1.0], [1.0], [3.0]])
print(x3)
>>
tensor([[1.],
[1.],
[3.]])
# Each row of x3 holds a single element, so row-wise softmax (dim=1, and
# equivalently dim=-1) is trivially all ones; dim=0 normalizes the column.
y3 = F.softmax(x3, dim=1)    # row-wise: every row becomes [1.]
print(y3)
y31 = F.softmax(x3, dim=0)   # column-wise softmax
print(y31)
y30 = F.softmax(x3, dim=-1)  # same as dim=1 for a 2-D tensor
print(y30)
>>
tensor([[1.],
[1.],
[1.]])
tensor([[0.1065],
[0.1065],
[0.7870]])
tensor([[1.],
[1.],
[1.]])