Files
pytorch_learn/11_01_softmax.py
Joseph Hopfmüller 530bcae7e8 finish chapter 13
2022-10-17 17:01:17 +02:00

23 lines
539 B
Python

# softmax squashes outputs so that the sum of the outputs equals 1 while preserving the order
import torch
import torch.nn as nn
import numpy as np
def softmax(x):
    """Return the softmax of *x* along axis 0.

    Squashes values into (0, 1) so they sum to 1 while preserving order.
    Subtracting the max before exponentiating is mathematically a no-op
    (the factor cancels in the ratio) but prevents overflow: the naive
    np.exp(x) returns inf for x ≳ 710, yielding nan results.
    """
    shifted = x - np.max(x, axis=0)  # stabilize: largest exponent becomes 0
    exps = np.exp(shifted)
    return exps / np.sum(exps, axis=0)
# Demo: run the hand-rolled NumPy softmax on a small example and print
# both the raw inputs and the resulting probability-like outputs.
values = [2.0, 1.0, 0.1]
x = np.array(values)
outputs = softmax(x)
print('inputs: ', x)
print('softmax numpy:', outputs)
# Demo: the same softmax using torch's built-in implementation.
x = torch.tensor([2., 1., .1])
outputs = torch.softmax(x, dim=0)
print('inputs: ', x)
# fixed label: this value comes from torch.softmax, not the numpy version
print('softmax torch:', outputs)

# Applying softmax a second time does NOT return the same values: the
# first softmax's outputs are just another input vector to squash.
outputs2 = torch.softmax(outputs, dim=0)
print('inputs: ', outputs)
print('softmax torch (applied twice):', outputs2)