Files
pytorch_learn/11_01_softmax.py
Joseph Hopfmüller 4d121641d1 finish chapter 12
2022-10-17 16:25:41 +02:00

17 lines
434 B
Python

# softmax squashes outputs so that the sum of the outputs equals 1 while preserving the order
import torch
import torch.nn as nn
import numpy as np
def softmax(x):
    """Return the softmax of *x* along axis 0.

    Outputs are positive, sum to 1 along axis 0, and preserve the
    ordering of the inputs.

    Subtracting the per-column max before exponentiating avoids overflow
    for large inputs; the shift cancels out, so the result is
    mathematically identical to exp(x) / sum(exp(x)).
    """
    shifted = np.exp(x - np.max(x, axis=0))
    return shifted / np.sum(shifted, axis=0)
# Demo: apply the hand-rolled numpy softmax to a small example vector.
x = np.array([2.0, 1.0, 0.1])
outputs = softmax(x)  # probabilities: positive, sum to 1, order preserved
print('inputs: ', x)
print('softmax numpy:', outputs)
# Demo: same computation using PyTorch's built-in softmax.
x = torch.tensor([2., 1., .1])
outputs = torch.softmax(x, dim=0)  # dim=0 matches axis=0 in the numpy version
print('inputs: ', x)
# Fixed label: this line prints the torch result, not the numpy one
# (the original said 'softmax numpy:' — a copy-paste error).
print('softmax torch:', outputs)