# Softmax demo: rescales a vector into positive values that sum to 1
# while preserving the ordering of the inputs.
import numpy as np
import torch
import torch.nn as nn
def softmax(x):
    """Return the softmax of *x* along axis 0.

    Outputs are positive, sum to 1 along axis 0, and preserve the
    ordering of the inputs.
    """
    # Subtract the per-column max before exponentiating: softmax(x) is
    # mathematically identical to softmax(x - c), but exp() of a large
    # input overflows to inf and yields nan without this shift.
    shifted = np.exp(x - np.max(x, axis=0, keepdims=True))
    return shifted / np.sum(shifted, axis=0)
# Run the hand-rolled NumPy implementation on a small example vector.
x = np.array([2., 1., .1])
outputs = softmax(x)

print('inputs: ', x)
print('softmax numpy:', outputs)
# Repeat the same computation with PyTorch's built-in softmax; the
# result should match the NumPy version above.
x = torch.tensor([2., 1., .1])
outputs = torch.softmax(x, dim=0)

print('inputs: ', x)
# Label fixed: this result comes from torch, not numpy.
print('softmax torch:', outputs)