Skip to content

Commit 5c0b663

Browse files
authored
sample: separate softmax and temperature transforms (ollama#9732)
1 parent 4aeb67e commit 5c0b663

File tree

3 files changed

+98
-25
lines changed

3 files changed

+98
-25
lines changed

sample/samplers.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -87,8 +87,8 @@ func (s *Sampler) sample(tokens []token) (token, error) {
8787
// topK also sorts the tokens in descending order of logits
8888
tokens = topK(tokens, s.topK)
8989

90-
// token logit values are updated to probabilities
9190
tokens = temperature(tokens, s.temperature)
91+
tokens = softmax(tokens)
9292

9393
tokens = topP(tokens, s.topP)
9494
tokens = minP(tokens, s.minP)

sample/transforms.go

Lines changed: 14 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,18 @@ func (h *tokenHeap) Pop() any {
2525
return x
2626
}
2727

28-
// temperature applies scaling and softmax to the logits
28+
// temperature applies scaling to the logits
2929
func temperature(ts []token, temp float32) []token {
30+
// Clamp temperature to a small positive value (1e-7) to avoid division by zero and numerical instability
31+
temp = max(temp, 1e-7)
32+
for i := range ts {
33+
ts[i].value = ts[i].value / temp
34+
}
35+
return ts
36+
}
37+
38+
// softmax applies normalization to the logits
39+
func softmax(ts []token) []token {
3040
// Find max logit for numerical stability
3141
maxLogit := float32(math.Inf(-1))
3242
for _, t := range ts {
@@ -35,15 +45,14 @@ func temperature(ts []token, temp float32) []token {
3545
}
3646
}
3747

38-
// Apply temperature and compute exp(x - max)
39-
temp = max(temp, 1e-7)
48+
// Compute exp(x - max)
4049
var sum float32
4150
for i, v := range ts {
42-
ts[i].value = float32(math.Exp(float64((v.value - maxLogit) / temp)))
51+
ts[i].value = float32(math.Exp(float64(v.value - maxLogit)))
4352
sum += ts[i].value
4453
}
4554

46-
// Normalize
55+
// exp(x - max) / sum(exp(x - max))
4756
for i := range ts {
4857
ts[i].value /= sum
4958
}

sample/transforms_test.go

Lines changed: 83 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -32,27 +32,83 @@ func compareLogits(t *testing.T, name string, want []float32, got []token) {
3232
}
3333
}
3434

35-
func TestTemperatureAndSoftmax(t *testing.T) {
36-
input := []float32{1, 4, -2, 0}
35+
func TestTemperature(t *testing.T) {
36+
input := []float32{1.0, 4.0, -2.0, 0.0}
3737
got := temperature(toTokens(input), 0.5)
38+
want := []float32{2.0, 8.0, -4.0, 0.0}
39+
compareLogits(t, "temperature(0.5)", want, got)
3840

39-
// Check probabilities sum to 1
40-
var sum float32
41-
for _, token := range got {
42-
sum += token.value
43-
}
44-
if math.Abs(float64(sum-1.0)) > 1e-6 {
45-
t.Errorf("probabilities don't sum to 1: got %f", sum)
46-
}
41+
got = temperature(toTokens(input), 1.0)
42+
want = []float32{1.0, 4.0, -2.0, 0.0}
43+
compareLogits(t, "temperature(1)", want, got)
44+
45+
got = temperature(toTokens(input), 0.0)
46+
want = []float32{1e7, 4e7, -2e7, 0.0}
47+
compareLogits(t, "temperature(0)", want, got)
48+
}
4749

48-
got = temperature(toTokens(input), 1)
49-
// Check probabilities sum to 1
50-
sum = 0.0
51-
for _, token := range got {
52-
sum += token.value
50+
func TestSoftmax(t *testing.T) {
51+
tests := []struct {
52+
name string
53+
input []float32
54+
expected []float32
55+
}{
56+
{
57+
name: "correctness softmax",
58+
input: []float32{1, -2, 3, 0},
59+
expected: []float32{0.113550, 0.005653, 0.839024, 0.041773},
60+
},
61+
{
62+
name: "normal distribution",
63+
input: []float32{0.026986899, 0.043722924, 0.036774673, 0.27755088, 0.0046718004, 0.08582123, 0.20409796, 0.00412893, 0.15720603, 0.045046154, 0.0030491839, 0.01681367},
64+
},
65+
{
66+
name: "single value",
67+
input: []float32{1.0},
68+
},
69+
{
70+
name: "identical values",
71+
input: []float32{0.9, 0.9, 0.9},
72+
},
73+
{
74+
name: "large values",
75+
input: []float32{1000.0, 2000.0, 3000.0},
76+
},
77+
{
78+
name: "small values",
79+
input: []float32{1e-6, 2e-6, 3e-6},
80+
},
81+
{
82+
name: "negative values",
83+
input: []float32{-1.0, -2.0, -3.0},
84+
},
85+
{
86+
name: "mixed values",
87+
input: []float32{-100.0, 0.0, 100.0},
88+
},
5389
}
54-
if math.Abs(float64(sum-1.0)) > 1e-6 {
55-
t.Errorf("probabilities don't sum to 1: got %f", sum)
90+
91+
for _, tt := range tests {
92+
t.Run(tt.name, func(t *testing.T) {
93+
got := softmax(toTokens(tt.input))
94+
95+
if tt.expected != nil {
96+
compareLogits(t, tt.name, tt.expected, got)
97+
return
98+
}
99+
100+
// Check probabilities sum to 1
101+
var sum float32
102+
for _, token := range got {
103+
sum += token.value
104+
if token.value < 0 || token.value > 1 {
105+
t.Errorf("probability out of range [0,1]: got %f", token.value)
106+
}
107+
}
108+
if math.Abs(float64(sum-1.0)) > 1e-6 {
109+
t.Errorf("probabilities don't sum to 1: got %f", sum)
110+
}
111+
})
56112
}
57113
}
58114

@@ -97,7 +153,7 @@ func TestTopP(t *testing.T) {
97153
tokens := toTokens(input)
98154

99155
// First apply softmax to get probabilities
100-
tokens = temperature(tokens, 1)
156+
tokens = softmax(tokens)
101157
tokens = topK(tokens, 20)
102158

103159
// Then apply topP
@@ -115,7 +171,7 @@ func TestMinP(t *testing.T) {
115171
tokens := toTokens(input)
116172

117173
// First apply softmax to get probabilities
118-
tokens = temperature(tokens, 1)
174+
tokens = softmax(tokens)
119175

120176
// Then apply minP
121177
got := minP(tokens, 0.2)
@@ -163,6 +219,14 @@ func BenchmarkTransforms(b *testing.B) {
163219
}
164220
})
165221

222+
b.Run("Softmax", func(b *testing.B) {
223+
b.ResetTimer()
224+
for b.Loop() {
225+
copy(tokensCopy, tokens)
226+
softmax(tokensCopy)
227+
}
228+
})
229+
166230
b.Run("TopK", func(b *testing.B) {
167231
b.ResetTimer()
168232
for b.Loop() {

0 commit comments

Comments (0)