
Commit

add gradient descent
soypat committed May 16, 2023
1 parent e7fe094 commit 1a2fca7
Showing 6 changed files with 164 additions and 12 deletions.
5 changes: 1 addition & 4 deletions genes/constrainedfloat.go
@@ -39,10 +39,7 @@ func (c *ConstrainedFloat) Value() float64 { return c.gene }
// for setting best gene value for a single individual in the
// population by hand between runs.
func (c *ConstrainedFloat) SetValue(f float64) {
-	if f < c.min || f > c.maxMinus1+1 {
-		panic("value not within constraints")
-	}
-	c.gene = f
+	c.gene = c.clamp(f)
}

func (c *ConstrainedFloat) Mutate(rng *rand.Rand) {
5 changes: 5 additions & 0 deletions genes/constrainednormal.go
@@ -50,3 +50,8 @@ func (cn *ConstrainedNormalDistr) clamp() {
	max := cn.maxMinus3sd + sd3
	cn.gene = math.Max(min, math.Min(max, cn.gene))
}

// SetValue sets the gene's value, clamping f so it remains within the distribution's constraints.
func (cn *ConstrainedNormalDistr) SetValue(f float64) {
	cn.gene = f
	cn.clamp()
}
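
Note: with this change, SetValue on both constrained gene types clamps out-of-range input instead of panicking. A minimal sketch of the new behavior, using NewConstrainedFloat(start, min, max) as it appears later in this commit and assuming clamp snaps the value to the [min, max] interval:

package main

import (
	"fmt"

	"github.com/soypat/mu8/genes"
)

func main() {
	cf := genes.NewConstrainedFloat(0, 0, 1) // start=0, min=0, max=1
	cf.SetValue(2.5)                         // previously panicked; now clamped
	fmt.Println(cf.Value())                  // expected: 1, the upper bound
}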
2 changes: 1 addition & 1 deletion genes/genes.go
@@ -17,7 +17,7 @@ var (
func castGene[T mu8.Gene](gene mu8.Gene) T {
	g, ok := gene.(T)
	if !ok {
-		panic(ErrMismatchedGeneType.Error())
+		panic(fmt.Errorf("%w: cast %T->%T failed", ErrMismatchedGeneType, gene, g))
	}
	return g
}
40 changes: 40 additions & 0 deletions genes/grad.go
@@ -0,0 +1,40 @@
package genes

import "github.com/soypat/mu8"

const defaultStep = 5e-7

var (
	_ mu8.GeneGrad = (*ConstrainedFloatGrad)(nil)
)

// ConstrainedFloatGrad is a ConstrainedFloat that also satisfies mu8.GeneGrad
// by providing a finite-difference step size.
type ConstrainedFloatGrad struct {
	ConstrainedFloat
	stepMinusDefaultStep float64
}

// Step returns the finite-difference step size. The zero value reports defaultStep.
func (cf *ConstrainedFloatGrad) Step() float64 {
	return cf.stepMinusDefaultStep + defaultStep
}

// NewConstrainedFloatGrad returns a gradient-capable constrained float gene.
func NewConstrainedFloatGrad(start, min, max, step float64) *ConstrainedFloatGrad {
	return &ConstrainedFloatGrad{
		ConstrainedFloat:     *NewConstrainedFloat(start, min, max),
		stepMinusDefaultStep: step - defaultStep,
	}
}

// ConstrainedNormalDistrGrad is a ConstrainedNormalDistr that also satisfies
// mu8.GeneGrad by providing a finite-difference step size.
type ConstrainedNormalDistrGrad struct {
	ConstrainedNormalDistr
	stepMinusDefaultStep float64
}

// Step returns the finite-difference step size. The zero value reports defaultStep.
func (cn *ConstrainedNormalDistrGrad) Step() float64 {
	return cn.stepMinusDefaultStep + defaultStep
}

// NewConstrainedNormalGrad returns a gradient-capable normally-mutating constrained gene.
func NewConstrainedNormalGrad(start, stddev, min, max, step float64) *ConstrainedNormalDistrGrad {
	return &ConstrainedNormalDistrGrad{
		ConstrainedNormalDistr: *NewConstrainedNormalDistr(start, stddev, min, max),
		stepMinusDefaultStep:   step - defaultStep,
	}
}
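
Note: the step is stored as an offset from defaultStep so that the zero value of either *Grad type still reports a usable step size. A small sketch of that behavior (the constructor arguments here are illustrative only):

package main

import (
	"fmt"

	"github.com/soypat/mu8/genes"
)

func main() {
	// Zero value: stepMinusDefaultStep is 0, so Step() falls back to defaultStep (5e-7).
	var zero genes.ConstrainedFloatGrad
	fmt.Println(zero.Step()) // 5e-07

	// An explicitly constructed gene reports the step it was given.
	g := genes.NewConstrainedFloatGrad(0.5, 0, 1, 1e-3)
	fmt.Println(g.Step()) // ~0.001, the step passed in (up to floating-point rounding)
}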
71 changes: 68 additions & 3 deletions mu8.go
@@ -3,6 +3,7 @@ package mu8
import (
	"context"
	"errors"
+	"math"
	"math/rand"
)

@@ -40,9 +41,7 @@ type Gene interface {
	// in receiving Gene.
	CloneFrom(Gene)

-	// Mutate performs a random mutation on the receiver. rand is a random number between [0, 1)
-	// which is usually calculated beforehand to determine if Gene is to be mutated.
-	// The rng argument intends to aid with randomness and Mutate implementation process.
+	// Mutate performs a random mutation on the receiver with the aid of rng.
	Mutate(rng *rand.Rand)
}

@@ -81,3 +80,69 @@ func Clone(dst, src Genome) error {
	}
	return nil
}

// GenomeGrad is a Genome that can be used with gradient descent.
type GenomeGrad interface {
	Simulate(context.Context) (fitness float64)
	GetGeneGrad(i int) GeneGrad
	Len() int
}

// GeneGrad is a Gene that can be used with gradient descent.
type GeneGrad interface {
	SetValue(float64)
	Value() float64
	Step() float64
}

// Gradient computes the gradient of the GenomeGrad startIndividual using finite
// differences and stores the result in grad. The length of grad must match the
// number of genes in startIndividual. If newIndividual is non-nil it is called to
// create a fresh individual for each gene evaluation, seeded from startIndividual;
// this is useful when an individual cannot be reused between simulations. If
// newIndividual is nil the same individual is reused for all runs.
func Gradient[T GenomeGrad](ctx context.Context, grad []float64, startIndividual T, newIndividual func() T) error {
	if startIndividual.Len() != len(grad) {
		panic("length of grad must equal number of genes in startIndividual")
	}
	startFitness := startIndividual.Simulate(ctx)
	for i := 0; i < startIndividual.Len() && ctx.Err() == nil; i++ {
		if newIndividual != nil {
			blankSlate := newIndividual()
			CloneGrad(blankSlate, startIndividual)
			startIndividual = blankSlate
		}
		gene := startIndividual.GetGeneGrad(i)
		start := gene.Value()
		step := gene.Step()
		if step == 0 {
			return errors.New("zero step size")
		}
		gene.SetValue(start + step)
		newFitness := startIndividual.Simulate(ctx)
		if newFitness < 0 {
			return errors.New("negative fitness")
		} else if math.IsNaN(newFitness) || math.IsInf(newFitness, 0) {
			return errors.New("invalid fitness (NaN or Inf)")
		}
		grad[i] = (newFitness - startFitness) / step
		gene.SetValue(start) // Return gene to original value.
	}
	return nil
}

// CloneGrad clones all the genes of src to dst. It does not modify src.
func CloneGrad(dst, src GenomeGrad) error {
	if dst == nil {
		return errors.New("got nil destination for CloneGrad")
	} else if src == nil {
		return errors.New("got nil source to CloneGrad")
	} else if dst.Len() != src.Len() {
		return errors.New("destination and source genome length mismatch")
	}

	for i := 0; i < dst.Len(); i++ {
		dst.GetGeneGrad(i).SetValue(src.GetGeneGrad(i).Value())
	}
	return nil
}
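
Note: the quantity Gradient stores in grad[i] is a forward finite difference of the fitness with respect to gene i, with the step h taken from gene.Step():

	grad[i] ≈ (f(x + h·eᵢ) − f(x)) / h

where f is the fitness returned by Simulate and eᵢ is the unit vector for gene i. Since mu8 maximizes fitness, stepping the genes along this gradient (as the test example below does) performs gradient ascent on fitness.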
53 changes: 49 additions & 4 deletions mu8_test.go
@@ -62,15 +62,16 @@ func ExamplePopulation() {
}

type mygenome struct {
-	genoma []genes.ConstrainedNormalDistr
+	genoma []genes.ConstrainedNormalDistrGrad
}

func newGenome(n int) *mygenome {
-	return &mygenome{genoma: make([]genes.ConstrainedNormalDistr, n)}
+	return &mygenome{genoma: make([]genes.ConstrainedNormalDistrGrad, n)}
}

-func (g *mygenome) GetGene(i int) mu8.Gene { return &g.genoma[i] }
-func (g *mygenome) Len() int { return len(g.genoma) }
+func (g *mygenome) GetGene(i int) mu8.Gene { return &g.genoma[i].ConstrainedNormalDistr }
+func (g *mygenome) GetGeneGrad(i int) mu8.GeneGrad { return &g.genoma[i] }
+func (g *mygenome) Len() int { return len(g.genoma) }

// Simulate simply adds the genes. We'd expect the genes to reach the max values of the constraint.
func (g *mygenome) Simulate(context.Context) (fitness float64) {
@@ -123,3 +124,47 @@ func ExampleIslands() {
	// champ fitness=0.956
	// champ fitness=0.956
}

func ExampleGradient() {
	src := rand.NewSource(1)
	const (
		genomelen      = 6
		gradMultiplier = 10.0
		epochs         = 6
	)
	// Create new individual and mutate it randomly.
	individual := newGenome(genomelen)
	rng := rand.New(src)
	for i := 0; i < genomelen; i++ {
		individual.GetGene(i).Mutate(rng)
	}
	// Prepare for gradient descent.
	grads := make([]float64, genomelen)
	ctx := context.Background()
	// Champion will harbor our best individual.
	champion := newGenome(genomelen)
	for epoch := 0; epoch < epochs; epoch++ {
		err := mu8.Gradient(ctx, grads, individual, func() *mygenome {
			return newGenome(genomelen)
		})
		if err != nil {
			panic(err)
		}
		// Apply gradients.
		for i := 0; i < individual.Len(); i++ {
			gene := individual.GetGeneGrad(i)
			grad := grads[i]
			gene.SetValue(gene.Value() + grad*gradMultiplier)
		}
		mu8.CloneGrad(champion, individual)
		fmt.Printf("fitness=%f with grads=%f\n", individual.Simulate(ctx), grads)
	}

	// Output:
	// fitness=0.467390 with grads=[-0.055556 -0.055556 -0.055556 0.055556 0.055556 0.055556]
	// fitness=0.630529 with grads=[-0.055556 -0.055556 -0.055556 0.055556 0.055556 0.055556]
	// fitness=0.784850 with grads=[-0.055556 -0.055556 -0.055556 0.000000 0.055556 0.055556]
	// fitness=0.913839 with grads=[-0.055556 -0.055556 -0.055556 0.000000 0.055556 0.055556]
	// fitness=0.994674 with grads=[-0.055556 -0.055556 -0.055556 0.000000 0.055556 0.055556]
	// fitness=1.000000 with grads=[-0.055556 -0.055556 -0.055556 0.000000 0.000000 0.000000]
}
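
Note: the update applied inside the epoch loop above is a plain gradient-ascent step on each gene value, roughly

	xᵢ ← xᵢ + gradMultiplier · grad[i]

with gradMultiplier = 10 acting as the learning rate; SetValue then clamps the result back into the gene's constraints, which is why the fitness saturates at 1.000000 once the genes hit their bounds.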
