deepForest.go
package randomforest

import (
	"fmt"
	"math/rand"
)
// DeepForest is a deep forest implementation consisting of a standard Forest,
// a set of mini forests (Groves), and a final ForestDeep trained on the
// original features extended with the grove votes.
type DeepForest struct {
	Forest         *Forest     // base forest providing the training data and defaults
	ForestDeep     Forest      // final forest trained on original features + grove votes
	Groves         []Forest    // mini forests, each trained on a random feature subset
	NGroves        int         // number of groves
	NFeatures      int         // number of features sampled per grove
	NTrees         int         // number of trees in each grove
	RandomFeatures [][]int     // feature indices selected for each grove
	ResultFeatures [][]float64 // grove votes appended to each training sample
	Results        []float64   // training accuracy of each grove
}
// BuildDeepForest creates a DeepForest from an existing Forest.
func (forest *Forest) BuildDeepForest() DeepForest {
	forest.defaults()
	df := DeepForest{
		Forest:    forest,
		NFeatures: forest.MFeatures,
	}
	return df
}
// Train trains the DeepForest: groves is the number of groves, trees the number
// of trees in each grove, and deepTrees the number of trees in the final deep forest.
func (dForest *DeepForest) Train(groves int, trees int, deepTrees int) {
	dForest.NGroves = groves
	dForest.NTrees = trees
	// build and train groves
	dForest.Groves = []Forest{}
	dForest.RandomFeatures = make([][]int, 0)
	dForest.ResultFeatures = make([][]float64, dForest.Forest.NSize)
	dForest.Results = make([]float64, 0)
	for i := 0; i < dForest.Forest.NSize; i++ {
		dForest.ResultFeatures[i] = make([]float64, 0)
	}
	{
		for i := 0; i < dForest.NGroves; i++ {
			// create grove on a random subset of features
			grove := Forest{}
			x := make([][]float64, 0)
			perm := rand.Perm(dForest.Forest.Features)
			perm = perm[:dForest.NFeatures]
			dForest.RandomFeatures = append(dForest.RandomFeatures, perm)
			for _, datax := range dForest.Forest.Data.X {
				pdatax := make([]float64, dForest.NFeatures)
				for j, p := range perm {
					pdatax[j] = datax[p]
				}
				x = append(x, pdatax)
			}
			grove.Data = ForestData{X: x, Class: dForest.Forest.Data.Class}
			// train grove
			grove.Train(dForest.NTrees)
			dForest.Groves = append(dForest.Groves, grove)
			// store grove votes as extra features and measure grove accuracy
			p := 0
			for j, datax := range x {
				vote := grove.Vote(datax)
				dForest.ResultFeatures[j] = append(dForest.ResultFeatures[j], vote...)
				bestI := -1
				bestV := 0.0
				for k, v := range vote {
					if v > bestV {
						bestV = v
						bestI = k
					}
				}
				if grove.Data.Class[j] == bestI {
					p++
				}
			}
			dForest.Results = append(dForest.Results, float64(p)/float64(dForest.Forest.NSize))
			fmt.Println("Grove", i, float64(p)/float64(dForest.Forest.NSize))
		}
	}
	// create deep forest data: original features extended with the grove votes
	{
		x := make([][]float64, dForest.Forest.NSize)
		for i := 0; i < dForest.Forest.NSize; i++ {
			x[i] = make([]float64, dForest.Forest.Features)
			copy(x[i], dForest.Forest.Data.X[i])
			x[i] = append(x[i], dForest.ResultFeatures[i]...)
		}
		deepData := ForestData{X: x, Class: dForest.Forest.Data.Class}
		dForest.ForestDeep = Forest{
			Data: deepData,
		}
	}
	// build and train deep forest
	dForest.ForestDeep.Train(deepTrees)
}
// Vote returns the averaged class votes of the DeepForest for sample x.
func (dForest *DeepForest) Vote(x []float64) []float64 {
	// collect grove votes and append them to the original features
	deepx := make([]float64, len(x))
	copy(deepx, x)
	for i, g := range dForest.Groves {
		gX := make([]float64, 0)
		for _, p := range dForest.RandomFeatures[i] {
			gX = append(gX, x[p])
		}
		v := g.Vote(gX)
		deepx = append(deepx, v...)
	}
	// average the votes of the trees in the final deep forest
	votes := make([]float64, dForest.ForestDeep.Classes)
	for i := 0; i < dForest.ForestDeep.NTrees; i++ {
		v := dForest.ForestDeep.Trees[i].vote(deepx)
		for j := 0; j < dForest.ForestDeep.Classes; j++ {
			votes[j] += v[j]
		}
	}
	for j := 0; j < dForest.ForestDeep.Classes; j++ {
		votes[j] = votes[j] / float64(dForest.ForestDeep.NTrees)
	}
	return votes
}
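
// deepForestUsageSketch is a minimal usage sketch of the API above. The toy
// dataset and the parameter values (5 groves, 20 trees per grove, 50 deep
// trees) are illustrative assumptions chosen only to show the call sequence
// BuildDeepForest -> Train -> Vote with the Forest and ForestData types used
// in this package; real data and tuning will differ.
func deepForestUsageSketch() {
	// synthetic two-feature, two-class training data (illustrative only)
	x := [][]float64{
		{0.1, 1.2}, {0.3, 1.1}, {0.2, 0.9}, {0.4, 1.3},
		{2.0, 0.2}, {2.2, 0.1}, {1.9, 0.3}, {2.1, 0.4},
	}
	class := []int{0, 0, 0, 0, 1, 1, 1, 1}

	// base forest holding the training data
	forest := Forest{Data: ForestData{X: x, Class: class}}

	// build the deep forest, then train: 5 groves with 20 trees each and
	// 50 trees in the final deep forest
	deep := forest.BuildDeepForest()
	deep.Train(5, 20, 50)

	// averaged class votes for a new sample
	votes := deep.Vote([]float64{0.2, 1.0})
	fmt.Println("votes:", votes)
}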