Add autodiff module and some examples.
Showing 4 changed files with 68 additions and 5 deletions.
@@ -0,0 +1,40 @@
-- The following example is ported from the JAX autodiff cookbook at
-- https://jax.readthedocs.io/en/latest/notebooks/autodiff_cookbook.html

import "../lib/github.com/diku-dk/autodiff/autodiff"

def dot a b = f64.sum (map2 (*) a b)

def sigmoid x =
  0.5 * (f64.tanh(x/2)+1)

def predict W b inputs =
  sigmoid(dot inputs W + b)

def inputs : [][]f64 =
  [[0.52, 1.12, 0.77],
   [0.88, -1.08, 0.15],
   [0.52, 0.06, -1.30],
   [0.74, -2.49, 1.39]]

def targets =
  [true,true,false,true]

def vecadd = map2 (f64.+)
def vecmul = map2 (f64.*)

-- Binary cross-entropy (negative log-likelihood) of the predictions
-- against the boolean targets.
def loss W b =
  let preds = map (predict W b) inputs
  let label_probs = (preds `vecmul` map f64.bool targets)
                    `vecadd` (map (1-) preds `vecmul`
                              map ((1-) <-< f64.bool) targets)
  in -f64.sum(map f64.log label_probs)

-- Not going to import random number generation just for this.  These
-- are made with 'futhark dataset'.
def W = [0.12152684143560777f64, 0.5526745035133085f64, 0.5189896463245001f64]
def b = 0.12152684143560777f64

def Wb_grad = grad64 (\(W,b) -> loss W b) (W,b)
def W_grad = Wb_grad.0
def b_grad = Wb_grad.1
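To inspect the result, one could append an entry point to this example and run it, e.g. by compiling with futhark c or loading the file in futhark repl. The entry point below is a sketch and not part of the commit; it only exposes the values already computed above.

-- Sketch (not in the commit): expose the gradients as an entry point,
-- so that running the compiled program prints them.
entry main = (W_grad, b_grad)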
@@ -0,0 +1,21 @@
-- | Various utilities for performing AD.

import "onehot"

local def singular 'a (x: onehot.gen [1] a) = onehot.onehot x 0

-- | Compute the gradient of a scalar-valued function given a one-hot
-- generator for its result.
def grad_unit gen f x = vjp f x (singular gen)

-- | Convenience function for computing the gradient of an
-- 'f32'-valued differentiable function.
def grad32 = grad_unit onehot.f32

-- | Convenience function for computing the gradient of an
-- 'f64'-valued differentiable function.
def grad64 = grad_unit onehot.f64

-- | Compute the gradient of an arbitrary differentiable function
-- given a one-hot generator for its result.
def grad_rev gen f x = map (vjp f x) (onehots gen)
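As a usage note (not part of the commit): since onehot.f64 generates the single one-hot value for an f64 result, grad64 applies to any function returning one f64, whatever the argument type, just as the example above differentiates with respect to a (vector, scalar) pair. A minimal sketch with a made-up function:

-- Sketch (not in the library): gradient of a scalar function of a pair.
-- Expected result: (6.0, 1.0), i.e. the partial derivatives of x*x + y
-- at the point (3, 1).
def pair_grad = grad64 (\(x,y) -> x*x + y) (3f64, 1f64)

grad_rev generalises this by mapping vjp over every one-hot of the result, producing one such gradient per output component.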