
Commit

Prepare for release: Fix formatting, gh-pages script, bump version number
lukstafi committed Dec 18, 2024
1 parent 9e9e35f commit 6180fdc
Showing 8 changed files with 15 additions and 17 deletions.
6 changes: 3 additions & 3 deletions arrayjit.opam
@@ -1,6 +1,6 @@
# This file is generated by dune, edit dune-project instead
opam-version: "2.0"
version: "0.4.1"
version: "0.5.0.beta"
synopsis:
"An array language compiler with multiple backends (CPU, CUDA), staged compilation"
description:
@@ -37,11 +37,11 @@ depends: [
"odoc" {with-doc}
]
depopts: [
"cudajit" {>= "0.6.0"}
"cudajit" {>= "0.6.1"}
"gccjit" {>= "0.3.2"}
]
conflicts: [
"cudajit" {< "0.6.0"}
"cudajit" {< "0.6.1"}
"gccjit" {< "0.3.2"}
]
build: [
2 changes: 1 addition & 1 deletion arrayjit/lib/assignments.ml
@@ -364,7 +364,7 @@ let fprint_hum ?name ?static_indices () ppf c =

let%track6_sexp lower ~unoptim_ll_source ~ll_source ~cd_source ~name static_indices (proc : t) :
Low_level.optimized =
-let llc: Low_level.t = to_low_level proc in
+let llc : Low_level.t = to_low_level proc in
(* Generate the low-level code before outputting the assignments, to force projections. *)
(match cd_source with
| None -> ()
4 changes: 1 addition & 3 deletions arrayjit/lib/backends.ml
@@ -460,9 +460,7 @@ let finalize (type buffer_ptr dev runner event)
and type runner = runner
and type event = event) (ctx : Backend.context) : unit =
Option.iter Backend.free_buffer ~f:(fun mem_free ->
-if
-Atomic.compare_and_set ctx.finalized false true
-then (
+if Atomic.compare_and_set ctx.finalized false true then (
Backend.await ctx.stream;
Map.iteri ctx.ctx_arrays ~f:(fun ~key ~data ->
if
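The reformatted condition above guards finalization with an atomic flag so the cleanup runs at most once. A minimal standalone sketch of that compare-and-set pattern, using OCaml's Stdlib Atomic; the names finalize_once and release and the printed message are illustrative, not OCANNL's API:

(* The flag flips from false to true at most once, so [release] runs
   exactly once even if [finalize_once] is called again or concurrently. *)
let finalize_once ~release (finalized : bool Atomic.t) : unit =
  if Atomic.compare_and_set finalized false true then release ()

let () =
  let finalized = Atomic.make false in
  finalize_once ~release:(fun () -> print_endline "released") finalized;
  (* Second call is a no-op: the flag is already true. *)
  finalize_once ~release:(fun () -> print_endline "released") finalized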
4 changes: 1 addition & 3 deletions bin/hello_world_op.ml
@@ -208,9 +208,7 @@ let%track2_sexp _Very_big_tensor (() : unit) : unit =
let stream = Backend.(new_stream @@ get_device ~ordinal:0) in
let ctx = Backend.make_context stream in
Rand.init 0;
-let hey =
-TDSL.range_of_shape ~batch_dims:[ 6 ] ~input_dims:[ 7; 8 ] ~output_dims:[ 9 ] ()
-in
+let hey = TDSL.range_of_shape ~batch_dims:[ 6 ] ~input_dims:[ 7; 8 ] ~output_dims:[ 9 ] () in
let%op ye = (hey * (1 + 1)) - 10 in
Train.forward_and_forget backend ctx ye;
Tensor.print ~with_code:false ~with_grad:false `Default hey;
6 changes: 4 additions & 2 deletions bin/moons_benchmark.ml
@@ -186,7 +186,9 @@ let classify_moons ~seed ~on_device ~inlining_cutoff ~num_streams ~batch_size ~b
result_label = "init time in sec, min loss, last loss";
result =
[%sexp_of: float * float * float]
-(init_time_in_sec, List.reduce_exn rev_epoch_losses ~f:Float.min, List.hd_exn rev_epoch_losses);
+( init_time_in_sec,
+  List.reduce_exn rev_epoch_losses ~f:Float.min,
+  List.hd_exn rev_epoch_losses );
}
in
Stdio.printf "\n\n%!";
@@ -211,7 +213,7 @@ let _cuda_benchmarks =
~f:(fun batch_size ->
List.concat_map [ (* 0; 1; 2; *) 3 ] ~f:(fun inlining_cutoff ->
List.concat_map [ (* 1; 3; *) 7 (* *) ] ~f:(fun seed ->
List.concat_map [ (* "gccjit" ; "cuda";"sync_cc" ; *) "cc"]
List.concat_map [ (* "gccjit" ; "cuda";"sync_cc" ; *) "cc" ]
~f:(fun backend_name ->
List.concat_map [ (* CDSL.double; *) CDSL.single (* ; CDSL.half *) ]
~f:(fun value_prec ->
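For context on the rewrapped expression above: with the ppx_sexp_conv preprocessor, [%sexp_of: float * float * float] expands to a function of type float * float * float -> Sexplib0.Sexp.t, which is what the benchmark's result field stores. A minimal sketch under that assumption, with made-up numbers:

(* Serialize a triple of floats to an s-expression and print it. *)
let () =
  let result = [%sexp_of: float * float * float] (0.37, 0.012, 0.015) in
  print_endline (Sexplib0.Sexp.to_string_hum result)
  (* prints (0.37 0.012 0.015) *)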
6 changes: 3 additions & 3 deletions dune-project
@@ -4,7 +4,7 @@

(name ocannl)

-(version 0.4.1)
+(version 0.5.0.beta)

(generate_opam_files true)

@@ -80,12 +80,12 @@
(>= 2.0.0)))
(depopts
(cudajit
-(>= 0.6.0))
+(>= 0.6.1))
(gccjit
(>= 0.3.2)))
(conflicts
(cudajit
-(< 0.6.0))
+(< 0.6.1))
(gccjit
(< 0.3.2)))
(tags
2 changes: 1 addition & 1 deletion neural_nets_lib.opam
@@ -1,6 +1,6 @@
# This file is generated by dune, edit dune-project instead
opam-version: "2.0"
version: "0.4.1"
version: "0.5.0.beta"
synopsis:
"A from-scratch Deep Learning framework with an optimizing compiler, shape inference, concise syntax"
description:
2 changes: 1 addition & 1 deletion ocannl_npy.opam
@@ -1,6 +1,6 @@
# This file is generated by dune, edit dune-project instead
opam-version: "2.0"
version: "0.4.1"
version: "0.5.0.beta"
synopsis: "Numpy file format support for ocaml"
maintainer: ["Lukasz Stafiniak <[email protected]>"]
authors: ["Laurent Mazare"]
