
Commit

2.0.4
lucidrains committed Feb 15, 2025
1 parent fd70e3e commit 6739534
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "x-transformers"
-version = "2.0.2"
+version = "2.0.4"
description = "X-Transformers"
authors = [
{ name = "Phil Wang", email = "[email protected]" }
2 changes: 1 addition & 1 deletion x_transformers/x_transformers.py
@@ -1282,7 +1282,7 @@ def __init__(
        dim_kv_input = dim_latent_kv

        if exists(latent_rope_subheads):
-            assert not exists(rotate_num_heads)
+            assert not exists(rotate_num_heads), '`rotate_num_heads` cannot be set when multi-latent attention is being used'
            rotate_num_heads = latent_rope_subheads

        k_dim = dim_head * (kv_heads - latent_rope_subheads)
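The substantive change is only the assertion message: when `latent_rope_subheads` is supplied (multi-latent attention in use), `rotate_num_heads` must be left unset, because it is then taken from the latent rope subhead count. Below is a minimal standalone sketch of that guard, not the library's API: `exists` mirrors the helper used throughout x-transformers, and `resolve_rotate_num_heads` is a hypothetical wrapper added purely for illustration.

    def exists(val):
        # same convention as the x-transformers helper: None means "not provided"
        return val is not None

    def resolve_rotate_num_heads(rotate_num_heads = None, latent_rope_subheads = None):
        # hypothetical standalone version of the guard shown in the diff above
        if exists(latent_rope_subheads):
            # multi-latent attention derives the rotated head count itself,
            # so an explicit rotate_num_heads is rejected with the new message
            assert not exists(rotate_num_heads), '`rotate_num_heads` cannot be set when multi-latent attention is being used'
            rotate_num_heads = latent_rope_subheads

        return rotate_num_heads

    # before this commit the same misuse failed with a bare AssertionError;
    # with the change, the message explains the conflict:
    # resolve_rotate_num_heads(rotate_num_heads = 2, latent_rope_subheads = 4)  # AssertionError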
