Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/inc cache propagation #1220

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -938,7 +938,7 @@ impl IntoDiagnostics<FileId> for EvalError {
)])]
}
EvalError::UnboundIdentifier(ident, span_opt) => vec![Diagnostic::error()
.with_message("unbound identifier")
.with_message(format!("unbound identifier {ident}"))
.with_labels(vec![primary_alt(
span_opt.into_opt(),
ident.to_string(),
Expand Down
139 changes: 73 additions & 66 deletions src/eval/cache/incremental.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use std::collections::{HashMap, HashSet};

//! A [Cache] implementation with incremental computation features.
use std::collections::{HashMap, HashSet};
use super::{BlackholedError, Cache, CacheIndex, Closure, Environment, IdentKind};
use crate::{
identifier::Ident,
Expand Down Expand Up @@ -70,7 +69,7 @@ impl IncCache {
idx
}

fn revnode_as_explicit_fun<'a, I>(node: &IncNode, args: I) -> IncNode
fn revnode_as_explicit_fun<'a, I>(node: &mut IncNode, args: I)
where
I: DoubleEndedIterator<Item = &'a Ident>,
{
Expand All @@ -85,16 +84,12 @@ impl IncCache {
let as_function =
args.rfold(body, |built, id| RichTerm::from(Term::Fun(*id, built)));

IncNode::new(
Closure {
body: as_function,
env,
},
node.kind,
node.bty.clone(),
)
node.orig = Closure {
body: as_function,
env,
}
}
_ => node.clone(),
_ => (),
}
}

Expand All @@ -106,31 +101,6 @@ impl IncCache {
}
}

fn propagate_dirty(&mut self, idx: CacheIndex) {
let mut node = self.store.get_mut(idx).unwrap();
node.cached = None;
node.state = IncNodeState::Suspended;

let mut visited = HashSet::new();
let mut stack = node.backlinks.clone();

visited.insert(idx);

while !stack.is_empty() {
let i = stack.pop().unwrap();
visited.insert(i.idx);
let mut current_node = self.store.get_mut(i.idx).unwrap();
current_node.cached = None;
current_node.state = IncNodeState::Suspended;
stack.extend(
current_node
.backlinks
.iter()
.filter(|x| !visited.contains(&x.idx)),
)
}
}

/* Do we need this when we can revert in place?

fn propagate_revert(&mut self, id: Ident, idx: CacheIndex) -> HashMap<Ident, CacheIndex> {
Expand Down Expand Up @@ -161,29 +131,6 @@ impl IncCache {

nodes_reverted
} */

fn smart_clone(&mut self, v: Vec<CacheIndex>) -> HashMap<CacheIndex, CacheIndex> {
let mut new_indices = HashMap::new();

for i in v.iter() {
let current_node = self.store.get(*i).unwrap().clone();
new_indices.insert(*i, self.add_node(current_node));
}

for i in new_indices.values() {
let current_node = self.store.get_mut(*i).unwrap();

for dep in current_node.backlinks.iter_mut() {
dep.idx = *new_indices.get(i).unwrap();
}

for dep in current_node.fwdlinks.iter_mut() {
dep.idx = *new_indices.get(i).unwrap();
}
}

new_indices
}
}

impl Cache for IncCache {
Expand Down Expand Up @@ -345,7 +292,7 @@ impl Cache for IncCache {
env: &mut Environment,
fields: I,
) -> RichTerm {
let node = self.store.get(idx).unwrap();
let node = self.store.get_mut(idx).unwrap();

let mut deps_filter: Box<dyn FnMut(&&Ident) -> bool> = match node.bty.clone() {
BindingType::Revertible(FieldDeps::Known(deps)) => {
Expand All @@ -355,13 +302,10 @@ impl Cache for IncCache {
BindingType::Normal => Box::new(|_: &&Ident| false),
};

let node_as_function = self.add_node(IncCache::revnode_as_explicit_fun(
node,
fields.clone().filter(&mut deps_filter),
));
IncCache::revnode_as_explicit_fun(node, fields.clone().filter(&mut deps_filter));

let fresh_var = Ident::fresh();
env.insert(fresh_var, node_as_function);
env.insert(fresh_var, idx);

let as_function_closurized = RichTerm::from(Term::Var(fresh_var));
let args = fields.filter_map(|id| deps_filter(&id).then(|| RichTerm::from(Term::Var(*id))));
Expand All @@ -370,4 +314,67 @@ impl Cache for IncCache {
RichTerm::from(Term::App(partial_app, arg))
})
}

fn smart_clone(&mut self, v: Vec<CacheIndex>) -> HashMap<CacheIndex, CacheIndex> {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this function needs comments inside

let mut new_indices = HashMap::new();

for i in v.iter() {
let current_node = self.store.get(*i).unwrap().clone();
new_indices.insert(*i, self.add_node(current_node));
}

for i in new_indices.values() {
let current_node = self.store.get_mut(*i).unwrap();

for dep in current_node.backlinks.iter_mut() {
dep.idx = if let Some(idx) = new_indices.get(&dep.idx) {
*idx
} else {
dep.idx
}
}

let mut to_be_updated = vec![];

for dep in current_node.fwdlinks.iter_mut() {
dep.idx = if let Some(idx) = new_indices.get(&dep.idx) {
*idx
} else {
to_be_updated.push(dep.clone());
dep.idx
}
}

for dep in to_be_updated {
let target_node = self.store.get_mut(dep.idx).unwrap();
target_node.backlinks.push(DependencyLink {
id: dep.id,
idx: *i,
});
}
}

new_indices
}

/// Invalidates the cached value of every given node and, transitively, of
/// every node that depends on one of them (following `backlinks`).
/// Invalidated nodes are put back in the suspended state, so they will be
/// recomputed on their next access.
fn propagate_dirty(&mut self, indices: Vec<CacheIndex>) {
    // Indices that have already been invalidated, to terminate on cycles.
    let mut visited = HashSet::new();
    // Work list, seeded with the dirty roots.
    let mut stack = indices;

    while let Some(i) = stack.pop() {
        visited.insert(i);
        let current_node = self.store.get_mut(i).unwrap();
        // Drop the memoized value and suspend the node.
        current_node.cached = None;
        current_node.state = IncNodeState::Suspended;

        // Schedule every reverse dependency that hasn't been processed yet.
        stack.extend(
            current_node
                .backlinks
                .iter()
                .map(|x| x.idx)
                .filter(|x| !visited.contains(x)),
        )
    }
}
}
28 changes: 27 additions & 1 deletion src/eval/cache/lazy.rs
Original file line number Diff line number Diff line change
Expand Up @@ -355,7 +355,7 @@ impl ThunkData {
/// inside a record may be invalidated by merging, and thus need to store the unaltered original
/// expression. Those aspects are handled and discussed in more detail in
/// [InnerThunkData].
#[derive(Clone, Debug, PartialEq)]
#[derive(Clone, Debug)]
pub struct Thunk {
data: Rc<RefCell<ThunkData>>,
ident_kind: IdentKind,
Expand Down Expand Up @@ -560,6 +560,21 @@ impl Thunk {
self.data.borrow().deps()
}
}

impl PartialEq for Thunk {
fn eq(&self, other: &Self) -> bool {
self.data.as_ptr() == other.data.as_ptr() && self.ident_kind == other.ident_kind
}
}

// Pointer-plus-kind equality is reflexive, so `Eq` holds as a marker impl.
impl Eq for Thunk {}

/// Hashes a thunk by its identifier kind together with the identity
/// (address) of its shared data, consistently with `PartialEq` above.
impl std::hash::Hash for Thunk {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // Hashing the fields in sequence produces the same hash stream as
        // hashing the tuple `(ident_kind, raw_ptr)`.
        self.ident_kind.hash(state);
        self.data.as_ptr().hash(state);
    }
}
/// A thunk update frame.
///
/// A thunk update frame is put on the stack whenever a variable is entered, such that once this
Expand Down Expand Up @@ -692,4 +707,15 @@ impl Cache for CBNCache {
) -> Result<Self::UpdateIndex, BlackholedError> {
idx.mk_update_frame()
}

/// For the call-by-need cache, "smart cloning" a set of indices amounts to
/// reverting each one: the result maps every original index to a reverted
/// copy of it.
fn smart_clone(
    &mut self,
    v: Vec<CacheIndex>,
) -> std::collections::HashMap<CacheIndex, CacheIndex> {
    let mut cloned = std::collections::HashMap::new();
    for idx in v {
        let reverted = self.revert(&idx);
        cloned.insert(idx, reverted);
    }
    cloned
}

// Dirtiness propagation is an incremental-evaluation feature; this cache
// implementation keeps no dependency graph, so there is nothing to do.
fn propagate_dirty(&mut self, _indices: Vec<CacheIndex>) {}
}
6 changes: 6 additions & 0 deletions src/eval/cache/mod.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
use std::collections::HashMap;

/// The Nickel generic evaluation cache. This module abstracts away the details for managing
/// suspended computations and their memoization strategies.
///
Expand Down Expand Up @@ -94,4 +96,8 @@ pub trait Cache: Clone {
&mut self,
idx: &mut CacheIndex,
) -> Result<Self::UpdateIndex, BlackholedError>;

/// Clones the cache entries at the given indices, returning a map from
/// each original index to the index of its clone. Implementations may
/// also rewire dependency information between the clones — see the
/// individual cache implementations.
fn smart_clone(&mut self, v: Vec<CacheIndex>) -> HashMap<CacheIndex, CacheIndex>;

/// Invalidates the cached values of the given entries and, depending on
/// the implementation, of the entries that depend on them.
fn propagate_dirty(&mut self, indices: Vec<CacheIndex>);
Comment on lines +100 to +102
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We need to document those functions — even just a draft for now, to be expanded later. Those are the cornerstones of incremental evaluation, and are now used inside merge.

}
19 changes: 13 additions & 6 deletions src/eval/fixpoint.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
//! Compute the fixpoint of a recursive record.
use std::collections::HashSet;

use super::{merge::RevertClosurize, *};
use crate::{label::Label, position::TermPos};
use crate::{label::Label, position::TermPos, term::record::FieldDeps};

// Update the environment of a term by extending it with a recursive environment. In the general
// case, the term is expected to be a variable pointing to the element to be patched. Otherwise, it's
Expand Down Expand Up @@ -83,7 +85,7 @@ pub fn rec_env<'a, I: Iterator<Item = (&'a Ident, &'a Field)>, C: Cache>(
// so we start from in the environment of the original record.
let mut final_env = env.clone();
let id_value = Ident::fresh();
final_env.insert(id_value, idx);
final_env.insert(id_value, idx.clone());

let with_ctr_applied = PendingContract::apply_all(
RichTerm::new(Term::Var(id_value), value.pos),
Expand Down Expand Up @@ -131,10 +133,15 @@ pub fn rec_env<'a, I: Iterator<Item = (&'a Ident, &'a Field)>, C: Cache>(
env: final_env,
};

Ok((
*id,
cache.add(final_closure, IdentKind::Record, BindingType::Normal),
))
let deps = FieldDeps::from(HashSet::from([*id]));
let mut new_idx = cache.add(
final_closure,
IdentKind::Record,
BindingType::Revertible(deps),
);
cache.build_cached(&mut new_idx, &[(*id, idx)]);

Ok((*id, new_idx))
} else {
let error = EvalError::MissingFieldDef {
id: *id,
Expand Down
Loading