From 3e93b948a2ecff482e0bd2530abcc32dd73bd45a Mon Sep 17 00:00:00 2001
From: jedel1043
Date: Sat, 4 Jan 2025 15:38:50 -0600
Subject: [PATCH] Enable sm test suite and fix ephemeron bug

---
 .vscode/launch.json                             |   2 +-
 core/engine/src/builtins/array_buffer/mod.rs    |   1 +
 .../engine/src/builtins/array_buffer/utils.rs   |  41 ++++++
 .../src/builtins/typed_array/builtin.rs         | 138 ++++++++++--------
 core/engine/src/environments/runtime/mod.rs     |   1 +
 core/gc/src/lib.rs                              |  53 ++-----
 core/gc/src/test/weak.rs                        |  44 +++++-
 core/gc/src/trace.rs                            |  16 +-
 test262_config.toml                             |  12 +-
 tests/tester/src/main.rs                        |  11 +-
 tests/tester/src/read.rs                        |  36 +++--
 11 files changed, 234 insertions(+), 121 deletions(-)

diff --git a/.vscode/launch.json b/.vscode/launch.json
index 3b441e7c6fc..e4ae009700c 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -42,7 +42,7 @@
                 "program": "${workspaceFolder}/target/debug/boa_tester.exe"
             },
             "program": "${workspaceFolder}/target/debug/boa_tester",
-            "args": ["run", "-s", "${input:testPath}", "-vvv"],
+            "args": ["run", "-s", "${input:testPath}", "-vvv", "-d"],
            "sourceLanguages": ["rust"],
             "preLaunchTask": "Cargo Build boa_tester"
         }
diff --git a/core/engine/src/builtins/array_buffer/mod.rs b/core/engine/src/builtins/array_buffer/mod.rs
index 921fe1a0289..7406017d81d 100644
--- a/core/engine/src/builtins/array_buffer/mod.rs
+++ b/core/engine/src/builtins/array_buffer/mod.rs
@@ -158,6 +158,7 @@ impl BufferObject {
     /// Gets the mutable buffer data of the object
     #[inline]
+    #[track_caller]
     pub(crate) fn as_buffer_mut(
         &self,
     ) -> BufferRefMut<
diff --git a/core/engine/src/builtins/array_buffer/utils.rs b/core/engine/src/builtins/array_buffer/utils.rs
index d3c2e9b6c8d..15be2ff304d 100644
--- a/core/engine/src/builtins/array_buffer/utils.rs
+++ b/core/engine/src/builtins/array_buffer/utils.rs
@@ -211,6 +211,19 @@ impl SliceRefMut<'_> {
         }
     }
 
+    /// Gets a subslice of this `SliceRefMut`.
+    pub(crate) fn subslice<I>(&self, index: I) -> SliceRef<'_>
+    where
+        I: SliceIndex<[u8], Output = [u8]> + SliceIndex<[AtomicU8], Output = [AtomicU8]>,
+    {
+        match self {
+            Self::Slice(buffer) => SliceRef::Slice(buffer.get(index).expect("index out of bounds")),
+            Self::AtomicSlice(buffer) => {
+                SliceRef::AtomicSlice(buffer.get(index).expect("index out of bounds"))
+            }
+        }
+    }
+
     /// Gets a mutable subslice of this `SliceRefMut`.
     pub(crate) fn subslice_mut<I>(&mut self, index: I) -> SliceRefMut<'_>
     where
@@ -403,6 +416,33 @@ pub(crate) unsafe fn memcpy(src: BytesConstPtr, dest: BytesMutPtr, count: usize)
     }
 }
 
+/// Copies `count` bytes from the position `from` to the position `to` of the buffer pointed to
+/// by `ptr`, but always copying from left to right.
+///
+/// # Safety
+///
+/// - `ptr` must be valid from the offset `ptr + from` for `count` reads of bytes.
+/// - `ptr` must be valid from the offset `ptr + to` for `count` writes of bytes.
+// This looks like a worse version of `memmove`... and it is exactly that...
+// but it's the correct behaviour for a weird usage of `%TypedArray%.prototype.slice` so ¯\_(ツ)_/¯.
+// Obviously don't use this if you need to implement something that requires a "proper" memmove.
+pub(crate) unsafe fn memmove_naive(ptr: BytesMutPtr, from: usize, to: usize, count: usize) {
+    match ptr {
+        // SAFETY: The invariants of this operation are ensured by the caller of the function.
+        BytesMutPtr::Bytes(ptr) => unsafe {
+            for i in 0..count {
+                ptr::copy(ptr.add(from + i), ptr.add(to + i), 1);
+            }
+        },
+        // SAFETY: The invariants of this operation are ensured by the caller of the function.
+        BytesMutPtr::AtomicBytes(ptr) => unsafe {
+            let src = ptr.add(from);
+            let dest = ptr.add(to);
+            copy_shared_to_shared(src, dest, count);
+        },
+    }
+}
+
 /// Copies `count` bytes from the position `from` to the position `to` in `buffer`.
 ///
 /// # Safety
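The property that separates `memmove_naive` from a real `memmove` is that a forward overlapping copy (`to > from`) reads bytes that earlier iterations already wrote. A minimal safe-slice sketch of the same left-to-right semantics (the helper name and the plain `u8` slice are illustrative, not part of the patch):

    fn memmove_naive_safe(buf: &mut [u8], from: usize, to: usize, count: usize) {
        // Copy strictly left to right, one byte at a time.
        for i in 0..count {
            buf[to + i] = buf[from + i];
        }
    }

    fn main() {
        let mut buf = [1u8, 2, 3, 4, 5];
        memmove_naive_safe(&mut buf, 0, 1, 4);
        // The first byte "smears" through the overlap, which is the byte-by-byte
        // behaviour `%TypedArray%.prototype.slice` requires here...
        assert_eq!(buf, [1, 1, 1, 1, 1]);
        // ...whereas a proper memmove would have produced [1, 1, 2, 3, 4].
    }
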
diff --git a/core/engine/src/builtins/typed_array/builtin.rs b/core/engine/src/builtins/typed_array/builtin.rs
index 51690f9fb6a..eeeb1edee72 100644
--- a/core/engine/src/builtins/typed_array/builtin.rs
+++ b/core/engine/src/builtins/typed_array/builtin.rs
@@ -9,7 +9,7 @@ use num_traits::Zero;
 use super::{
     object::typed_array_set_element, ContentType, TypedArray, TypedArrayKind, TypedArrayMarker,
 };
-use crate::value::JsVariant;
+use crate::{builtins::array_buffer::utils::memmove_naive, value::JsVariant};
 use crate::{
     builtins::{
         array::{find_via_predicate, ArrayIterator, Direction},
@@ -2048,8 +2048,8 @@ impl BuiltinTypedArray {
         // a. Set taRecord to MakeTypedArrayWithBufferWitnessRecord(O, seq-cst).
         // b. If IsTypedArrayOutOfBounds(taRecord) is true, throw a TypeError exception.
-        let src_buf_borrow = src_borrow.data.viewed_array_buffer().as_buffer();
-        let Some(src_buf) = src_buf_borrow
+        let mut src_buf_borrow = src_borrow.data.viewed_array_buffer().as_buffer_mut();
+        let Some(mut src_buf) = src_buf_borrow
             .bytes(Ordering::SeqCst)
             .filter(|s| !src_borrow.data.is_out_of_bounds(s.len()))
         else {
@@ -2067,41 +2067,88 @@
         // f. Let targetType be TypedArrayElementType(A).
         let target_type = target_borrow.data.kind();
 
+        if src_type != target_type {
+            // h. Else,
+            drop(src_buf_borrow);
+            drop((src_borrow, target_borrow));
+
+            // i. Let n be 0.
+            // ii. Let k be startIndex.
+            // iii. Repeat, while k < endIndex,
+            let src = src.upcast();
+            let target = target.upcast();
+            for (n, k) in (start_index..end_index).enumerate() {
+                // 1. Let Pk be ! ToString(𝔽(k)).
+                // 2. Let kValue be ! Get(O, Pk).
+                let k_value = src.get(k, context).expect("Get cannot fail here");
+
+                // 3. Perform ! Set(A, ! ToString(𝔽(n)), kValue, true).
+                target
+                    .set(n, k_value, true, context)
+                    .expect("Set cannot fail here");
+
+                // 4. Set k to k + 1.
+                // 5. Set n to n + 1.
+            }
+
+            // 15. Return A.
+            return Ok(target.into());
+        }
+
         // g. If srcType is targetType, then
-        if src_type == target_type {
-            {
-                let byte_count = count * src_type.element_size() as usize;
+        {
+            let byte_count = count * src_type.element_size() as usize;
 
-                // i. NOTE: The transfer must be performed in a manner that preserves the bit-level encoding of the source data.
-                // ii. Let srcBuffer be O.[[ViewedArrayBuffer]].
-                // iii. Let targetBuffer be A.[[ViewedArrayBuffer]].
-                let target_borrow = target_borrow;
-                let mut target_buf = target_borrow.data.viewed_array_buffer().as_buffer_mut();
-                let mut target_buf = target_buf
-                    .bytes_with_len(byte_count)
-                    .expect("newly created array cannot be detached");
+            // i. NOTE: The transfer must be performed in a manner that preserves the bit-level encoding of the source data.
+            // ii. Let srcBuffer be O.[[ViewedArrayBuffer]].
+            // iii. Let targetBuffer be A.[[ViewedArrayBuffer]].
+
+            // iv. Let elementSize be TypedArrayElementSize(O).
+            let element_size = src_type.element_size();
 
-                // iv. Let elementSize be TypedArrayElementSize(O).
-                let element_size = src_type.element_size();
+            // v. Let srcByteOffset be O.[[ByteOffset]].
+            let src_byte_offset = src_borrow.data.byte_offset();
 
-                // v. Let srcByteOffset be O.[[ByteOffset]].
-                let src_byte_offset = src_borrow.data.byte_offset();
+            // vi. Let srcByteIndex be (startIndex × elementSize) + srcByteOffset.
+            let src_byte_index = (start_index * element_size + src_byte_offset) as usize;
 
-                // vi. Let srcByteIndex be (startIndex × elementSize) + srcByteOffset.
-                let src_byte_index = (start_index * element_size + src_byte_offset) as usize;
+            // vii. Let targetByteIndex be A.[[ByteOffset]].
+            let target_byte_index = target_borrow.data.byte_offset() as usize;
 
-                // vii. Let targetByteIndex be A.[[ByteOffset]].
-                let target_byte_index = target_borrow.data.byte_offset() as usize;
+            // viii. Let endByteIndex be targetByteIndex + (countBytes × elementSize).
+            // Not needed by the impl.
 
-                // viii. Let endByteIndex be targetByteIndex + (countBytes × elementSize).
-                // Not needed by the impl.
+            // ix. Repeat, while targetByteIndex < endByteIndex,
+            // 1. Let value be GetValueFromBuffer(srcBuffer, srcByteIndex, uint8, true, unordered).
+            // 2. Perform SetValueInBuffer(targetBuffer, targetByteIndex, uint8, value, true, unordered).
+            // 3. Set srcByteIndex to srcByteIndex + 1.
+            // 4. Set targetByteIndex to targetByteIndex + 1.
+            if BufferObject::equals(
+                src_borrow.data.viewed_array_buffer(),
+                target_borrow.data.viewed_array_buffer(),
+            ) {
+                // cannot borrow the target mutably (overlapping bytes), but we can move the data instead.
 
-                // ix. Repeat, while targetByteIndex < endByteIndex,
-                // 1. Let value be GetValueFromBuffer(srcBuffer, srcByteIndex, uint8, true, unordered).
-                // 2. Perform SetValueInBuffer(targetBuffer, targetByteIndex, uint8, value, true, unordered).
-                // 3. Set srcByteIndex to srcByteIndex + 1.
-                // 4. Set targetByteIndex to targetByteIndex + 1.
+                #[cfg(debug_assertions)]
+                {
+                    assert!(src_buf.subslice_mut(src_byte_index..).len() >= byte_count);
+                    assert!(src_buf.subslice_mut(target_byte_index..).len() >= byte_count);
+                }
+
+                // SAFETY: All previous checks put the copied bytes at least within the bounds of `src_buf`.
+                unsafe {
+                    memmove_naive(
+                        src_buf.as_ptr(),
+                        src_byte_index,
+                        target_byte_index,
+                        byte_count,
+                    );
+                }
+            } else {
+                let mut target_buf = target_borrow.data.viewed_array_buffer().as_buffer_mut();
+                let mut target_buf = target_buf
+                    .bytes(Ordering::SeqCst)
+                    .expect("newly created array cannot be detached");
                 let src = src_buf.subslice(src_byte_index..);
                 let mut target = target_buf.subslice_mut(target_byte_index..);
@@ -2118,36 +2165,11 @@
                     memcpy(src.as_ptr(), target.as_ptr(), byte_count);
                 }
             }
-
-            // 15. Return A.
-            Ok(target.upcast().into())
-        } else {
-            // h. Else,
-            drop(src_buf_borrow);
-            drop((src_borrow, target_borrow));
-
-            // i. Let n be 0.
-            // ii. Let k be startIndex.
-            // iii. Repeat, while k < endIndex,
-            let src = src.upcast();
-            let target = target.upcast();
-            for (n, k) in (start_index..end_index).enumerate() {
-                // 1. Let Pk be ! ToString(𝔽(k)).
-                // 2. Let kValue be ! Get(O, Pk).
-                let k_value = src.get(k, context).expect("Get cannot fail here");
-
-                // 3. Perform ! Set(A, ! ToString(𝔽(n)), kValue, true).
-                target
-                    .set(n, k_value, true, context)
-                    .expect("Set cannot fail here");
-
-                // 4. Set k to k + 1.
-                // 5. Set n to n + 1.
-            }
-
-            // 15. Return A.
-            Ok(target.into())
         }
+
+        drop(target_borrow);
+        // 15. Return A.
+        Ok(target.upcast().into())
     }
 
     /// `%TypedArray%.prototype.some ( callbackfn [ , thisArg ] )`
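The `BufferObject::equals` branch above exists because `Symbol.species` lets a script make `slice` write into the very buffer it reads from; in that case the target cannot be borrowed mutably while the source borrow is alive, so the bytes are moved within the single allocation instead. A hedged sketch of a program that should reach this path, driven through `boa_engine`'s public eval API (the species constructor returning a view over the source buffer mirrors the `slice-memcpy.js` test referenced in `test262_config.toml` below):

    use boa_engine::{Context, Source};

    fn main() {
        let mut context = Context::default();
        // The species constructor returns a Uint8Array view over the *source*
        // buffer at a different byte offset, so the copy ranges overlap.
        let js = r#"
            var buf = new ArrayBuffer(8);
            class SameBuffer extends Uint8Array {
                static get [Symbol.species]() {
                    return function (len) {
                        return new Uint8Array(buf, 1, len);
                    };
                }
            }
            var ta = new SameBuffer(buf);
            ta.set([1, 2, 3, 4, 5, 6, 7, 8]);
            String(ta.slice(0, 4));
        "#;
        let result = context.eval(Source::from_bytes(js)).expect("eval failed");
        println!("{}", result.display());
    }
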
diff --git a/core/engine/src/environments/runtime/mod.rs b/core/engine/src/environments/runtime/mod.rs
index 4dce94a9f89..cf711f2383b 100644
--- a/core/engine/src/environments/runtime/mod.rs
+++ b/core/engine/src/environments/runtime/mod.rs
@@ -506,6 +506,7 @@ impl Context {
     /// # Panics
     ///
     /// Panics if the environment or binding index are out of range.
+    #[track_caller]
     pub(crate) fn get_binding(&mut self, locator: &BindingLocator) -> JsResult<Option<JsValue>> {
         match locator.scope() {
             BindingLocatorScope::GlobalObject => {
diff --git a/core/gc/src/lib.rs b/core/gc/src/lib.rs
index 9fbac483b23..5f31fa47e1a 100644
--- a/core/gc/src/lib.rs
+++ b/core/gc/src/lib.rs
@@ -325,23 +325,9 @@ impl Collector {
             if node_ref.is_rooted() {
                 tracer.enqueue(*node);
 
-                while let Some(node) = tracer.next() {
-                    // SAFETY: the gc heap object should be alive if there is a root.
-                    let node_ref = unsafe { node.as_ref() };
-
-                    if !node_ref.header.is_marked() {
-                        node_ref.header.mark();
-
-                        // SAFETY: if `GcBox::trace_inner()` has been called, then,
-                        // this box must have been deemed as reachable via tracing
-                        // from a root, which by extension means that value has not
-                        // been dropped either.
-
-                        let trace_fn = node_ref.trace_fn();
-
-                        // SAFETY: The function pointer is appropriate for this node type because we extract it from it's VTable.
-                        unsafe { trace_fn(node, tracer) }
-                    }
+                // SAFETY: all nodes must be valid as this phase cannot drop any node.
+                unsafe {
+                    tracer.trace_until_empty();
                 }
             } else if !node_ref.is_marked() {
                 strong_dead.push(*node);
@@ -378,14 +364,9 @@
                 pending_ephemerons.push(*eph);
             }
 
-            while let Some(node) = tracer.next() {
-                // SAFETY: node must be valid as this phase cannot drop any node.
-                let trace_fn = unsafe { node.as_ref() }.trace_fn();
-
-                // SAFETY: The function pointer is appropriate for this node type because we extract it from it's VTable.
-                unsafe {
-                    trace_fn(node, tracer);
-                }
+            // SAFETY: all nodes must be valid as this phase cannot drop any node.
+            unsafe {
+                tracer.trace_until_empty();
             }
         }
 
@@ -397,14 +378,9 @@
             // SAFETY: The garbage collector ensures that all nodes are valid.
             unsafe { node_ref.trace(tracer) };
 
-            while let Some(node) = tracer.next() {
-                // SAFETY: node must be valid as this phase cannot drop any node.
-                let trace_fn = unsafe { node.as_ref() }.trace_fn();
-
-                // SAFETY: The function pointer is appropriate for this node type because we extract it from it's VTable.
-                unsafe {
-                    trace_fn(node, tracer);
-                }
+            // SAFETY: all nodes must be valid as this phase cannot drop any node.
+            unsafe {
+                tracer.trace_until_empty();
             }
         }
 
@@ -419,14 +395,9 @@
            // SAFETY: the garbage collector ensures `eph_ref` always points to valid data.
             let is_key_marked = unsafe { !eph_ref.trace(tracer) };
 
-            while let Some(node) = tracer.next() {
-                // SAFETY: node must be valid as this phase cannot drop any node.
-                let trace_fn = unsafe { node.as_ref() }.trace_fn();
-
-                // SAFETY: The function pointer is appropriate for this node type because we extract it from it's VTable.
-                unsafe {
-                    trace_fn(node, tracer);
-                }
+            // SAFETY: all nodes must be valid as this phase cannot drop any node.
+            unsafe {
+                tracer.trace_until_empty();
             }
 
             is_key_marked
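All four collector phases now drain the queue through `Tracer::trace_until_empty`, which marks a node before tracing it, while the removed ephemeron-phase loops traced without ever marking. When a traced value holds strong references back to itself (the scenario the new test below constructs), an unmarked cycle can keep re-enqueueing its own nodes. A minimal worklist sketch (hypothetical index-based arena, not the real `GcBox` machinery) of why the mark-then-skip check makes draining terminate:

    use std::{cell::Cell, collections::VecDeque};

    struct Node {
        marked: Cell<bool>,
        children: Vec<usize>, // indices into the arena
    }

    fn trace_until_empty(arena: &[Node], queue: &mut VecDeque<usize>) {
        while let Some(ix) = queue.pop_front() {
            let node = &arena[ix];
            // Without this check, a node that lists itself as a child
            // would be re-enqueued forever.
            if node.marked.get() {
                continue;
            }
            node.marked.set(true);
            queue.extend(node.children.iter().copied());
        }
    }

    fn main() {
        // Node 0 references itself and node 1; draining still terminates.
        let arena = [
            Node { marked: Cell::new(false), children: vec![0, 1] },
            Node { marked: Cell::new(false), children: vec![] },
        ];
        let mut queue = VecDeque::from([0]);
        trace_until_empty(&arena, &mut queue);
        assert!(arena.iter().all(|n| n.marked.get()));
    }
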
diff --git a/core/gc/src/test/weak.rs b/core/gc/src/test/weak.rs
index 30a7d5b3b29..b67f9791b7e 100644
--- a/core/gc/src/test/weak.rs
+++ b/core/gc/src/test/weak.rs
@@ -2,7 +2,8 @@
 use std::{cell::Cell, rc::Rc};
 
 use super::run_test;
 use crate::{
-    force_collect, test::Harness, Ephemeron, Finalize, Gc, GcBox, GcRefCell, Trace, WeakGc,
+    force_collect, internals::EphemeronBox, test::Harness, Ephemeron, Finalize, Gc, GcBox,
+    GcRefCell, Trace, WeakGc,
 };
 
 #[test]
@@ -290,3 +291,44 @@ fn eph_gc_finalizer() {
         assert_eq!(val.inner.get(), 1);
     });
 }
+
+#[test]
+fn eph_strong_self_reference() {
+    type Inner = GcRefCell<(Option<TestCell>, Option<TestCell>)>;
+    #[derive(Trace, Finalize, Clone)]
+    struct TestCell {
+        inner: Gc<Inner>,
+    }
+    run_test(|| {
+        let root = TestCell {
+            inner: Gc::new(GcRefCell::new((None, None))),
+        };
+        let root_size = size_of::<GcBox<Inner>>();
+
+        Harness::assert_exact_bytes_allocated(root_size);
+
+        let watched = Gc::new(0);
+        let watched_size = size_of::<GcBox<i32>>();
+
+        {
+            let eph = Ephemeron::new(&watched, root.clone());
+            let eph_size = size_of::<EphemeronBox<Gc<i32>, TestCell>>();
+
+            root.inner.borrow_mut().0 = Some(root.clone());
+            root.inner.borrow_mut().1 = Some(root.clone());
+
+            force_collect();
+
+            assert!(eph.value().is_some());
+            Harness::assert_exact_bytes_allocated(root_size + eph_size + watched_size);
+        }
+
+        force_collect();
+
+        drop(watched);
+
+        force_collect();
+
+        Harness::assert_exact_bytes_allocated(root_size);
+    });
+}
diff --git a/core/gc/src/trace.rs b/core/gc/src/trace.rs
index 12a456dd4c9..c45b7b91c54 100644
--- a/core/gc/src/trace.rs
+++ b/core/gc/src/trace.rs
@@ -34,8 +34,20 @@ impl Tracer {
         self.queue.push_back(node);
     }
 
-    pub(crate) fn next(&mut self) -> Option<NonNull<GcBox<dyn Trace>>> {
-        self.queue.pop_front()
+    // SAFETY: All the pointers inside of the queue must point to valid memory.
+    pub(crate) unsafe fn trace_until_empty(&mut self) {
+        while let Some(node) = self.queue.pop_front() {
+            let node_ref = unsafe { node.as_ref() };
+            if node_ref.is_marked() {
+                continue;
+            }
+            node_ref.header.mark();
+            let trace_fn = node_ref.trace_fn();
+
+            // SAFETY: The function pointer is appropriate for this node type because we extract it from its VTable.
+            // Additionally, the node pointer is valid per the caller's guarantee.
+            unsafe { trace_fn(node, self) }
+        }
     }
 
     pub(crate) fn is_empty(&mut self) -> bool {
diff --git a/test262_config.toml b/test262_config.toml
index 35c54989a2d..b84b16b1ec2 100644
--- a/test262_config.toml
+++ b/test262_config.toml
@@ -78,4 +78,14 @@ features = [
     "caller",
 ]
 
-tests = ["sm"]
+tests = [
+    # Should throw an OOM error instead of filling the whole RAM.
+    "test/staging/sm/String/replace-math.js",
+    # Panics
+    "test/staging/sm/TypedArray/slice-memcpy.js",
+    "test/staging/sm/expressions/optional-chain-first-expression.js",
+    "test/staging/sm/regress/regress-554955-2.js",
+    "test/staging/sm/class/fields-static-class-name-binding-eval.js",
+    "test/staging/sm/regress/regress-554955-1.js",
+    "test/staging/sm/regress/regress-554955-3.js"
+]
diff --git a/tests/tester/src/main.rs b/tests/tester/src/main.rs
index 8b35af0fb6a..986ee338f8f 100644
--- a/tests/tester/src/main.rs
+++ b/tests/tester/src/main.rs
@@ -82,10 +82,15 @@ struct Ignored {
 }
 
 impl Ignored {
     /// Checks if the ignore list contains the given test in the list of
     /// tests to ignore.
-    pub(crate) fn contains_test(&self, test: &str) -> bool {
-        self.tests.contains(test)
+    pub(crate) fn contains_test(&self, test_path: &Path) -> bool {
+        let Some(test_path) = test_path.to_str() else {
+            return false;
+        };
+        self.tests
+            .iter()
+            .any(|ignored| test_path.contains(&**ignored))
     }
 
     /// Checks if the ignore list contains the given feature name in the list
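Since ignore entries now match by substring against the full test path instead of by exact name, a single entry can suppress one file or a whole directory. A standalone sketch of the new matching rule (the free function and the `&[&str]` list are simplifications of the `Ignored` struct):

    use std::path::Path;

    fn contains_test(ignored: &[&str], test_path: &Path) -> bool {
        // Non-UTF-8 paths never match, mirroring the early return above.
        let Some(test_path) = test_path.to_str() else {
            return false;
        };
        ignored.iter().any(|entry| test_path.contains(*entry))
    }

    fn main() {
        let ignored = ["test/staging/sm/regress"];
        assert!(contains_test(
            &ignored,
            Path::new("test/staging/sm/regress/regress-554955-1.js")
        ));
        assert!(!contains_test(
            &ignored,
            Path::new("test/staging/sm/String/replace-math.js")
        ));
    }
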
diff --git a/tests/tester/src/read.rs b/tests/tester/src/read.rs
index bbd437d799c..1cc85a3a925 100644
--- a/tests/tester/src/read.rs
+++ b/tests/tester/src/read.rs
@@ -95,11 +95,19 @@ pub(super) enum TestFlag {
 pub(super) fn read_harness(test262_path: &Path) -> Result<Harness> {
     let mut includes: HashMap<Box<str>, HarnessFile, FxBuildHasher> = FxHashMap::default();
 
-    read_harness_dir(&test262_path.join("harness"), &mut includes)?;
+    let harness_path = &test262_path.join("harness");
 
-    let assert = read_harness_file(test262_path.join("harness/assert.js"))?;
-    let sta = read_harness_file(test262_path.join("harness/sta.js"))?;
-    let doneprint_handle = read_harness_file(test262_path.join("harness/doneprintHandle.js"))?;
+    read_harness_dir(harness_path, harness_path, &mut includes)?;
+
+    let assert = includes
+        .remove("assert.js")
+        .ok_or_eyre("failed to load harness file `assert.js`")?;
+    let sta = includes
+        .remove("sta.js")
+        .ok_or_eyre("failed to load harness file `sta.js`")?;
+    let doneprint_handle = includes
+        .remove("doneprintHandle.js")
+        .ok_or_eyre("failed to load harness file `doneprintHandle.js`")?;
 
     Ok(Harness {
         assert,
@@ -110,24 +118,24 @@ pub(super) fn read_harness(test262_path: &Path) -> Result<Harness> {
 }
 
 fn read_harness_dir(
+    harness_root: &Path,
     directory_name: &Path,
     includes: &mut HashMap<Box<str>, HarnessFile, FxBuildHasher>,
 ) -> Result<()> {
     for entry in fs::read_dir(directory_name).wrap_err("error reading the harness directory")? {
         let entry = entry?;
-        let file_name = entry.file_name();
-        let file_name = file_name.to_string_lossy().into_owned();
+        let entry_path = entry.path();
 
-        if directory_name.join(file_name.clone()).is_dir() {
-            read_harness_dir(&directory_name.join(file_name), includes)?;
+        if entry.file_type()?.is_dir() {
+            read_harness_dir(harness_root, &entry_path, includes)?;
             continue;
         }
 
-        if file_name == "assert.js" || file_name == "sta.js" || file_name == "doneprintHandle.js" {
-            continue;
-        }
+        let key = entry_path
+            .strip_prefix(harness_root)
+            .wrap_err("invalid harness file path")?;
 
-        includes.insert(file_name.into_boxed_str(), read_harness_file(entry.path())?);
+        includes.insert(key.to_string_lossy().into(), read_harness_file(entry_path)?);
     }
 
     Ok(())
@@ -154,7 +162,7 @@ pub(super) fn read_suite(
         .and_then(OsStr::to_str)
         .ok_or_eyre("invalid path for test suite")?;
 
-    ignore_suite |= ignored.contains_test(name);
+    ignore_suite |= ignored.contains_test(path);
 
     let mut suites = Vec::new();
     let mut tests = Vec::new();
@@ -198,7 +206,7 @@
         if ignore_suite
             || ignored.contains_any_flag(test.flags)
-            || ignored.contains_test(&test.name)
+            || ignored.contains_test(&test.path)
             || test
                 .features
                 .iter()
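With the harness map now keyed by the path relative to the harness root, nested harness files keep their directory component, which is what lets suites such as `sm` resolve includes that live in subdirectories (a flat file name would collide or miss them). A small sketch of the keying rule (the concrete paths are hypothetical):

    use std::path::Path;

    fn harness_key(harness_root: &Path, entry_path: &Path) -> String {
        entry_path
            .strip_prefix(harness_root)
            .expect("harness entries always live under the root")
            .to_string_lossy()
            .into_owned()
    }

    fn main() {
        let root = Path::new("test262/harness");
        assert_eq!(
            harness_key(root, Path::new("test262/harness/assert.js")),
            "assert.js"
        );
        assert_eq!(
            harness_key(root, Path::new("test262/harness/sm/non262.js")),
            "sm/non262.js"
        );
    }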