PagingChunkSource context protocol documentation, many silly cleanups, TODOs for channel context integration (#5000)

* Solidify the "contract" re: GetContext and FillContext passed from PagingChunkSources to Pages. Add an API to ColumnChunkPageStore that will allow different contexts to be used.

* Fix JavaDoc issues in DateTimeUtils

* Random formatting, JavaDoc, inheritance, and variable name changes

---------

Co-authored-by: Shivam Malhotra <malhotraashivam@gmail.com>
rcaudy and malhotrashivam authored Jan 5, 2024
1 parent b44b14e commit 90c2cb9
Showing 44 changed files with 423 additions and 317 deletions.
@@ -10,9 +10,9 @@
* Source, Functor or Sink.
*/
public interface Context extends SafeCloseable {

/**
* Release any resources associated with this context. The context should not be used afterwards.
*/
default void close() {}

}
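For orientation (not part of this commit): because Context extends SafeCloseable, callers normally scope contexts with try-with-resources so that close() always releases any held resources. A minimal usage sketch, assuming a ChunkSource<Values> and the current io.deephaven package layout; the class and method names here are illustrative only.

import io.deephaven.chunk.WritableIntChunk;
import io.deephaven.chunk.attributes.Values;
import io.deephaven.engine.rowset.RowSequence;
import io.deephaven.engine.table.ChunkSource;

public final class ContextUsageSketch {

    // Fill a chunk with the int values at `rows`, letting try-with-resources
    // invoke close() on the fill context once the fill is done.
    public static WritableIntChunk<Values> readInts(
            final ChunkSource<Values> source,
            final RowSequence rows) {
        final int size = rows.intSize();
        final WritableIntChunk<Values> destination = WritableIntChunk.makeWritableChunk(size);
        // A null SharedContext is fine for a standalone fill; sharing only matters
        // when several sources are read against the same keys.
        try (final ChunkSource.FillContext fillContext = source.makeFillContext(size, null)) {
            source.fillChunk(fillContext, destination, rows);
        }
        return destination;
    }
}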
@@ -63,14 +63,14 @@ public CONTEXT getContext() {
* @return The context held in this Context
*/
public static <CONTEXT extends Context> CONTEXT getContext(@NotNull Context context) {
// noinspection unchecked
// noinspection unchecked,rawtypes
return (CONTEXT) ((ContextWithChunk) context).context;
}

/**
* Makes sure that the internal array (and hence the writableChunk) is at least specified size.
* Makes sure that the internal array (and hence the writableChunk) is at least the specified size.
*/
public void ensureLength(final int length) {
public void ensureSize(final int length) {
if (writableChunk.size() < length) {
if (writableChunk.capacity() < length) {
final SafeCloseable oldWritableChunk = writableChunk;
@@ -32,7 +32,7 @@ default Chunk<? extends ATTR> getChunk(@NotNull final GetContext context, @NotNu
}

@Override
default Chunk<? extends ATTR> getChunk(@NotNull final GetContext context, long firstKey, long lastKey) {
default Chunk<? extends ATTR> getChunk(@NotNull final GetContext context, final long firstKey, final long lastKey) {
try (RowSequence rowSequence = RowSequenceFactory.forRange(firstKey, lastKey)) {
return getChunk(context, rowSequence);
}
@@ -48,20 +48,25 @@ default Chunk<ATTR> getChunkByFilling(@NotNull final GetContext context, @NotNul
interface WithPrev<ATTR extends Any> extends DefaultChunkSource<ATTR>, ChunkSource.WithPrev<ATTR> {

@Override
default Chunk<? extends ATTR> getPrevChunk(@NotNull final GetContext context,
default Chunk<? extends ATTR> getPrevChunk(
@NotNull final GetContext context,
@NotNull final RowSequence rowSequence) {
return getPrevChunkByFilling(context, rowSequence);
}

@Override
default Chunk<? extends ATTR> getPrevChunk(@NotNull final GetContext context, long firstKey, long lastKey) {
default Chunk<? extends ATTR> getPrevChunk(
@NotNull final GetContext context,
final long firstKey,
final long lastKey) {
try (RowSequence rowSequence = RowSequenceFactory.forRange(firstKey, lastKey)) {
return getPrevChunk(context, rowSequence);
}
}

@FinalDefault
default Chunk<ATTR> getPrevChunkByFilling(@NotNull final GetContext context,
default Chunk<ATTR> getPrevChunkByFilling(
@NotNull final GetContext context,
@NotNull final RowSequence rowSequence) {
WritableChunk<ATTR> chunk = DefaultGetContext.getWritableChunk(context);
fillPrevChunk(DefaultGetContext.getFillContext(context), chunk, rowSequence);
@@ -72,35 +77,42 @@ default Chunk<ATTR> getPrevChunkByFilling(@NotNull final GetContext context,
default ChunkSource<ATTR> getPrevSource() {
final ChunkSource.WithPrev<ATTR> chunkSource = this;

return new ChunkSource<ATTR>() {
return new ChunkSource<>() {
@Override
public ChunkType getChunkType() {
return chunkSource.getChunkType();
}

@Override
public Chunk<? extends ATTR> getChunk(@NotNull GetContext context, @NotNull RowSequence rowSequence) {
public Chunk<? extends ATTR> getChunk(
@NotNull final GetContext context,
@NotNull final RowSequence rowSequence) {
return chunkSource.getPrevChunk(context, rowSequence);
}

@Override
public Chunk<? extends ATTR> getChunk(@NotNull GetContext context, long firstKey, long lastKey) {
public Chunk<? extends ATTR> getChunk(
@NotNull final GetContext context,
final long firstKey,
final long lastKey) {
return chunkSource.getPrevChunk(context, firstKey, lastKey);
}

@Override
public void fillChunk(@NotNull FillContext context, @NotNull WritableChunk<? super ATTR> destination,
@NotNull RowSequence rowSequence) {
public void fillChunk(
@NotNull final FillContext context,
@NotNull final WritableChunk<? super ATTR> destination,
@NotNull final RowSequence rowSequence) {
chunkSource.fillPrevChunk(context, destination, rowSequence);
}

@Override
public GetContext makeGetContext(int chunkCapacity, SharedContext sharedContext) {
public GetContext makeGetContext(final int chunkCapacity, final SharedContext sharedContext) {
return chunkSource.makeGetContext(chunkCapacity, sharedContext);
}

@Override
public FillContext makeFillContext(int chunkCapacity, SharedContext sharedContext) {
public FillContext makeFillContext(final int chunkCapacity, final SharedContext sharedContext) {
return chunkSource.makeFillContext(chunkCapacity, sharedContext);
}
};
@@ -113,14 +125,15 @@ public FillContext makeFillContext(int chunkCapacity, SharedContext sharedContex
*/
interface SupportsContiguousGet<ATTR extends Any> extends DefaultChunkSource<ATTR> {
@Override
default Chunk<? extends ATTR> getChunk(@NotNull final GetContext context,
default Chunk<? extends ATTR> getChunk(
@NotNull final GetContext context,
@NotNull final RowSequence rowSequence) {
return rowSequence.isContiguous()
? getChunk(context, rowSequence.firstRowKey(), rowSequence.lastRowKey())
: getChunkByFilling(context, rowSequence);
}

@Override
Chunk<? extends ATTR> getChunk(@NotNull final GetContext context, long firstKey, long lastKey);
Chunk<? extends ATTR> getChunk(@NotNull GetContext context, long firstKey, long lastKey);
}
}
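By way of contrast with the fill path, a get-style read may hand back storage owned by the GetContext (as getChunkByFilling's use of DefaultGetContext.getWritableChunk above shows), so the returned Chunk should be consumed before the context is reused or closed. An illustrative sketch under the same assumptions as the earlier one, not part of this commit:

import io.deephaven.chunk.IntChunk;
import io.deephaven.chunk.attributes.Values;
import io.deephaven.engine.rowset.RowSequence;
import io.deephaven.engine.table.ChunkSource;

public final class GetContextUsageSketch {

    // Sum the int values at `rows`; the chunk returned by getChunk may be backed
    // by the GetContext, so it is consumed inside the context's scope.
    public static long sumInts(final ChunkSource<Values> source, final RowSequence rows) {
        long sum = 0;
        try (final ChunkSource.GetContext getContext = source.makeGetContext(rows.intSize(), null)) {
            final IntChunk<? extends Values> values = source.getChunk(getContext, rows).asIntChunk();
            for (int ii = 0; ii < values.size(); ++ii) {
                sum += values.get(ii);
            }
        }
        return sum;
    }
}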
@@ -26,8 +26,8 @@ default long lastRow(final long row) {

@Override
@FinalDefault
default long maxRow(final long row) {
return lastRow(row);
default long maxRow(final long rowKey) {
return lastRow(rowKey);
}

/**
37 changes: 13 additions & 24 deletions engine/table/src/main/java/io/deephaven/engine/page/Page.java
@@ -14,24 +14,13 @@
import org.jetbrains.annotations.NotNull;

/**
* This provides the {@link ChunkSource} interface to a contiguous block of data beginning at {@link #firstRowOffset()}
* and continuing to some row less than or equal to {@link #firstRowOffset()} + {@link #maxRow(long)}.
* Pages are {@link PagingChunkSource PagingChunkSources} that can supply values from a subset of a contiguous block of
* row key space beginning at {@link #firstRowOffset()} and continuing to {@link #firstRowOffset()} +
* {@link #maxRow(long)}. Not all row keys within the range may be valid; that is, pages may be sparse.
* <p>
* Non overlapping pages can be collected together in a {@link PageStore}, which provides the {@link ChunkSource}
* interface to the collection of all of its Pages.
* <p>
* There are two distinct use cases/types of pages. The first use case are {@code Page}s which always have a length()
* &gt; 0. These store length() values, which can be assessed via the {@link ChunkSource} methods. Valid
* {@link RowSequence} passed to those methods will have their offset in the range [firstRowOffset(), firstRowOffset() +
* length()). Passing OrderKeys with offsets outside of this range will have undefined results.
* <p>
* The second use case will always have length() == 0 and firstRowOffset() == 0. These represent "Null" regions which
* return a fixed value, typically a null value, for every {@link RowSequence} passed into the {@link ChunkSource}
* methods. In order to have this use case, override {@code length} and override {@code lastRow} as {@code maxRow}.
* <p>
* Though the {@link ChunkSource} methods ignore the non-offset portion of the rows in the {@link RowSequence}, they can
* assume they are identical for all the passed in elements of the {@link RowSequence}. For instance, they can use the
* simple difference between the complete row value to determine a length.
* Pages may be held within one or more {@link PageStore} instances. The PageStore is responsible for determining which
* row keys in absolute space are mapped to a particular Page. Pages need only concern themselves with lower order bits
* of the row keys they are asked for, after applying their {@link #mask()}.
*/
public interface Page<ATTR extends Any> extends PagingChunkSource<ATTR> {

@@ -42,20 +31,20 @@ public interface Page<ATTR extends Any> extends PagingChunkSource<ATTR> {
long firstRowOffset();

/**
* @param row Any row contained on this page.
* @return the first row of this page, located in the same way as row.
* @param rowKey Any row key contained on this page
* @return The first row key of this page, located in the same way as {@code rowKey}
*/
@FinalDefault
default long firstRow(final long row) {
return (row & ~mask()) | firstRowOffset();
default long firstRow(final long rowKey) {
return (rowKey & ~mask()) | firstRowOffset();
}

/**
* @return the offset for the given row in this page, in [0, {@code maxRow(row)}].
* @return The offset for the given row key in this page, in [0, {@code maxRow(rowKey)}].
*/
@FinalDefault
default long getRowOffset(long row) {
return (row & mask()) - firstRowOffset();
default long getRowOffset(final long rowKey) {
return (rowKey & mask()) - firstRowOffset();
}

/**
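To make the mask arithmetic above concrete, here is a small worked sketch; the page geometry is hypothetical, not taken from any real page implementation.

// Hypothetical page geometry, for illustration only.
final long mask = 0xFFFFL;           // mask(): the page owns the low 16 bits of a row key
final long firstRowOffset = 0L;      // firstRowOffset()

final long rowKey = 0x0003_1204L;    // an absolute row key handed to the page
final long firstRow = (rowKey & ~mask) | firstRowOffset;  // 0x0003_0000, per firstRow(rowKey)
final long rowOffset = (rowKey & mask) - firstRowOffset;  // 0x1204, per getRowOffset(rowKey)
// The high-order bits (0x0003) are the enclosing PageStore's concern, not the page's.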
20 changes: 13 additions & 7 deletions engine/table/src/main/java/io/deephaven/engine/page/PageStore.java
@@ -12,23 +12,27 @@
import io.deephaven.engine.rowset.RowSequenceFactory;
import io.deephaven.util.annotations.FinalDefault;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
* PageStores are a collection of non-overlapping pages, which provides a single {@link ChunkSource} interface across
* all the pages.
* PageStores are a collection of non-overlapping {@link Page Pages}, providing a single {@link PagingChunkSource}
* across all the pages. PageStores are responsible for mapping row keys to pages. PageStores may themselves be Pages
* nested within other PageStores.
*/
public interface PageStore<ATTR extends Any, INNER_ATTR extends ATTR, PAGE extends Page<INNER_ATTR>>
extends PagingChunkSource<ATTR>, DefaultChunkSource.SupportsContiguousGet<ATTR> {

/**
* @return The page containing row, after applying {@link #mask()}.
* @param fillContext The fill context to use; may be {@code null} if the calling code does not have a fill context
* @param rowKey The row key to get the page for
* @return The page containing {@code rowKey}, after applying {@link #mask()}.
*/
@NotNull
PAGE getPageContaining(FillContext fillContext, long row);
PAGE getPageContaining(@Nullable FillContext fillContext, long rowKey);

@Override
default Chunk<? extends ATTR> getChunk(@NotNull final GetContext context, @NotNull final RowSequence rowSequence) {
if (rowSequence.size() == 0) {
if (rowSequence.isEmpty()) {
return getChunkType().getEmptyChunk();
}

@@ -65,9 +69,11 @@ default Chunk<? extends ATTR> getChunk(@NotNull final GetContext context, final
}

@Override
default void fillChunk(@NotNull final FillContext context, @NotNull final WritableChunk<? super ATTR> destination,
default void fillChunk(
@NotNull final FillContext context,
@NotNull final WritableChunk<? super ATTR> destination,
@NotNull final RowSequence rowSequence) {
if (rowSequence.size() == 0) {
if (rowSequence.isEmpty()) {
return;
}
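Conceptually, the delegation these defaults set up looks something like the following sketch. It is not the shipped implementation: it assumes PagingChunkSource exposes fillChunkAppend(FillContext, WritableChunk, RowSequence.Iterator) and it ignores the inner-context handling a real PageStore performs. The parameter names match the fillChunk signature above.

// Walk the keys, find the page that owns the next key, and let that page
// append the values it is responsible for, advancing the shared iterator.
destination.setSize(0);
try (final RowSequence.Iterator rowsIterator = rowSequence.getRowSequenceIterator()) {
    while (rowsIterator.hasMore()) {
        final long nextKey = rowsIterator.peekNextKey();
        final PAGE page = getPageContaining(context, nextKey);
        page.fillChunkAppend(context, destination, rowsIterator);
    }
}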
