Add Type.createNullBlock to create a block containing a single null
dain committed Oct 16, 2024
1 parent 1b37833 commit ac778c9
Showing 15 changed files with 24 additions and 40 deletions.
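
In every call site touched by this commit, the same three-step idiom — create a one-entry block builder, append a null, and build the block — is collapsed into a single call to the new Type.createNullBlock() method. A minimal before/after sketch, assuming the Trino SPI is on the classpath and using VARCHAR as the column type; the class and method names below are illustrative, not part of the commit:

import io.trino.spi.block.Block;
import io.trino.spi.type.Type;

import static io.trino.spi.type.VarcharType.VARCHAR;

class NullBlockSketch
{
    // Old idiom: allocate a builder for one entry, append a null, build the block
    static Block singleNullBefore(Type type)
    {
        return type.createBlockBuilder(null, 1)
                .appendNull()
                .build();
    }

    // New idiom: ask the type for the single-null block directly
    static Block singleNullAfter(Type type)
    {
        return type.createNullBlock();
    }

    public static void main(String[] args)
    {
        Block block = singleNullAfter(VARCHAR);
        System.out.println(block.getPositionCount()); // prints 1
        System.out.println(block.isNull(0));          // prints true
    }
}
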
@@ -53,9 +53,7 @@ private static Page makeNullValuesPage(List<Type> types)
{
Block[] columns = new Block[types.size()];
for (int i = 0; i < types.size(); i++) {
-columns[i] = types.get(i).createBlockBuilder(null, 1)
-        .appendNull()
-        .build();
+columns[i] = types.get(i).createNullBlock();
}
return new Page(1, columns);
}
@@ -75,9 +75,7 @@ public Operator createOperator(DriverContext driverContext)
// it's easier to create null blocks for every output column even though we only null out some grouping column outputs
Block[] nullBlocks = new Block[outputTypes.size()];
for (int i = 0; i < outputTypes.size(); i++) {
-nullBlocks[i] = outputTypes.get(i).createBlockBuilder(null, 1)
-        .appendNull()
-        .build();
+nullBlocks[i] = outputTypes.get(i).createNullBlock();
}

// create groupid blocks for every group
@@ -304,7 +304,7 @@ private static Block[] emptyBlocks(Map<Symbol, Integer> layout)
Block[] blocks = new Block[layout.size()];
for (Map.Entry<Symbol, Integer> entry : layout.entrySet()) {
if (!entry.getKey().type().equals(SPOOLING_METADATA_TYPE)) {
-blocks[entry.getValue()] = entry.getKey().type().createBlockBuilder(null, 1).appendNull().build();
+blocks[entry.getValue()] = entry.getKey().type().createNullBlock();
}
}

@@ -163,7 +163,7 @@ public Block build()
result = fromNotNullSuppressedFieldBlocks(positionCount, hasNullRow ? Optional.of(rowIsNull) : Optional.empty(), fieldBlocks);
}
else if (hasNullRow) {
-Block nullRowBlock = type.createBlockBuilder(null, 0).appendNull().build();
+Block nullRowBlock = type.createNullBlock();
result = RunLengthEncodedBlock.create(nullRowBlock, positionCount);
}
else {
10 changes: 10 additions & 0 deletions core/trino-spi/src/main/java/io/trino/spi/type/Type.java
@@ -104,6 +104,16 @@ default TypeOperatorDeclaration getTypeOperatorDeclaration(TypeOperators typeOpe
*/
BlockBuilder createBlockBuilder(BlockBuilderStatus blockBuilderStatus, int expectedEntries);

+/**
+ * Creates a block containing a single null value.
+ */
+default ValueBlock createNullBlock()
+{
+    return createBlockBuilder(null, 1, 0)
+            .appendNull()
+            .buildValueBlock();
+}
+
/**
* Gets an object representation of the type value in the {@code block}
* {@code position}. This is the value returned to the user via the
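
The default implementation above builds the one-position null block once; call sites that need an entire column of nulls then wrap it in a run-length encoding, as the reader change below does for columns missing from a file. A short usage sketch under the same assumptions (the helper class and method names are illustrative):

import io.trino.spi.block.Block;
import io.trino.spi.block.RunLengthEncodedBlock;
import io.trino.spi.type.Type;

final class NullColumnSketch
{
    private NullColumnSketch() {}

    // Repeat the type's single-null block positionCount times without
    // materializing one entry per row.
    static Block nullColumn(Type type, int positionCount)
    {
        return RunLengthEncodedBlock.create(type.createNullBlock(), positionCount);
    }
}
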
@@ -439,9 +439,7 @@ public Block readBlock(int columnIndex)
checkState(currentChunkRowCount > 0, "No more data");

if (columnIndex >= columns.length) {
-Type type = readColumns.get(columnIndex);
-Block nullBlock = type.createBlockBuilder(null, 1, 0).appendNull().build();
-return RunLengthEncodedBlock.create(nullBlock, currentChunkRowCount);
+return RunLengthEncodedBlock.create(readColumns.get(columnIndex).createNullBlock(), currentChunkRowCount);
}

return columns[columnIndex].readBlock(rowGroupPosition, currentChunkRowCount);
@@ -23,7 +23,6 @@
import io.trino.spi.Page;
import io.trino.spi.TrinoException;
import io.trino.spi.block.Block;
-import io.trino.spi.block.BlockBuilder;
import io.trino.spi.block.RunLengthEncodedBlock;
import io.trino.spi.type.Type;

@@ -88,9 +87,7 @@ public RcFileFileWriter(

ImmutableList.Builder<Block> nullBlocks = ImmutableList.builder();
for (Type fileColumnType : fileColumnTypes) {
-BlockBuilder blockBuilder = fileColumnType.createBlockBuilder(null, 1, 0);
-blockBuilder.appendNull();
-nullBlocks.add(blockBuilder.build());
+nullBlocks.add(fileColumnType.createNullBlock());
}
this.nullBlocks = nullBlocks.build();
this.validationInputFactory = validationInputFactory;
@@ -92,7 +92,7 @@ public AvroHiveFileWriter(
outputColumnNames.add(entry.getKey().toLowerCase(Locale.ENGLISH));
Type type = avroTypeBlockHandler.typeFor(entry.getValue().schema());
outputColumnTypes.add(type);
-blocks.add(type.createBlockBuilder(null, 1).appendNull().build());
+blocks.add(type.createNullBlock());
}
typeCorrectNullBlocks = blocks.build();
fileWriter = new AvroFileWriter(countingOutputStream, fileSchema, typeManager, compressionKind, metadata, outputColumnNames.build(), outputColumnTypes.build(), true);
@@ -21,7 +21,6 @@
import io.trino.spi.Page;
import io.trino.spi.TrinoException;
import io.trino.spi.block.Block;
-import io.trino.spi.block.BlockBuilder;
import io.trino.spi.block.RunLengthEncodedBlock;
import io.trino.spi.type.Type;

@@ -57,9 +56,7 @@ public LineFileWriter(LineWriter lineWriter, LineSerializer serializer, Closeabl

ImmutableList.Builder<Block> nullBlocks = ImmutableList.builder();
for (Type fileColumnType : serializer.getTypes()) {
-BlockBuilder blockBuilder = fileColumnType.createBlockBuilder(null, 1, 0);
-blockBuilder.appendNull();
-nullBlocks.add(blockBuilder.build());
+nullBlocks.add(fileColumnType.createNullBlock());
}
this.nullBlocks = nullBlocks.build();
}
@@ -31,7 +31,6 @@
import io.trino.spi.Page;
import io.trino.spi.TrinoException;
import io.trino.spi.block.Block;
-import io.trino.spi.block.BlockBuilder;
import io.trino.spi.block.LongArrayBlock;
import io.trino.spi.block.RowBlock;
import io.trino.spi.block.RunLengthEncodedBlock;
@@ -113,9 +112,7 @@ public OrcFileWriter(

ImmutableList.Builder<Block> nullBlocks = ImmutableList.builder();
for (Type fileColumnType : fileColumnTypes) {
-BlockBuilder blockBuilder = fileColumnType.createBlockBuilder(null, 1, 0);
-blockBuilder.appendNull();
-nullBlocks.add(blockBuilder.build());
+nullBlocks.add(fileColumnType.createNullBlock());
}
this.nullBlocks = nullBlocks.build();
this.validationInputFactory = validationInputFactory;
@@ -301,9 +301,7 @@ private static class NullColumn
public NullColumn(Type type)
{
this.type = requireNonNull(type, "type is null");
-this.nullBlock = type.createBlockBuilder(null, 1, 0)
-        .appendNull()
-        .build();
+this.nullBlock = type.createNullBlock();
}

@Override
@@ -23,7 +23,6 @@
import io.trino.spi.Page;
import io.trino.spi.TrinoException;
import io.trino.spi.block.Block;
-import io.trino.spi.block.BlockBuilder;
import io.trino.spi.block.RunLengthEncodedBlock;
import io.trino.spi.type.Type;
import org.apache.parquet.format.CompressionCodec;
@@ -102,9 +101,7 @@ public ParquetFileWriter(

ImmutableList.Builder<Block> nullBlocks = ImmutableList.builder();
for (Type fileColumnType : fileColumnTypes) {
-BlockBuilder blockBuilder = fileColumnType.createBlockBuilder(null, 1, 0);
-blockBuilder.appendNull();
-nullBlocks.add(blockBuilder.build());
+nullBlocks.add(fileColumnType.createNullBlock());
}
this.nullBlocks = nullBlocks.build();
}
@@ -245,9 +245,7 @@ private static class NullColumn

private NullColumn(Type type)
{
-this.nullBlock = type.createBlockBuilder(null, 1, 0)
-        .appendNull()
-        .build();
+this.nullBlock = type.createNullBlock();
}

@Override
@@ -21,7 +21,6 @@
import io.trino.spi.Page;
import io.trino.spi.TrinoException;
import io.trino.spi.block.Block;
-import io.trino.spi.block.BlockBuilder;
import io.trino.spi.block.LazyBlock;
import io.trino.spi.block.LazyBlockLoader;
import io.trino.spi.block.RunLengthEncodedBlock;
@@ -44,7 +43,6 @@ public class RcFilePageSource
{
private static final long GUESSED_MEMORY_USAGE = DataSize.of(16, DataSize.Unit.MEGABYTE).toBytes();

-private static final int NULL_ENTRY_SIZE = 0;
private final RcFileReader rcFileReader;

private final List<String> columnNames;
@@ -82,9 +80,7 @@ public RcFilePageSource(RcFileReader rcFileReader, List<HiveColumnHandle> column
if (hiveColumnIndexes[columnIndex] >= rcFileReader.getColumnCount()) {
// this file may contain fewer fields than what's declared in the schema
// this happens when additional columns are added to the hive table after files have been created
-BlockBuilder blockBuilder = column.getType().createBlockBuilder(null, 1, NULL_ENTRY_SIZE);
-blockBuilder.appendNull();
-constantBlocks[columnIndex] = blockBuilder.build();
+constantBlocks[columnIndex] = column.getType().createNullBlock();
}
}
types = typesBuilder.build();
@@ -88,7 +88,7 @@ public IcebergOrcFileWriter(
this.fileInputColumnIndexes = requireNonNull(fileInputColumnIndexes, "fileInputColumnIndexes is null");

this.nullBlocks = fileColumnTypes.stream()
-        .map(type -> type.createBlockBuilder(null, 1, 0).appendNull().build())
+        .map(Type::createNullBlock)
.collect(toImmutableList());

this.validationInputFactory = validationInputFactory;