fix build error and code style (#781)
wForget authored Jan 21, 2025
1 parent 320772d · commit ecad36b
Showing 3 changed files with 8 additions and 6 deletions.

@@ -22,10 +22,10 @@ class BlazeFunctionSuite extends org.apache.spark.sql.QueryTest with BaseBlazeSQ
test("sum function with float input") {
withTable("t1") {
withSQLConf("spark.blaze.enable" -> "false") {
sql("set spark.blaze.enable=false")
sql("create table t1 using parquet as select 1.0f as c1")
val df = sql("select sum(c1) from t1")
checkAnswer(df, Seq(Row(1.23, 1.1)))
sql("set spark.blaze.enable=false")
sql("create table t1 using parquet as select 1.0f as c1")
val df = sql("select sum(c1) from t1")
checkAnswer(df, Seq(Row(1.23, 1.1)))
}
}
}
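
For context: withSQLConf in Spark's SQL test utilities sets the given conf entries only for the duration of the closure and restores the previous values afterwards, so the explicit "set spark.blaze.enable=false" statement inside the block appears redundant but harmless. A minimal sketch of the pattern follows; this is a hypothetical standalone suite, not the repository's test, and the expected value Row(1.0) is assumed for a genuine single-row float sum rather than taken from this diff:

    // Hypothetical suite using Spark's test helpers; QueryTest and
    // SharedSparkSession come from Spark's test jars, and SharedSparkSession
    // mixes in SQLTestUtils, which provides withTable/withSQLConf/sql.
    import org.apache.spark.sql.{QueryTest, Row}
    import org.apache.spark.sql.test.SharedSparkSession

    class SumFloatSketchSuite extends QueryTest with SharedSparkSession {
      test("sum over a single float row (sketch)") {
        withTable("t2") {
          withSQLConf("spark.blaze.enable" -> "false") {
            sql("create table t2 using parquet as select 1.0f as c1")
            // sum over FloatType widens to DoubleType, so one 1.0f row sums to 1.0
            checkAnswer(sql("select sum(c1) from t2"), Seq(Row(1.0)))
          }
        }
      }
    }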

@@ -28,7 +28,6 @@ import org.apache.spark.SparkContext
 import org.apache.spark.TaskContext
 import org.blaze.protobuf.PhysicalPlanNode
 
-
 class NativeRDD(
     @transient private val rddSparkContext: SparkContext,
     val metrics: MetricNode,

@@ -85,7 +85,7 @@ class ArrowFFIExporter(rowIter: Iterator[InternalRow], schema: StructType) {
     val arrowWriter = ArrowWriter.create(root)
     var rowCount = 0
 
-      rowCount += 1
+    rowCount += 1
     def processRows(): Unit = {
       while (rowIter.hasNext
         && rowCount < maxBatchNumRows
@@ -94,6 +94,9 @@ class ArrowFFIExporter(rowIter: Iterator[InternalRow], schema: StructType) {
         rowCount += 1
       }
     }
+    val currentUserInfo = UserGroupInformation.getCurrentUser
+    val nativeCurrentUser = NativeHelper.currentUser
+    val isNativeCurrentUser = currentUserInfo.equals(nativeCurrentUser)
     // if current user is native user, process rows directly
     if (isNativeCurrentUser) {
       processRows()
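
The three added val lines appear to fix the build error by defining isNativeCurrentUser, which the unchanged "if" branch already references. A sketch of the surrounding pattern, under the assumption (not shown in this diff) that the non-native-user branch impersonates the native user via Hadoop's UserGroupInformation.doAs:

    // Assumed shape of the user check; only the three val definitions and the
    // "process rows directly" branch are confirmed by the diff above.
    import java.security.PrivilegedExceptionAction
    import org.apache.hadoop.security.UserGroupInformation

    val currentUserInfo = UserGroupInformation.getCurrentUser
    val nativeCurrentUser = NativeHelper.currentUser // Blaze helper, from the diff
    if (currentUserInfo.equals(nativeCurrentUser)) {
      processRows() // already running as the native user
    } else {
      // hypothetical fallback: run processRows() as the native user
      nativeCurrentUser.doAs(new PrivilegedExceptionAction[Unit] {
        override def run(): Unit = processRows()
      })
    }

Comparing the caller's UGI against the native user's avoids an unnecessary doAs re-login when the identities already match.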
