Skip to content

Commit 837b0ac

Browse files
committed
[HUDI-4482] remove guava in some modules
1 parent c188852 commit 837b0ac

14 files changed

Lines changed: 112 additions & 122 deletions

File tree

hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/payload/SqlTypedRecord.scala

Lines changed: 11 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717

1818
package org.apache.spark.sql.hudi.command.payload
1919

20-
import com.google.common.cache.{Cache, CacheBuilder}
20+
import com.github.benmanes.caffeine.cache.{Cache, Caffeine}
2121
import org.apache.avro.Schema
2222
import org.apache.avro.generic.IndexedRecord
2323
import org.apache.hudi.HoodieSparkUtils.sparkAdapter
@@ -27,7 +27,7 @@ import org.apache.spark.sql.catalyst.InternalRow
2727
import org.apache.spark.sql.hudi.command.payload.SqlTypedRecord.{getAvroDeserializer, getSqlType}
2828
import org.apache.spark.sql.types.StructType
2929

30-
import java.util.concurrent.Callable
30+
import java.util.function.Function
3131

3232
/**
3333
* A sql typed record which will convert the avro field to sql typed value.
@@ -49,30 +49,24 @@ class SqlTypedRecord(val record: IndexedRecord) extends IndexedRecord {
4949

5050
object SqlTypedRecord {
5151

52-
private val sqlTypeCache = CacheBuilder.newBuilder().build[Schema, StructType]()
52+
private val sqlTypeCache = Caffeine.newBuilder()
53+
.maximumSize(16).build[Schema, StructType]
5354

5455
private val avroDeserializerCacheLocal = new ThreadLocal[Cache[Schema, HoodieAvroDeserializer]] {
55-
override def initialValue(): Cache[Schema, HoodieAvroDeserializer] =
56-
CacheBuilder.newBuilder().build[Schema, HoodieAvroDeserializer]()
56+
override def initialValue(): Cache[Schema, HoodieAvroDeserializer] = {
57+
Caffeine.newBuilder().maximumSize(16).build[Schema, HoodieAvroDeserializer]
58+
}
5759
}
5860

5961
def getSqlType(schema: Schema): StructType = {
60-
sqlTypeCache.get(schema, new Callable[StructType] {
61-
override def call(): StructType = {
62-
val structType = AvroConversionUtils.convertAvroSchemaToStructType(schema)
63-
sqlTypeCache.put(schema, structType)
64-
structType
65-
}
62+
sqlTypeCache.get(schema, new Function[Schema, StructType] {
63+
override def apply(t: Schema): StructType = AvroConversionUtils.convertAvroSchemaToStructType(t)
6664
})
6765
}
6866

6967
def getAvroDeserializer(schema: Schema): HoodieAvroDeserializer= {
70-
avroDeserializerCacheLocal.get().get(schema, new Callable[HoodieAvroDeserializer] {
71-
override def call(): HoodieAvroDeserializer = {
72-
val deserializer = sparkAdapter.createAvroDeserializer(schema, getSqlType(schema))
73-
avroDeserializerCacheLocal.get().put(schema, deserializer)
74-
deserializer
75-
}
68+
avroDeserializerCacheLocal.get().get(schema, new Function[Schema, HoodieAvroDeserializer] {
69+
override def apply(t: Schema): HoodieAvroDeserializer = sparkAdapter.createAvroDeserializer(t, getSqlType(t))
7670
})
7771
}
7872
}

hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/payload/ExpressionPayload.scala

Lines changed: 16 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717

1818
package org.apache.spark.sql.hudi.command.payload
1919

20-
import com.google.common.cache.CacheBuilder
20+
import com.github.benmanes.caffeine.cache.Caffeine
2121
import org.apache.avro.Schema
2222
import org.apache.avro.generic.{GenericData, GenericRecord, IndexedRecord}
2323
import org.apache.hudi.AvroConversionUtils
@@ -32,11 +32,11 @@ import org.apache.hudi.sql.IExpressionEvaluator
3232
import org.apache.spark.sql.avro.{AvroSerializer, SchemaConverters}
3333
import org.apache.spark.sql.catalyst.expressions.Expression
3434
import org.apache.spark.sql.hudi.SerDeUtils
35-
import org.apache.spark.sql.hudi.command.payload.ExpressionPayload.{getEvaluator, setWriteSchema, getMergedSchema}
35+
import org.apache.spark.sql.hudi.command.payload.ExpressionPayload.{getEvaluator, getMergedSchema, setWriteSchema}
3636
import org.apache.spark.sql.types.{StructField, StructType}
3737

38-
import java.util.concurrent.Callable
3938
import java.util.{Base64, Properties}
39+
import java.util.function.Function
4040
import scala.collection.JavaConverters._
4141
import scala.collection.mutable.ArrayBuffer
4242

@@ -270,19 +270,19 @@ object ExpressionPayload {
270270
* The Map[IExpressionEvaluator, IExpressionEvaluator] is the map of the condition expression
271271
* to the assignments expression.
272272
*/
273-
private val cache = CacheBuilder.newBuilder()
273+
private val cache = Caffeine.newBuilder()
274274
.maximumSize(1024)
275275
.build[String, Map[IExpressionEvaluator, IExpressionEvaluator]]()
276276

277-
private val writeSchemaCache = CacheBuilder.newBuilder().build[String, Schema]()
277+
private val writeSchemaCache = Caffeine.newBuilder()
278+
.maximumSize(16).build[String, Schema]()
278279

279280
def setWriteSchema(properties: Properties): Schema = {
280281
ValidationUtils.checkArgument(properties.containsKey(HoodieWriteConfig.WRITE_SCHEMA.key),
281282
s"Missing ${HoodieWriteConfig.WRITE_SCHEMA.key}")
282283
writeSchemaCache.get(properties.getProperty(HoodieWriteConfig.WRITE_SCHEMA.key),
283-
new Callable[Schema] {
284-
override def call(): Schema =
285-
new Schema.Parser().parse(properties.getProperty(HoodieWriteConfig.WRITE_SCHEMA.key))
284+
new Function[String, Schema] {
285+
override def apply(t: String): Schema = new Schema.Parser().parse(t)
286286
})
287287
}
288288

@@ -293,10 +293,9 @@ object ExpressionPayload {
293293
def getEvaluator(
294294
serializedConditionAssignments: String, writeSchema: Schema): Map[IExpressionEvaluator, IExpressionEvaluator] = {
295295
cache.get(serializedConditionAssignments,
296-
new Callable[Map[IExpressionEvaluator, IExpressionEvaluator]] {
297-
298-
override def call(): Map[IExpressionEvaluator, IExpressionEvaluator] = {
299-
val serializedBytes = Base64.getDecoder.decode(serializedConditionAssignments)
296+
new Function[String, Map[IExpressionEvaluator, IExpressionEvaluator]] {
297+
override def apply(t: String): Map[IExpressionEvaluator, IExpressionEvaluator] = {
298+
val serializedBytes = Base64.getDecoder.decode(t)
300299
val conditionAssignments = SerDeUtils.toObject(serializedBytes)
301300
.asInstanceOf[Map[Expression, Seq[Expression]]]
302301
// Do the CodeGen for condition expression and assignment expression
@@ -316,14 +315,14 @@ object ExpressionPayload {
316315
})
317316
}
318317

319-
private val mergedSchemaCache = CacheBuilder.newBuilder().build[TupleSchema, Schema]()
318+
private val mergedSchemaCache = Caffeine.newBuilder().maximumSize(16).build[TupleSchema, Schema]()
320319

321320
def getMergedSchema(source: Schema, target: Schema): Schema = {
322321

323-
mergedSchemaCache.get(TupleSchema(source, target), new Callable[Schema] {
324-
override def call(): Schema = {
325-
val rightSchema = HoodieAvroUtils.removeMetadataFields(target)
326-
mergeSchema(source, rightSchema)
322+
mergedSchemaCache.get(TupleSchema(source, target), new Function[TupleSchema, Schema] {
323+
override def apply(t: TupleSchema): Schema = {
324+
val rightSchema = HoodieAvroUtils.removeMetadataFields(t.second)
325+
mergeSchema(t.first, rightSchema)
327326
}
328327
})
329328
}

hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/HoodieProcedures.scala

Lines changed: 55 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -17,72 +17,68 @@
1717

1818
package org.apache.spark.sql.hudi.command.procedures
1919

20-
import com.google.common.collect.ImmutableMap
21-
22-
import java.util
2320
import java.util.Locale
2421
import java.util.function.Supplier
2522

2623
object HoodieProcedures {
27-
private val BUILDERS: util.Map[String, Supplier[ProcedureBuilder]] = initProcedureBuilders
24+
private val BUILDERS: Map[String, Supplier[ProcedureBuilder]] = initProcedureBuilders
2825

2926
def newBuilder(name: String): ProcedureBuilder = {
30-
val builderSupplier: Supplier[ProcedureBuilder] = BUILDERS.get(name.toLowerCase(Locale.ROOT))
31-
if (builderSupplier != null) builderSupplier.get else null
27+
val builderSupplier = BUILDERS.get(name.toLowerCase(Locale.ROOT))
28+
if (builderSupplier.isDefined) builderSupplier.get.get() else null
3229
}
3330

34-
private def initProcedureBuilders: util.Map[String, Supplier[ProcedureBuilder]] = {
35-
val mapBuilder: ImmutableMap.Builder[String, Supplier[ProcedureBuilder]] = ImmutableMap.builder()
36-
mapBuilder.put(RunCompactionProcedure.NAME, RunCompactionProcedure.builder)
37-
mapBuilder.put(ShowCompactionProcedure.NAME, ShowCompactionProcedure.builder)
38-
mapBuilder.put(CreateSavepointProcedure.NAME, CreateSavepointProcedure.builder)
39-
mapBuilder.put(DeleteSavepointProcedure.NAME, DeleteSavepointProcedure.builder)
40-
mapBuilder.put(RollbackToSavepointProcedure.NAME, RollbackToSavepointProcedure.builder)
41-
mapBuilder.put(RollbackToInstantTimeProcedure.NAME, RollbackToInstantTimeProcedure.builder)
42-
mapBuilder.put(RunClusteringProcedure.NAME, RunClusteringProcedure.builder)
43-
mapBuilder.put(ShowClusteringProcedure.NAME, ShowClusteringProcedure.builder)
44-
mapBuilder.put(ShowCommitsProcedure.NAME, ShowCommitsProcedure.builder)
45-
mapBuilder.put(ShowCommitsMetadataProcedure.NAME, ShowCommitsMetadataProcedure.builder)
46-
mapBuilder.put(ShowArchivedCommitsProcedure.NAME, ShowArchivedCommitsProcedure.builder)
47-
mapBuilder.put(ShowArchivedCommitsMetadataProcedure.NAME, ShowArchivedCommitsMetadataProcedure.builder)
48-
mapBuilder.put(ShowCommitFilesProcedure.NAME, ShowCommitFilesProcedure.builder)
49-
mapBuilder.put(ShowCommitPartitionsProcedure.NAME, ShowCommitPartitionsProcedure.builder)
50-
mapBuilder.put(ShowCommitWriteStatsProcedure.NAME, ShowCommitWriteStatsProcedure.builder)
51-
mapBuilder.put(CommitsCompareProcedure.NAME, CommitsCompareProcedure.builder)
52-
mapBuilder.put(ShowSavepointsProcedure.NAME, ShowSavepointsProcedure.builder)
53-
mapBuilder.put(DeleteMarkerProcedure.NAME, DeleteMarkerProcedure.builder)
54-
mapBuilder.put(ShowRollbacksProcedure.NAME, ShowRollbacksProcedure.builder)
55-
mapBuilder.put(ShowRollbackDetailProcedure.NAME, ShowRollbackDetailProcedure.builder)
56-
mapBuilder.put(ExportInstantsProcedure.NAME, ExportInstantsProcedure.builder)
57-
mapBuilder.put(ShowAllFileSystemViewProcedure.NAME, ShowAllFileSystemViewProcedure.builder)
58-
mapBuilder.put(ShowLatestFileSystemViewProcedure.NAME, ShowLatestFileSystemViewProcedure.builder)
59-
mapBuilder.put(ShowHoodieLogFileMetadataProcedure.NAME, ShowHoodieLogFileMetadataProcedure.builder)
60-
mapBuilder.put(ShowHoodieLogFileRecordsProcedure.NAME, ShowHoodieLogFileRecordsProcedure.builder)
61-
mapBuilder.put(StatsWriteAmplificationProcedure.NAME, StatsWriteAmplificationProcedure.builder)
62-
mapBuilder.put(StatsFileSizeProcedure.NAME, StatsFileSizeProcedure.builder)
63-
mapBuilder.put(HdfsParquetImportProcedure.NAME, HdfsParquetImportProcedure.builder)
64-
mapBuilder.put(RunBootstrapProcedure.NAME, RunBootstrapProcedure.builder)
65-
mapBuilder.put(ShowBootstrapMappingProcedure.NAME, ShowBootstrapMappingProcedure.builder)
66-
mapBuilder.put(ShowBootstrapPartitionsProcedure.NAME, ShowBootstrapPartitionsProcedure.builder)
67-
mapBuilder.put(UpgradeTableProcedure.NAME, UpgradeTableProcedure.builder)
68-
mapBuilder.put(DowngradeTableProcedure.NAME, DowngradeTableProcedure.builder)
69-
mapBuilder.put(ShowMetadataTableFilesProcedure.NAME, ShowMetadataTableFilesProcedure.builder)
70-
mapBuilder.put(ShowMetadataTablePartitionsProcedure.NAME, ShowMetadataTablePartitionsProcedure.builder)
71-
mapBuilder.put(CreateMetadataTableProcedure.NAME, CreateMetadataTableProcedure.builder)
72-
mapBuilder.put(DeleteMetadataTableProcedure.NAME, DeleteMetadataTableProcedure.builder)
73-
mapBuilder.put(InitMetadataTableProcedure.NAME, InitMetadataTableProcedure.builder)
74-
mapBuilder.put(ShowMetadataTableStatsProcedure.NAME, ShowMetadataTableStatsProcedure.builder)
75-
mapBuilder.put(ValidateMetadataTableFilesProcedure.NAME, ValidateMetadataTableFilesProcedure.builder)
76-
mapBuilder.put(ShowFsPathDetailProcedure.NAME, ShowFsPathDetailProcedure.builder)
77-
mapBuilder.put(CopyToTableProcedure.NAME, CopyToTableProcedure.builder)
78-
mapBuilder.put(RepairAddpartitionmetaProcedure.NAME, RepairAddpartitionmetaProcedure.builder)
79-
mapBuilder.put(RepairCorruptedCleanFilesProcedure.NAME, RepairCorruptedCleanFilesProcedure.builder)
80-
mapBuilder.put(RepairDeduplicateProcedure.NAME, RepairDeduplicateProcedure.builder)
81-
mapBuilder.put(RepairMigratePartitionMetaProcedure.NAME, RepairMigratePartitionMetaProcedure.builder)
82-
mapBuilder.put(RepairOverwriteHoodiePropsProcedure.NAME, RepairOverwriteHoodiePropsProcedure.builder)
83-
mapBuilder.put(RunCleanProcedure.NAME, RunCleanProcedure.builder)
84-
mapBuilder.put(ValidateHoodieSyncProcedure.NAME, ValidateHoodieSyncProcedure.builder)
85-
mapBuilder.put(ShowInvalidParquetProcedure.NAME, ShowInvalidParquetProcedure.builder)
86-
mapBuilder.build
31+
private def initProcedureBuilders: Map[String, Supplier[ProcedureBuilder]] = {
32+
Map((RunCompactionProcedure.NAME, RunCompactionProcedure.builder)
33+
,(ShowCompactionProcedure.NAME, ShowCompactionProcedure.builder)
34+
,(CreateSavepointProcedure.NAME, CreateSavepointProcedure.builder)
35+
,(DeleteSavepointProcedure.NAME, DeleteSavepointProcedure.builder)
36+
,(RollbackToSavepointProcedure.NAME, RollbackToSavepointProcedure.builder)
37+
,(RollbackToInstantTimeProcedure.NAME, RollbackToInstantTimeProcedure.builder)
38+
,(RunClusteringProcedure.NAME, RunClusteringProcedure.builder)
39+
,(ShowClusteringProcedure.NAME, ShowClusteringProcedure.builder)
40+
,(ShowCommitsProcedure.NAME, ShowCommitsProcedure.builder)
41+
,(ShowCommitsMetadataProcedure.NAME, ShowCommitsMetadataProcedure.builder)
42+
,(ShowArchivedCommitsProcedure.NAME, ShowArchivedCommitsProcedure.builder)
43+
,(ShowArchivedCommitsMetadataProcedure.NAME, ShowArchivedCommitsMetadataProcedure.builder)
44+
,(ShowCommitFilesProcedure.NAME, ShowCommitFilesProcedure.builder)
45+
,(ShowCommitPartitionsProcedure.NAME, ShowCommitPartitionsProcedure.builder)
46+
,(ShowCommitWriteStatsProcedure.NAME, ShowCommitWriteStatsProcedure.builder)
47+
,(CommitsCompareProcedure.NAME, CommitsCompareProcedure.builder)
48+
,(ShowSavepointsProcedure.NAME, ShowSavepointsProcedure.builder)
49+
,(DeleteMarkerProcedure.NAME, DeleteMarkerProcedure.builder)
50+
,(ShowRollbacksProcedure.NAME, ShowRollbacksProcedure.builder)
51+
,(ShowRollbackDetailProcedure.NAME, ShowRollbackDetailProcedure.builder)
52+
,(ExportInstantsProcedure.NAME, ExportInstantsProcedure.builder)
53+
,(ShowAllFileSystemViewProcedure.NAME, ShowAllFileSystemViewProcedure.builder)
54+
,(ShowLatestFileSystemViewProcedure.NAME, ShowLatestFileSystemViewProcedure.builder)
55+
,(ShowHoodieLogFileMetadataProcedure.NAME, ShowHoodieLogFileMetadataProcedure.builder)
56+
,(ShowHoodieLogFileRecordsProcedure.NAME, ShowHoodieLogFileRecordsProcedure.builder)
57+
,(StatsWriteAmplificationProcedure.NAME, StatsWriteAmplificationProcedure.builder)
58+
,(StatsFileSizeProcedure.NAME, StatsFileSizeProcedure.builder)
59+
,(HdfsParquetImportProcedure.NAME, HdfsParquetImportProcedure.builder)
60+
,(RunBootstrapProcedure.NAME, RunBootstrapProcedure.builder)
61+
,(ShowBootstrapMappingProcedure.NAME, ShowBootstrapMappingProcedure.builder)
62+
,(ShowBootstrapPartitionsProcedure.NAME, ShowBootstrapPartitionsProcedure.builder)
63+
,(UpgradeTableProcedure.NAME, UpgradeTableProcedure.builder)
64+
,(DowngradeTableProcedure.NAME, DowngradeTableProcedure.builder)
65+
,(ShowMetadataTableFilesProcedure.NAME, ShowMetadataTableFilesProcedure.builder)
66+
,(ShowMetadataTablePartitionsProcedure.NAME, ShowMetadataTablePartitionsProcedure.builder)
67+
,(CreateMetadataTableProcedure.NAME, CreateMetadataTableProcedure.builder)
68+
,(DeleteMetadataTableProcedure.NAME, DeleteMetadataTableProcedure.builder)
69+
,(InitMetadataTableProcedure.NAME, InitMetadataTableProcedure.builder)
70+
,(ShowMetadataTableStatsProcedure.NAME, ShowMetadataTableStatsProcedure.builder)
71+
,(ValidateMetadataTableFilesProcedure.NAME, ValidateMetadataTableFilesProcedure.builder)
72+
,(ShowFsPathDetailProcedure.NAME, ShowFsPathDetailProcedure.builder)
73+
,(CopyToTableProcedure.NAME, CopyToTableProcedure.builder)
74+
,(RepairAddpartitionmetaProcedure.NAME, RepairAddpartitionmetaProcedure.builder)
75+
,(RepairCorruptedCleanFilesProcedure.NAME, RepairCorruptedCleanFilesProcedure.builder)
76+
,(RepairDeduplicateProcedure.NAME, RepairDeduplicateProcedure.builder)
77+
,(RepairMigratePartitionMetaProcedure.NAME, RepairMigratePartitionMetaProcedure.builder)
78+
,(RepairOverwriteHoodiePropsProcedure.NAME, RepairOverwriteHoodiePropsProcedure.builder)
79+
,(RunCleanProcedure.NAME, RunCleanProcedure.builder)
80+
,(ValidateHoodieSyncProcedure.NAME, ValidateHoodieSyncProcedure.builder)
81+
,(ShowInvalidParquetProcedure.NAME, ShowInvalidParquetProcedure.builder)
82+
)
8783
}
8884
}

hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowBootstrapMappingProcedure.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@
1717

1818
package org.apache.spark.sql.hudi.command.procedures
1919

20-
import com.google.common.collect.Lists
2120
import org.apache.hudi.common.bootstrap.index.BootstrapIndex
2221
import org.apache.hudi.common.model.{BootstrapFileMapping, HoodieFileGroupId}
2322
import org.apache.hudi.common.table.HoodieTableMetaClient

hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowFileSystemViewProcedure.scala

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@
1717

1818
package org.apache.spark.sql.hudi.command.procedures
1919

20-
import com.google.common.collect.Lists
2120
import org.apache.hadoop.fs.{FileStatus, Path}
2221
import org.apache.hudi.common.fs.FSUtils
2322
import org.apache.hudi.common.model.{FileSlice, HoodieLogFile}
@@ -30,7 +29,8 @@ import org.apache.spark.sql.types.{DataTypes, Metadata, StructField, StructType}
3029

3130
import java.util.function.{Function, Supplier}
3231
import java.util.stream.Collectors
33-
import scala.collection.JavaConverters.{asJavaIteratorConverter, asScalaIteratorConverter}
32+
import scala.collection.JavaConversions
33+
import scala.collection.JavaConverters.{asJavaIterableConverter, asJavaIteratorConverter, asScalaIteratorConverter}
3434

3535
class ShowFileSystemViewProcedure(showLatest: Boolean) extends BaseProcedure with ProcedureBuilder {
3636
private val PARAMETERS_ALL: Array[ProcedureParameter] = Array[ProcedureParameter](
@@ -118,12 +118,14 @@ class ShowFileSystemViewProcedure(showLatest: Boolean) extends BaseProcedure wit
118118
metaClient.getActiveTimeline.getInstantDetails(instant)
119119
}
120120
}
121-
val filteredTimeline = new HoodieDefaultTimeline(Lists.newArrayList(instants.asJava).stream(), details)
121+
122+
val filteredTimeline = new HoodieDefaultTimeline(
123+
new java.util.ArrayList[HoodieInstant](JavaConversions.asJavaCollection(instants.toList)).stream(), details)
122124
new HoodieTableFileSystemView(metaClient, filteredTimeline, statuses.toArray(new Array[FileStatus](0)))
123125
}
124126

125127
private def showAllFileSlices(fsView: HoodieTableFileSystemView): java.util.List[Row] = {
126-
val rows: java.util.List[Row] = Lists.newArrayList()
128+
val rows: java.util.List[Row] = new java.util.ArrayList[Row]
127129
fsView.getAllFileGroups.iterator().asScala.foreach(fg => {
128130
fg.getAllFileSlices.iterator().asScala.foreach(fs => {
129131
val fileId = fg.getFileGroupId.getFileId
@@ -161,7 +163,7 @@ class ShowFileSystemViewProcedure(showLatest: Boolean) extends BaseProcedure wit
161163
maxInstant
162164
})
163165
}
164-
val rows: java.util.List[Row] = Lists.newArrayList()
166+
val rows: java.util.List[Row] = new java.util.ArrayList[Row]
165167
fileSliceStream.iterator().asScala.foreach {
166168
fs => {
167169
val fileId = fs.getFileId

hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/procedures/ShowFsPathDetailProcedure.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@
1717

1818
package org.apache.spark.sql.hudi.command.procedures
1919

20-
import com.google.common.collect.Lists
2120
import org.apache.hadoop.fs.{ContentSummary, FileStatus, Path}
2221
import org.apache.hudi.common.fs.FSUtils
2322
import org.apache.spark.sql.Row
@@ -58,7 +57,7 @@ class ShowFsPathDetailProcedure extends BaseProcedure with ProcedureBuilder {
5857
val path: Path = new Path(srcPath)
5958
val fs = FSUtils.getFs(path, jsc.hadoopConfiguration())
6059
val status: Array[FileStatus] = if (isSub) fs.listStatus(path) else fs.globStatus(path)
61-
val rows: java.util.List[Row] = Lists.newArrayList()
60+
val rows: java.util.List[Row] = new java.util.ArrayList[Row]()
6261

6362
if (status.nonEmpty) {
6463
for (i <- status.indices) {

0 commit comments

Comments
 (0)