Skip to content

Commit ca9b8fb

Browse files
committed
fix ci
1 parent 0445bf7 commit ca9b8fb

3 files changed

Lines changed: 9 additions & 17 deletions

File tree

hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/HoodieMergeOnReadTestUtils.java

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -133,8 +133,7 @@ public static List<GenericRecord> getRecordsUsingInputFormat(Configuration conf,
133133
hiveColumnTypes = rawHiveColumnTypes;
134134
}
135135

136-
JobConf newConf = new JobConf(jobConf);
137-
setPropsForInputFormat(inputFormat, newConf, schema, hiveColumnTypes, projectCols, projectedColumns, populateMetaFields);
136+
setPropsForInputFormat(inputFormat, jobConf, schema, hiveColumnTypes, projectCols, projectedColumns, populateMetaFields);
138137
final List<Field> fields;
139138
if (projectCols) {
140139
fields = schema.getFields().stream().filter(f -> projectedColumns.contains(f.name()))
@@ -148,11 +147,11 @@ public static List<GenericRecord> getRecordsUsingInputFormat(Configuration conf,
148147

149148
List<GenericRecord> records = new ArrayList<>();
150149
try {
151-
FileInputFormat.setInputPaths(newConf, String.join(",", inputPaths));
152-
InputSplit[] splits = inputFormat.getSplits(newConf, inputPaths.size());
150+
FileInputFormat.setInputPaths(jobConf, String.join(",", inputPaths));
151+
InputSplit[] splits = inputFormat.getSplits(jobConf, inputPaths.size());
153152

154153
for (InputSplit split : splits) {
155-
RecordReader recordReader = inputFormat.getRecordReader(split, newConf, null);
154+
RecordReader recordReader = inputFormat.getRecordReader(split, jobConf, null);
156155
Object key = recordReader.createKey();
157156
ArrayWritable writable = (ArrayWritable) recordReader.createValue();
158157
while (recordReader.next(key, writable)) {

hudi-hadoop-mr/src/main/java/org/apache/hudi/hadoop/utils/HiveAvroSerializer.java

Lines changed: 4 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -69,12 +69,12 @@
6969
*/
7070
public class HiveAvroSerializer {
7171

72-
private static final Logger LOG = LogManager.getLogger(HiveAvroSerializer.class);
73-
7472
private final List<String> columnNames;
7573
private final List<TypeInfo> columnTypes;
7674
private final ObjectInspector objectInspector;
7775

76+
private static final Logger LOG = LogManager.getLogger(HiveAvroSerializer.class);
77+
7878
public HiveAvroSerializer(ObjectInspector objectInspector, List<String> columnNames, List<TypeInfo> columnTypes) {
7979
this.columnNames = columnNames;
8080
this.columnTypes = columnTypes;
@@ -104,20 +104,13 @@ public GenericRecord serialize(Object o, Schema schema) {
104104
break;
105105
}
106106
try {
107-
// use to digest ci bug, should remove those code , before merged.
108107
setUpRecordFieldFromWritable(columnTypes.get(i), structFieldsDataAsList.get(i),
109108
allStructFieldRefs.get(i).getFieldObjectInspector(), record, field);
110109
} catch (Exception e) {
111-
// print log for debug
112-
LOG.error(String.format("failed to convert index: %s", i));
113-
LOG.error(String.format("current names: %s", columnNames.stream().collect(Collectors.joining(","))));
114-
LOG.error(String.format("current type %s",
115-
columnTypes.stream().map(f -> f.getTypeName()).collect(Collectors.joining(","))));
116-
LOG.error(String.format("current avro schema: %s", schema.toString()));
110+
LOG.error(String.format("current columnNames: %s", columnNames.stream().collect(Collectors.joining(","))));
111+
LOG.error(String.format("current type: %s", columnTypes.stream().map(f -> f.getTypeName()).collect(Collectors.joining(","))));
117112
LOG.error(String.format("current value: %s", HoodieRealtimeRecordReaderUtils.arrayWritableToString((ArrayWritable) o)));
118-
throw e;
119113
}
120-
121114
}
122115
return record;
123116
}

hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestBootstrap.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -116,7 +116,7 @@
116116
public class TestBootstrap extends HoodieClientTestBase {
117117

118118
public static final String TRIP_HIVE_COLUMN_TYPES = "bigint,string,string,string,string,double,double,double,double,"
119-
+ "struct<amount:double,currency:string>,array<struct<amount:double,currency:string>>,boolean";
119+
+ "struct<amount:double,currency:string>,array<struct<element:struct<amount:double,currency:string>>>,boolean";
120120

121121
@TempDir
122122
public java.nio.file.Path tmpFolder;

0 commit comments

Comments (0)