Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,11 @@

import java.io.InputStream;
import java.util.Objects;
import java.util.function.Function;
import org.apache.hadoop.fs.ByteBufferReadable;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.parquet.io.PositionOutputStream;
import org.apache.parquet.io.SeekableInputStream;
import org.apache.parquet.util.DynMethods;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand All @@ -38,11 +36,6 @@ public class HadoopStreams {

private static final Logger LOG = LoggerFactory.getLogger(HadoopStreams.class);

private static final DynMethods.UnboundMethod hasCapabilitiesMethod = new DynMethods.Builder("hasCapabilities")
.impl(FSDataInputStream.class, "hasCapabilities", String.class)
.orNoop()
.build();

/**
* Wraps a {@link FSDataInputStream} in a {@link SeekableInputStream}
* implementation for Parquet readers.
Expand All @@ -52,49 +45,16 @@ public class HadoopStreams {
*/
public static SeekableInputStream wrap(FSDataInputStream stream) {
Objects.requireNonNull(stream, "Cannot wrap a null input stream");

// Try to check using hasCapabilities(str)
Boolean hasCapabilitiesResult = isWrappedStreamByteBufferReadable(stream);

// If it is null, then fall back to the old method
if (hasCapabilitiesResult != null) {
if (hasCapabilitiesResult) {
return new H2SeekableInputStream(stream);
} else {
return new H1SeekableInputStream(stream);
}
}

return unwrapByteBufferReadableLegacy(stream).apply(stream);
}

/**
* Is the inner stream byte buffer readable?
* The test is 'the stream is not FSDataInputStream
* and implements ByteBufferReadable'
* <p>
* This logic is only used for Hadoop <2.9.x, and <3.x.x
*
* @param stream stream to probe
* @return A H2SeekableInputStream to access, or H1SeekableInputStream if the stream is not seekable
*/
private static Function<FSDataInputStream, SeekableInputStream> unwrapByteBufferReadableLegacy(
FSDataInputStream stream) {
InputStream wrapped = stream.getWrappedStream();
if (wrapped instanceof FSDataInputStream) {
LOG.debug("Checking on wrapped stream {} of {} whether is ByteBufferReadable", wrapped, stream);
return unwrapByteBufferReadableLegacy(((FSDataInputStream) wrapped));
}
if (stream.getWrappedStream() instanceof ByteBufferReadable) {
return H2SeekableInputStream::new;
if (isWrappedStreamByteBufferReadable(stream)) {
return new H2SeekableInputStream(stream);
} else {
return H1SeekableInputStream::new;
return new H1SeekableInputStream(stream);
}
}

/**
* Is the inner stream byte buffer readable?
* The test is 'the stream is not FSDataInputStream
* The test is "the stream is not FSDataInputStream
* and implements ByteBufferReadable"
* <p>
* That is: all streams which implement ByteBufferReadable
Expand All @@ -107,18 +67,10 @@ private static Function<FSDataInputStream, SeekableInputStream> unwrapByteBuffer
* FSDataInputStream will pass the probe down to the underlying stream.
*
* @param stream stream to probe
* @return true if it is safe to a H2SeekableInputStream to access
* the data, null when it cannot be determined because of missing hasCapabilities
* @return true if it is safe to use a H2SeekableInputStream to access the data
*/
private static Boolean isWrappedStreamByteBufferReadable(FSDataInputStream stream) {
if (hasCapabilitiesMethod.isNoop()) {
// When the method is not available, just return a null
return null;
}

boolean isByteBufferReadable = hasCapabilitiesMethod.invoke(stream, "in:readbytebuffer");

if (isByteBufferReadable) {
private static boolean isWrappedStreamByteBufferReadable(FSDataInputStream stream) {
if (stream.hasCapability("in:readbytebuffer")) {
// stream is issuing the guarantee that it implements the
// API. Holds for all implementations in hadoop-*
// since Hadoop 3.3.0 (HDFS-14111).
Expand Down
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -639,7 +639,7 @@
<profile>
<id>hadoop2</id>
<properties>
<hadoop.version>2.7.3</hadoop.version>
<hadoop.version>2.9.2</hadoop.version>
</properties>
</profile>

Expand Down