Skip to content

Instantly share code, notes, and snippets.

@aajisaka
Created December 2, 2017 14:44
Show Gist options
  • Save aajisaka/26049b1ddaf0b092b8b08cf8a13b4844 to your computer and use it in GitHub Desktop.
Apply this patch to compile Apache Hadoop with Java 9.0.1.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
index be85497209b..eb7b34f8bd4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
@@ -27,6 +27,7 @@
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Seekable;
+import org.apache.hadoop.util.CleanerUtil;
import com.google.common.base.Preconditions;
@@ -36,13 +37,14 @@
/** Forcibly free the direct buffer. */
public static void freeDB(ByteBuffer buffer) {
- if (buffer instanceof sun.nio.ch.DirectBuffer) {
- final sun.misc.Cleaner bufferCleaner =
- ((sun.nio.ch.DirectBuffer) buffer).cleaner();
- bufferCleaner.clean();
+ if (CleanerUtil.UNMAP_SUPPORTED) {
+ try {
+ CleanerUtil.getCleaner().freeBuffer(buffer);
+ } catch (Exception ignore) {
+ }
}
}
-
+
/** Read crypto buffer size */
public static int getBufferSize(Configuration conf) {
return conf.getInt(HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
index f601edd296a..9dea7ed8837 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
@@ -38,6 +38,7 @@
import org.apache.hadoop.fs.PathIOException;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException;
+import org.apache.hadoop.util.CleanerUtil;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.PerformanceAdvisory;
@@ -315,7 +316,7 @@ static void mlock(ByteBuffer buffer, long len)
}
mlock_native(buffer, len);
}
-
+
/**
* Unmaps the block from memory. See munmap(2).
*
@@ -329,10 +330,11 @@ static void mlock(ByteBuffer buffer, long len)
* @param buffer The buffer to unmap.
*/
public static void munmap(MappedByteBuffer buffer) {
- if (buffer instanceof sun.nio.ch.DirectBuffer) {
- sun.misc.Cleaner cleaner =
- ((sun.nio.ch.DirectBuffer)buffer).cleaner();
- cleaner.clean();
+ if (CleanerUtil.UNMAP_SUPPORTED) {
+ try {
+ CleanerUtil.getCleaner().freeBuffer(buffer);
+ } catch (Exception ignored) {
+ }
}
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CleanerUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CleanerUtil.java
new file mode 100644
index 00000000000..ba4caaa4b30
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CleanerUtil.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandle;
+import java.lang.invoke.MethodHandles;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.nio.ByteBuffer;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+import java.util.Objects;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+import static java.lang.invoke.MethodHandles.constant;
+import static java.lang.invoke.MethodHandles.dropArguments;
+import static java.lang.invoke.MethodHandles.filterReturnValue;
+import static java.lang.invoke.MethodHandles.guardWithTest;
+import static java.lang.invoke.MethodType.methodType;
+
+/**
+ * sun.misc.Cleaner has moved in OpenJDK 9 and
+ * sun.misc.Unsafe#invokeCleaner(ByteBuffer) is the replacement.
+ * This class is a hack to use sun.misc.Cleaner in Java 8 and
+ * use the replacement in Java 9+.
+ * This implementation is inspired by LUCENE-6989.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class CleanerUtil {
+
+ /**
+ * <code>true</code>, if this platform supports unmapping mmapped files.
+ */
+ public static final boolean UNMAP_SUPPORTED;
+
+ /**
+ * if {@link #UNMAP_SUPPORTED} is {@code false}, this contains the reason
+ * why unmapping is not supported.
+ */
+ public static final String UNMAP_NOT_SUPPORTED_REASON;
+
+
+ private static final BufferCleaner CLEANER;
+
+ /**
+ * Reference to a BufferCleaner that does unmapping.
+ * @return {@code null} if not supported.
+ */
+ public static BufferCleaner getCleaner() {
+ return CLEANER;
+ }
+
+ static {
+ final Object hack = AccessController.doPrivileged(
+ (PrivilegedAction<Object>) CleanerUtil::unmapHackImpl);
+ if (hack instanceof BufferCleaner) {
+ CLEANER = (BufferCleaner) hack;
+ UNMAP_SUPPORTED = true;
+ UNMAP_NOT_SUPPORTED_REASON = null;
+ } else {
+ CLEANER = null;
+ UNMAP_SUPPORTED = false;
+ UNMAP_NOT_SUPPORTED_REASON = hack.toString();
+ }
+ }
+
+ private static Object unmapHackImpl() {
+ final MethodHandles.Lookup lookup = MethodHandles.lookup();
+ try {
+ try {
+ // *** sun.misc.Unsafe unmapping (Java 9+) ***
+ final Class<?> unsafeClass = Class.forName("sun.misc.Unsafe");
+ // first check if Unsafe has the right method, otherwise we can
+ // give up without doing any security critical stuff:
+ final MethodHandle unmapper = lookup.findVirtual(unsafeClass,
+ "invokeCleaner", methodType(void.class, ByteBuffer.class));
+ // fetch the unsafe instance and bind it to the virtual MH:
+ final Field f = unsafeClass.getDeclaredField("theUnsafe");
+ f.setAccessible(true);
+ final Object theUnsafe = f.get(null);
+ return newBufferCleaner(ByteBuffer.class, unmapper.bindTo(theUnsafe));
+ } catch (SecurityException se) {
+ // rethrow to report errors correctly (we need to catch it here,
+ // as we also catch RuntimeException below!):
+ throw se;
+ } catch (ReflectiveOperationException | RuntimeException e) {
+ // *** sun.misc.Cleaner unmapping (Java 8) ***
+ final Class<?> directBufferClass =
+ Class.forName("java.nio.DirectByteBuffer");
+
+ final Method m = directBufferClass.getMethod("cleaner");
+ m.setAccessible(true);
+ final MethodHandle directBufferCleanerMethod = lookup.unreflect(m);
+ final Class<?> cleanerClass =
+ directBufferCleanerMethod.type().returnType();
+
+ /*
+ * "Compile" a MethodHandle that basically is equivalent
+ * to the following code:
+ *
+ * void unmapper(ByteBuffer byteBuffer) {
+ * sun.misc.Cleaner cleaner =
+ * ((java.nio.DirectByteBuffer) byteBuffer).cleaner();
+ * if (Objects.nonNull(cleaner)) {
+ * cleaner.clean();
+ * } else {
+ * // the noop is needed because MethodHandles#guardWithTest
+ * // always needs ELSE
+ * noop(cleaner);
+ * }
+ * }
+ */
+ final MethodHandle cleanMethod = lookup.findVirtual(
+ cleanerClass, "clean", methodType(void.class));
+ final MethodHandle nonNullTest = lookup.findStatic(Objects.class,
+ "nonNull", methodType(boolean.class, Object.class))
+ .asType(methodType(boolean.class, cleanerClass));
+ final MethodHandle noop = dropArguments(
+ constant(Void.class, null).asType(methodType(void.class)),
+ 0, cleanerClass);
+ final MethodHandle unmapper = filterReturnValue(
+ directBufferCleanerMethod,
+ guardWithTest(nonNullTest, cleanMethod, noop))
+ .asType(methodType(void.class, ByteBuffer.class));
+ return newBufferCleaner(directBufferClass, unmapper);
+ }
+ } catch (SecurityException se) {
+ return "Unmapping is not supported, because not all required " +
+ "permissions are given to the Hadoop JAR file: " + se +
+ " [Please grant at least the following permissions: " +
+ "RuntimePermission(\"accessClassInPackage.sun.misc\") " +
+ " and ReflectPermission(\"suppressAccessChecks\")]";
+ } catch (ReflectiveOperationException | RuntimeException e) {
+ return "Unmapping is not supported on this platform, " +
+ "because internal Java APIs are not compatible with " +
+ "this Hadoop version: " + e;
+ }
+ }
+
+ private static BufferCleaner newBufferCleaner(
+ final Class<?> unmappableBufferClass, final MethodHandle unmapper) {
+ assert Objects.equals(
+ methodType(void.class, ByteBuffer.class), unmapper.type());
+ return buffer -> {
+ if (!buffer.isDirect()) {
+ throw new IllegalArgumentException(
+ "unmapping only works with direct buffers");
+ }
+ if (!unmappableBufferClass.isInstance(buffer)) {
+ throw new IllegalArgumentException("buffer is not an instance of " +
+ unmappableBufferClass.getName());
+ }
+ final Throwable error = AccessController.doPrivileged(
+ (PrivilegedAction<Throwable>) () -> {
+ try {
+ unmapper.invokeExact(buffer);
+ return null;
+ } catch (Throwable t) {
+ return t;
+ }
+ });
+ if (error != null) {
+ throw new IOException("Unable to unmap the mapped buffer: " + error);
+ }
+ };
+ }
+
+ /**
+ * Pass in an implementation of this interface to cleanup ByteBuffers.
+ * CleanerUtil implements this to allow unmapping of bytebuffers
+ * with private Java APIs.
+ */
+ @FunctionalInterface
+ public interface BufferCleaner {
+ void freeBuffer(ByteBuffer b) throws IOException;
+ }
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index 65eea31a32c..19605320a4e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -600,6 +600,23 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
</plugins>
</build>
</profile>
-
+ <profile>
+ <id>java9</id>
+ <activation>
+ <jdk>9</jdk>
+ </activation>
+ <build>
+ <plugins>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <testExcludes>
+ <testExclude>org/apache/hadoop/hdfs/TestDFSClientFailover.java</testExclude>
+ </testExcludes>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
</profiles>
</project>
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 04b93c48155..955cf91805e 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -1542,32 +1542,6 @@
</execution>
</executions>
</plugin>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
- <artifactId>animal-sniffer-maven-plugin</artifactId>
- <version>1.16</version>
- <executions>
- <execution>
- <id>signature-check</id>
- <phase>verify</phase>
- <goals>
- <goal>check</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <signature>
- <groupId>org.codehaus.mojo.signature</groupId>
- <artifactId>java18</artifactId>
- <version>1.0</version>
- </signature>
- <ignores>
- <ignore>sun.misc.*</ignore>
- <ignore>sun.net.*</ignore>
- <ignore>sun.nio.ch.*</ignore>
- </ignores>
- </configuration>
- </plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment