@ruo91
Created October 24, 2013 13:04
Flume 1.4.0 stable : ERROR hdfs.HDFSEventSink: process failed
[root@dev conf]# flume-ng agent --conf $FLUME_HOME/conf -f $FLUME_HOME/conf/flume.conf -Dflume.root.logger=DEBUG,console -n YongbokAgent
Info: Sourcing environment configuration script /home/hadoop/flume/conf/flume-env.sh
Info: Including Hadoop libraries found via (/home/hadoop/2.2.0/bin/hadoop) for HDFS access
Info: Excluding /home/hadoop/2.2.0/share/hadoop/common/lib/slf4j-api-1.7.5.jar from classpath
Info: Excluding /home/hadoop/2.2.0/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar from classpath
+ exec /usr/local/jdk/bin/java -Xmx20m -Dflume.root.logger=DEBUG,console -cp '/home/hadoop/flume/conf:/home/hadoop/flume/lib/*:/home/hadoop/2.2.0/etc/hadoop:/home/hadoop/2.2.0/share/hadoop/common/lib/activation-1.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/asm-3.2.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/avro-1.7.4.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-beanutils-1.7.0.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-beanutils-core-1.8.0.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-cli-1.2.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-codec-1.4.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-collections-3.2.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-compress-1.4.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-configuration-1.6.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-digester-1.8.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-el-1.0.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-httpclient-3.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-io-2.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-lang-2.5.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-logging-1.1.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-math-2.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/commons-net-3.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/guava-11.0.2.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/hadoop-annotations-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/hadoop-auth-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jackson-core-asl-1.8.8.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jackson-jaxrs-1.8.8.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jackson-mapper-asl-1.8.8.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jackson-xc-1.8.8.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jasper-compiler-5.5.23.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jasper-runtime-5.5.23.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jaxb-api-2.2.2.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jaxb-impl-2.2.3-1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jersey-core-1.9.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jersey-json-1.9.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jersey-server-1.9.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jets3t-0.6.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jettison-1.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jetty-6.1.26.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jetty-util-6.1.26.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jsch-0.1.42.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jsp-api-2.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/jsr305-1.3.9.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/junit-4.8.2.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/log4j-1.2.17.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/mockito-all-1.8.5.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/netty-3.6.2.Final.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/paranamer-2.3.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/protobuf-java-2.5.0.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/servlet-api-2.5.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/snappy-java-1.0.4.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/stax-api-1.0.1.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/xmlenc-0.52.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/xz-1.0.jar:/home/hadoop/2.2.0/share/hadoop/common/lib/zookeeper-3.4.5.jar:/home/hadoop/2.2.0/share/hadoop/common/hadoop-common-2.2.0-tests.jar:/home/hadoop/2.2.0/share/hadoop/common/hadoop-
common-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/common/hadoop-nfs-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/common/jdiff:/home/hadoop/2.2.0/share/hadoop/common/lib:/home/hadoop/2.2.0/share/hadoop/common/sources:/home/hadoop/2.2.0/share/hadoop/common/templates:/home/hadoop/2.2.0/share/hadoop/hdfs:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/asm-3.2.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/commons-cli-1.2.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/commons-codec-1.4.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/commons-daemon-1.0.13.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/commons-el-1.0.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/commons-io-2.1.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/commons-lang-2.5.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/commons-logging-1.1.1.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/guava-11.0.2.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/jackson-core-asl-1.8.8.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/jackson-mapper-asl-1.8.8.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/jasper-runtime-5.5.23.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/jersey-core-1.9.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/jersey-server-1.9.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/jetty-6.1.26.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/jetty-util-6.1.26.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/jsp-api-2.1.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/jsr305-1.3.9.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/log4j-1.2.17.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/netty-3.6.2.Final.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/protobuf-java-2.5.0.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/servlet-api-2.5.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/lib/xmlenc-0.52.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/hadoop-hdfs-2.2.0-tests.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/hadoop-hdfs-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/hadoop-hdfs-nfs-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/hdfs/jdiff:/home/hadoop/2.2.0/share/hadoop/hdfs/lib:/home/hadoop/2.2.0/share/hadoop/hdfs/sources:/home/hadoop/2.2.0/share/hadoop/hdfs/templates:/home/hadoop/2.2.0/share/hadoop/hdfs/webapps:/home/hadoop/2.2.0/share/hadoop/yarn/lib/aopalliance-1.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/asm-3.2.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/avro-1.7.4.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/commons-compress-1.4.1.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/commons-io-2.1.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/guice-3.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/guice-servlet-3.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/hadoop-annotations-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/hamcrest-core-1.1.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/jackson-core-asl-1.8.8.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/jackson-mapper-asl-1.8.8.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/javax.inject-1.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/jersey-core-1.9.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/jersey-guice-1.9.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/jersey-server-1.9.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/junit-4.10.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/log4j-1.2.17.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/netty-3.6.2.Final.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/paranamer-2.3.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/protobuf-java-2.5.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/snappy-java-1.0.4.1.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib/xz-1.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/hadoop-yarn-api-2.2.0.jar:/home/hadoop/2.
2.0/share/hadoop/yarn/hadoop-yarn-applications-distributedshell-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/hadoop-yarn-applications-unmanaged-am-launcher-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/hadoop-yarn-client-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/hadoop-yarn-common-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/hadoop-yarn-server-common-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/hadoop-yarn-server-nodemanager-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/hadoop-yarn-server-resourcemanager-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/hadoop-yarn-server-tests-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/hadoop-yarn-server-web-proxy-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/hadoop-yarn-site-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/yarn/lib:/home/hadoop/2.2.0/share/hadoop/yarn/lib-examples:/home/hadoop/2.2.0/share/hadoop/yarn/sources:/home/hadoop/2.2.0/share/hadoop/yarn/test:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/aopalliance-1.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/asm-3.2.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/avro-1.7.4.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/commons-compress-1.4.1.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/commons-io-2.1.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/guice-3.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/guice-servlet-3.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/hadoop-annotations-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/hamcrest-core-1.1.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/jackson-core-asl-1.8.8.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/jackson-mapper-asl-1.8.8.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/javax.inject-1.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/jersey-core-1.9.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/jersey-guice-1.9.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/jersey-server-1.9.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/junit-4.10.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/log4j-1.2.17.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/netty-3.6.2.Final.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/paranamer-2.3.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/protobuf-java-2.5.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/snappy-java-1.0.4.1.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib/xz-1.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/hadoop-mapreduce-client-app-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/hadoop-mapreduce-client-common-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/hadoop-mapreduce-client-hs-plugins-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.2.0-tests.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/hadoop-mapreduce-client-shuffle-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.2.0.jar:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib:/home/hadoop/2.2.0/share/hadoop/mapreduce/lib-examples:/home/hadoop/2.2.0/share/hadoop/mapreduce/sources:/contrib/capacity-scheduler/*.jar' -Djava.library.path=:/home/hadoop/2.2.0/lib/native org.apache.flume.node.Application -f /home/hadoop/flume/conf/flume.conf -n YongbokAgent
13/10/24 21:15:40 INFO node.PollingPropertiesFileConfigurationProvider: Configuration provider starting
13/10/24 21:15:40 INFO node.PollingPropertiesFileConfigurationProvider: Reloading configuration file:/home/hadoop/flume/conf/flume.conf
13/10/24 21:15:40 INFO conf.FlumeConfiguration: Processing:HDFS
13/10/24 21:15:40 INFO conf.FlumeConfiguration: Added sinks: HDFS Agent: YongbokAgent
13/10/24 21:15:40 INFO conf.FlumeConfiguration: Processing:HDFS
13/10/24 21:15:40 INFO conf.FlumeConfiguration: Processing:HDFS
13/10/24 21:15:40 INFO conf.FlumeConfiguration: Processing:HDFS
13/10/24 21:15:40 INFO conf.FlumeConfiguration: Processing:HDFS
13/10/24 21:15:40 INFO conf.FlumeConfiguration: Processing:HDFS
13/10/24 21:15:40 INFO conf.FlumeConfiguration: Processing:HDFS
13/10/24 21:15:41 INFO conf.FlumeConfiguration: Post-validation flume configuration contains configuration for agents: [YongbokAgent]
13/10/24 21:15:41 INFO node.AbstractConfigurationProvider: Creating channels
13/10/24 21:15:41 INFO channel.DefaultChannelFactory: Creating instance of channel MemoryChannel type memory
13/10/24 21:15:41 INFO node.AbstractConfigurationProvider: Created channel MemoryChannel
13/10/24 21:15:41 INFO source.DefaultSourceFactory: Creating instance of source Yongbok, type exec
13/10/24 21:15:41 INFO sink.DefaultSinkFactory: Creating instance of sink: HDFS, type: hdfs
Java HotSpot(TM) 64-Bit Server VM warning: You have loaded library /home/hadoop/2.2.0/lib/native/libhadoop.so.1.0.0 which might have disabled stack guard. The VM will try to fix the stack guard now.
It's highly recommended that you fix the library with 'execstack -c <libfile>', or link it with '-z noexecstack'.
13/10/24 21:15:42 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
13/10/24 21:15:43 INFO hdfs.HDFSEventSink: Hadoop Security enabled: false
13/10/24 21:15:43 INFO node.AbstractConfigurationProvider: Channel MemoryChannel connected to [Yongbok, HDFS]
13/10/24 21:15:43 INFO node.Application: Starting new configuration:{ sourceRunners:{Yongbok=EventDrivenSourceRunner: { source:org.apache.flume.source.ExecSource{name:Yongbok,state:IDLE} }} sinkRunners:{HDFS=SinkRunner: { policy:org.apache.flume.sink.DefaultSinkProcessor@5141928f counterGroup:{ name:null counters:{} } }} channels:{MemoryChannel=org.apache.flume.channel.MemoryChannel{name: MemoryChannel}} }
13/10/24 21:15:43 INFO node.Application: Starting Channel MemoryChannel
13/10/24 21:15:44 INFO instrumentation.MonitoredCounterGroup: Monitoried counter group for type: CHANNEL, name: MemoryChannel, registered successfully.
13/10/24 21:15:44 INFO instrumentation.MonitoredCounterGroup: Component type: CHANNEL, name: MemoryChannel started
13/10/24 21:15:44 INFO node.Application: Starting Sink HDFS
13/10/24 21:15:44 INFO node.Application: Starting Source Yongbok
13/10/24 21:15:44 INFO source.ExecSource: Exec source starting with command:tail -F /usr/local/nginx/logs/mirror-access.json
13/10/24 21:15:44 INFO instrumentation.MonitoredCounterGroup: Monitoried counter group for type: SINK, name: HDFS, registered successfully.
13/10/24 21:15:44 INFO instrumentation.MonitoredCounterGroup: Component type: SINK, name: HDFS started
13/10/24 21:15:44 INFO instrumentation.MonitoredCounterGroup: Monitoried counter group for type: SOURCE, name: Yongbok, registered successfully.
13/10/24 21:15:44 INFO instrumentation.MonitoredCounterGroup: Component type: SOURCE, name: Yongbok started
13/10/24 21:15:48 INFO hdfs.HDFSSequenceFile: writeFormat = Text, UseRawLocalFileSystem = false
13/10/24 21:15:48 INFO hdfs.BucketWriter: Creating hdfs://localhost:9000/FlumeData.1382616948181.tmp
13/10/24 21:15:49 ERROR hdfs.HDFSEventSink: process failed
java.lang.VerifyError: class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$SetOwnerRequestProto overrides final method getUnknownFields.()Lcom/google/protobuf/UnknownFieldSet;
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:792)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.getDeclaredMethods0(Native Method)
at java.lang.Class.privateGetDeclaredMethods(Class.java:2521)
at java.lang.Class.privateGetPublicMethods(Class.java:2641)
at java.lang.Class.privateGetPublicMethods(Class.java:2651)
at java.lang.Class.getMethods(Class.java:1457)
at sun.misc.ProxyGenerator.generateClassFile(ProxyGenerator.java:426)
at sun.misc.ProxyGenerator.generateProxyClass(ProxyGenerator.java:323)
at java.lang.reflect.Proxy.getProxyClass0(Proxy.java:636)
at java.lang.reflect.Proxy.newProxyInstance(Proxy.java:722)
at org.apache.hadoop.ipc.ProtobufRpcEngine.getProxy(ProtobufRpcEngine.java:92)
at org.apache.hadoop.ipc.RPC.getProtocolProxy(RPC.java:537)
at org.apache.hadoop.hdfs.NameNodeProxies.createNNProxyWithClientProtocol(NameNodeProxies.java:328)
at org.apache.hadoop.hdfs.NameNodeProxies.createNonHAProxy(NameNodeProxies.java:235)
at org.apache.hadoop.hdfs.NameNodeProxies.createProxy(NameNodeProxies.java:139)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:510)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:453)
at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:136)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2433)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
at org.apache.hadoop.fs.Path.getFileSystem(Path.java:287)
at org.apache.flume.sink.hdfs.BucketWriter$1.call(BucketWriter.java:226)
at org.apache.flume.sink.hdfs.BucketWriter$1.call(BucketWriter.java:220)
at org.apache.flume.sink.hdfs.BucketWriter$8$1.run(BucketWriter.java:536)
at org.apache.flume.sink.hdfs.BucketWriter.runPrivileged(BucketWriter.java:160)
at org.apache.flume.sink.hdfs.BucketWriter.access$1000(BucketWriter.java:56)
at org.apache.flume.sink.hdfs.BucketWriter$8.call(BucketWriter.java:533)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:724)
Exception in thread "SinkRunner-PollingRunner-DefaultSinkProcessor" java.lang.VerifyError: class org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$SetOwnerRequestProto overrides final method getUnknownFields.()Lcom/google/protobuf/UnknownFieldSet;
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClass(ClassLoader.java:792)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:449)
at java.net.URLClassLoader.access$100(URLClassLoader.java:71)
at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.getDeclaredMethods0(Native Method)
at java.lang.Class.privateGetDeclaredMethods(Class.java:2521)
at java.lang.Class.privateGetPublicMethods(Class.java:2641)
at java.lang.Class.privateGetPublicMethods(Class.java:2651)
at java.lang.Class.getMethods(Class.java:1457)
at sun.misc.ProxyGenerator.generateClassFile(ProxyGenerator.java:426)
at sun.misc.ProxyGenerator.generateProxyClass(ProxyGenerator.java:323)
at java.lang.reflect.Proxy.getProxyClass0(Proxy.java:636)
at java.lang.reflect.Proxy.newProxyInstance(Proxy.java:722)
at org.apache.hadoop.ipc.ProtobufRpcEngine.getProxy(ProtobufRpcEngine.java:92)
at org.apache.hadoop.ipc.RPC.getProtocolProxy(RPC.java:537)
at org.apache.hadoop.hdfs.NameNodeProxies.createNNProxyWithClientProtocol(NameNodeProxies.java:328)
at org.apache.hadoop.hdfs.NameNodeProxies.createNonHAProxy(NameNodeProxies.java:235)
at org.apache.hadoop.hdfs.NameNodeProxies.createProxy(NameNodeProxies.java:139)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:510)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:453)
at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:136)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2433)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:88)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2467)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2449)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:367)
at org.apache.hadoop.fs.Path.getFileSystem(Path.java:287)
at org.apache.flume.sink.hdfs.BucketWriter$1.call(BucketWriter.java:226)
at org.apache.flume.sink.hdfs.BucketWriter$1.call(BucketWriter.java:220)
at org.apache.flume.sink.hdfs.BucketWriter$8$1.run(BucketWriter.java:536)
at org.apache.flume.sink.hdfs.BucketWriter.runPrivileged(BucketWriter.java:160)
at org.apache.flume.sink.hdfs.BucketWriter.access$1000(BucketWriter.java:56)
at org.apache.flume.sink.hdfs.BucketWriter$8.call(BucketWriter.java:533)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:724)
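
Why this happens: the VerifyError above is the usual symptom of a protocol buffers version conflict. Hadoop 2.2.0's HDFS protocol classes (ClientNamenodeProtocolProtos) are generated with protobuf 2.5.0, but $FLUME_HOME/lib appears first on the classpath in the exec line above, so an older protobuf-java jar bundled with Flume is loaded instead and class verification fails. The sketch below is one common workaround; it assumes Flume 1.4.0 ships protobuf-java-2.4.1.jar (and an older guava) in its lib directory, so check the actual file names in your installation before moving anything.

# Move the conflicting jars out of Flume's lib so Hadoop's copies win.
# File names are assumptions based on a stock Flume 1.4.0 install; adjust to match yours.
cd $FLUME_HOME/lib
mv protobuf-java-2.4.1.jar protobuf-java-2.4.1.jar.bak
mv guava-10.0.1.jar guava-10.0.1.jar.bak
# No replacement copy is needed: the Hadoop 2.2.0 classpath pulled in above already
# provides protobuf-java-2.5.0.jar and guava-11.0.2.jar.

After removing the old jars, restart the agent with the same flume-ng command and the HDFS sink should be able to open hdfs://localhost:9000/ without the VerifyError.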
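
For reference, the agent configuration implied by the startup messages above is roughly the following. This is a reconstruction, not the original flume.conf: only the properties visible in the log (agent name, exec source command, memory channel, HDFS sink path and writeFormat) are filled in, and anything else (rollInterval, batch sizes, etc.) is omitted.

# Hypothetical /home/hadoop/flume/conf/flume.conf reconstructed from the log above
YongbokAgent.sources = Yongbok
YongbokAgent.channels = MemoryChannel
YongbokAgent.sinks = HDFS

# Exec source tailing the nginx access log
YongbokAgent.sources.Yongbok.type = exec
YongbokAgent.sources.Yongbok.command = tail -F /usr/local/nginx/logs/mirror-access.json
YongbokAgent.sources.Yongbok.channels = MemoryChannel

# In-memory channel
YongbokAgent.channels.MemoryChannel.type = memory

# HDFS sink; fileType defaults to SequenceFile, matching "HDFSSequenceFile: writeFormat = Text" in the log
YongbokAgent.sinks.HDFS.type = hdfs
YongbokAgent.sinks.HDFS.channel = MemoryChannel
YongbokAgent.sinks.HDFS.hdfs.path = hdfs://localhost:9000/
YongbokAgent.sinks.HDFS.hdfs.writeFormat = Text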