Last active
December 18, 2015 04:48
-
-
Save sureshsaggar/5727705 to your computer and use it in GitHub Desktop.
Hadoop distcp between Hortonworks and Cloudera
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
hdfs@hadoop-prod-growthui:~$ hadoop distcp -i hdfs://hadoop-prod-master.vpc:8020/data/analytics/smsrecords hdfs://10.0.0.144:8020/data/analytics/smsrecords
13/06/07 07:18:19 INFO tools.DistCp: Input Options: DistCpOptions{atomicCommit=false, syncFolder=false, deleteMissing=false, ignoreFailures=true, maxMaps=20, sslConfigurationFile='null', copyStrategy='uniformsize', sourceFileListing=null, sourcePaths=[hdfs://hadoop-prod-master.vpc:8020/data/analytics/smsrecords], targetPath=hdfs://10.0.0.144:8020/data/analytics/smsrecords}
13/06/07 07:18:22 INFO service.AbstractService: Service:org.apache.hadoop.yarn.client.YarnClientImpl is inited.
13/06/07 07:18:23 INFO service.AbstractService: Service:org.apache.hadoop.yarn.client.YarnClientImpl is started.
13/06/07 07:18:26 ERROR tools.DistCp: Exception encountered
java.io.IOException: Failed on local exception: java.io.IOException: Broken pipe; Host Details : local host is: "hadoop-prod-growthui.vpc/10.0.0.230"; destination host is: "ip-10-0-0-144.ap-southeast-1.compute.internal":8020;
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:763)
at org.apache.hadoop.ipc.Client.call(Client.java:1229)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:202)
at com.sun.proxy.$Proxy10.getFileInfo(Unknown Source)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:164)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:83)
at com.sun.proxy.$Proxy10.getFileInfo(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:629)
at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1545)
at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:820)
at org.apache.hadoop.fs.FileSystem.isFile(FileSystem.java:1404)
at org.apache.hadoop.tools.SimpleCopyListing.validatePaths(SimpleCopyListing.java:67)
at org.apache.hadoop.tools.CopyListing.buildListing(CopyListing.java:79)
at org.apache.hadoop.tools.GlobbedCopyListing.doBuildListing(GlobbedCopyListing.java:90)
at org.apache.hadoop.tools.CopyListing.buildListing(CopyListing.java:80)
at org.apache.hadoop.tools.DistCp.createInputFileListing(DistCp.java:326)
at org.apache.hadoop.tools.DistCp.execute(DistCp.java:151)
at org.apache.hadoop.tools.DistCp.run(DistCp.java:118)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
at org.apache.hadoop.tools.DistCp.main(DistCp.java:374)
Caused by: java.io.IOException: Broken pipe
at sun.nio.ch.FileDispatcherImpl.write0(Native Method)
at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:47)
at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:94)
at sun.nio.ch.IOUtil.write(IOUtil.java:65)
at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:450)
at org.apache.hadoop.net.SocketOutputStream$Writer.performIO(SocketOutputStream.java:62)
at org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:143)
at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:153)
at org.apache.hadoop.net.SocketOutputStream.write(SocketOutputStream.java:114)
at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:82)
at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:140)
at java.io.DataOutputStream.flush(DataOutputStream.java:123)
at org.apache.hadoop.ipc.Client$Connection$3.run(Client.java:897)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
at java.util.concurrent.FutureTask.run(FutureTask.java:166)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
bash-4.1$ hadoop distcp -i -update hftp://10.0.0.217:50070/data/analytics/smshelp hdfs://10.0.0.82:8020/data/analytics/smshelp
13/07/05 06:16:37 INFO tools.DistCp: srcPaths=[hftp://10.0.0.217:50070/data/analytics/smshelp]
13/07/05 06:16:37 INFO tools.DistCp: destPath=hdfs://10.0.0.82:8020/data/analytics/smshelp
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment