Created
June 22, 2012 04:10
-
-
Save rjurney/2970165 to your computer and use it in GitHub Desktop.
Error when storing to HCatalog from Pig
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Environment setup for running Pig 0.10.0 with HCatalog 0.4.0 / Hive 0.9.0.
export HADOOP_HOME=/home/hadoop
export HCAT_HOME=/usr/local/hcat
export PIG_HOME=/home/hadoop/pig-0.10.0
export HIVE_HOME=/home/hadoop/hive-0.9.0
export FORREST_HOME=/home/hadoop/apache-forrest-0.9

# HCatalog + Hive metastore jars and the Hive/Hadoop conf dirs must all be on
# Pig's classpath for HCatStorer to reach the metastore.  NOTE: the original
# split this assignment across unescaped newlines, which silently truncated
# the classpath; it must be one logical line (backslash-newline inside double
# quotes continues the value).
export PIG_CLASSPATH="$HCAT_HOME/share/hcatalog/hcatalog-0.4.0.jar:$HIVE_HOME/lib/hive-metastore-0.9.0.jar:\
$HIVE_HOME/lib/libthrift-0.7.0.jar:$HIVE_HOME/lib/hive-exec-0.9.0.jar:$HIVE_HOME/lib/libfb303-0.7.0.jar:\
$HIVE_HOME/lib/jdo2-api-2.3-ec.jar:$HIVE_HOME/conf:$HADOOP_HOME/conf:$HIVE_HOME/lib/slf4j-api-1.6.1.jar"

# Quoted: the value contains '=' and '://'.  Points Pig at the Hive metastore
# Thrift service on localhost:10001.
export PIG_OPTS="-Dhive.metastore.uris=thrift://localhost:10001"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Pig Stack Trace
---------------
ERROR 1115: org.apache.hcatalog.common.HCatException : 2001 : Error setting output information. Cause : MetaException(message:org.apache.hadoop.hive.metastore.HiveAlterHandler class not found)
org.apache.pig.impl.plan.VisitorException: ERROR 1115:
<file hcat.pig, line 34, column 0> Output Location Validation Failed for: 'from_to_week More info to follow:
org.apache.hcatalog.common.HCatException : 2001 : Error setting output information. Cause : MetaException(message:org.apache.hadoop.hive.metastore.HiveAlterHandler class not found)
at org.apache.pig.newplan.logical.rules.InputOutputFileValidator$InputOutputFileVisitor.visit(InputOutputFileValidator.java:75)
at org.apache.pig.newplan.logical.relational.LOStore.accept(LOStore.java:77)
at org.apache.pig.newplan.DepthFirstWalker.depthFirst(DepthFirstWalker.java:64)
at org.apache.pig.newplan.DepthFirstWalker.depthFirst(DepthFirstWalker.java:66)
at org.apache.pig.newplan.DepthFirstWalker.depthFirst(DepthFirstWalker.java:66)
at org.apache.pig.newplan.DepthFirstWalker.depthFirst(DepthFirstWalker.java:66)
at org.apache.pig.newplan.DepthFirstWalker.depthFirst(DepthFirstWalker.java:66)
at org.apache.pig.newplan.DepthFirstWalker.walk(DepthFirstWalker.java:53)
at org.apache.pig.newplan.PlanVisitor.visit(PlanVisitor.java:50)
at org.apache.pig.newplan.logical.rules.InputOutputFileValidator.validate(InputOutputFileValidator.java:45)
at org.apache.pig.backend.hadoop.executionengine.HExecutionEngine.compile(HExecutionEngine.java:293)
at org.apache.pig.PigServer.compilePp(PigServer.java:1316)
at org.apache.pig.PigServer.executeCompiledLogicalPlan(PigServer.java:1253)
at org.apache.pig.PigServer.execute(PigServer.java:1245)
at org.apache.pig.PigServer.executeBatch(PigServer.java:362)
at org.apache.pig.tools.grunt.GruntParser.executeBatch(GruntParser.java:132)
at org.apache.pig.tools.grunt.GruntParser.parseStopOnError(GruntParser.java:193)
at org.apache.pig.tools.grunt.GruntParser.parseStopOnError(GruntParser.java:165)
at org.apache.pig.tools.grunt.Grunt.exec(Grunt.java:84)
at org.apache.pig.Main.run(Main.java:555)
at org.apache.pig.Main.main(Main.java:111)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.hadoop.util.RunJar.main(RunJar.java:156)
Caused by: org.apache.pig.PigException: ERROR 1115: org.apache.hcatalog.common.HCatException : 2001 : Error setting output information. Cause : MetaException(message:org.apache.hadoop.hive.metastore.HiveAlterHandler class not found)
at org.apache.hcatalog.pig.HCatStorer.setStoreLocation(HCatStorer.java:112)
at org.apache.pig.newplan.logical.rules.InputOutputFileValidator$InputOutputFileVisitor.visit(InputOutputFileValidator.java:68)
... 25 more
Caused by: org.apache.hcatalog.common.HCatException : 2001 : Error setting output information. Cause : MetaException(message:org.apache.hadoop.hive.metastore.HiveAlterHandler class not found)
at org.apache.hcatalog.mapreduce.HCatOutputFormat.setOutput(HCatOutputFormat.java:199)
at org.apache.hcatalog.pig.HCatStorer.setStoreLocation(HCatStorer.java:108)
... 26 more
Caused by: MetaException(message:org.apache.hadoop.hive.metastore.HiveAlterHandler class not found)
at org.apache.hadoop.hive.metastore.MetaStoreUtils.getClass(MetaStoreUtils.java:1001)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:274)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.<init>(HiveMetaStore.java:248)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:114)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:98)
at org.apache.hcatalog.common.HCatUtil.createHiveClient(HCatUtil.java:624)
at org.apache.hcatalog.mapreduce.HCatOutputFormat.setOutput(HCatOutputFormat.java:79)
... 27 more
================================================================================
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* HCatalog: jars needed for HCatStorer to reach the Hive metastore */
register /usr/local/hcat/share/hcatalog/*.jar
register /home/hadoop/hive-0.9.0/lib/*.jar
define HCatStorer org.apache.hcatalog.pig.HCatStorer();

/* Avro: loader + its JSON/Jackson dependencies for the Avro-encoded corpus */
register /home/hadoop/pig-0.10.0/build/ivy/lib/Pig/avro-1.5.3.jar
register /home/hadoop/pig-0.10.0/build/ivy/lib/Pig/json-simple-1.1.jar
register /home/hadoop/pig-0.10.0/contrib/piggybank/java/piggybank.jar
register /home/hadoop/pig-0.10.0/build/ivy/lib/Pig/jackson-core-asl-1.7.3.jar
register /home/hadoop/pig-0.10.0/build/ivy/lib/Pig/jackson-mapper-asl-1.7.3.jar
define AvroStorage org.apache.pig.piggybank.storage.avro.AvroStorage();

/* Date rounding into weekly buckets */
register /home/hadoop/pig-0.10.0/build/ivy/lib/Pig/joda-time-1.6.jar
define ISOToWeek org.apache.pig.piggybank.evaluation.datetime.truncate.ISOToWeek();

/* Cleanup the last run (only needed when storing to the /tmp/test path) */
-- rmf /tmp/test

/* Load the enron emails from s3 */
emails = load 's3://rjurney.public/enron.avro' using AvroStorage();

/* Only include emails with both a from and at least one to address (some emails are only bcc) */
emails = filter emails by (from is not null) and (tos is not null) and (date is not null);

/* Project all (from, to) pairs; FLATTEN fans out one row per recipient,
   and ISOToWeek rounds each date down to its weekly bucket */
pairs = foreach emails generate from.(address) as from,
                                FLATTEN(tos.(address)) as to,
                                ISOToWeek(date) as week;

-- store pairs into '/tmp/test';
store pairs into 'from_to_week' using org.apache.hcatalog.pig.HCatStorer();
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
hadoop@ip-10-4-115-51:~/pig-0.10.0$ ls /usr/local/hcat/share/hcatalog/*.jar
/usr/local/hcat/share/hcatalog/hcatalog-0.4.0-dev.jar /usr/local/hcat/share/hcatalog/hcatalog-server-extensions-0.4.0-dev.jar
/usr/local/hcat/share/hcatalog/hcatalog-0.4.0.jar /usr/local/hcat/share/hcatalog/hcatalog-server-extensions-0.4.0.jar
hadoop@ip-10-4-115-51:~/pig-0.10.0$ ls /home/hadoop/hive-0.9.0/lib/*.jar
/home/hadoop/hive-0.9.0/lib/antlr-runtime-3.0.1.jar /home/hadoop/hive-0.9.0/lib/hive-hwi-0.9.0.jar
/home/hadoop/hive-0.9.0/lib/commons-cli-1.2.jar /home/hadoop/hive-0.9.0/lib/hive-jdbc-0.9.0.jar
/home/hadoop/hive-0.9.0/lib/commons-codec-1.3.jar /home/hadoop/hive-0.9.0/lib/hive-metastore-0.9.0.jar
/home/hadoop/hive-0.9.0/lib/commons-collections-3.2.1.jar /home/hadoop/hive-0.9.0/lib/hive-pdk-0.9.0.jar
/home/hadoop/hive-0.9.0/lib/commons-dbcp-1.4.jar /home/hadoop/hive-0.9.0/lib/hive-serde-0.9.0.jar
/home/hadoop/hive-0.9.0/lib/commons-lang-2.4.jar /home/hadoop/hive-0.9.0/lib/hive-service-0.9.0.jar
/home/hadoop/hive-0.9.0/lib/commons-logging-1.0.4.jar /home/hadoop/hive-0.9.0/lib/hive-shims-0.9.0.jar
/home/hadoop/hive-0.9.0/lib/commons-logging-api-1.0.4.jar /home/hadoop/hive-0.9.0/lib/jackson-core-asl-1.8.8.jar
/home/hadoop/hive-0.9.0/lib/commons-pool-1.5.4.jar /home/hadoop/hive-0.9.0/lib/jackson-jaxrs-1.8.8.jar
/home/hadoop/hive-0.9.0/lib/datanucleus-connectionpool-2.0.3.jar /home/hadoop/hive-0.9.0/lib/jackson-mapper-asl-1.8.8.jar
/home/hadoop/hive-0.9.0/lib/datanucleus-core-2.0.3.jar /home/hadoop/hive-0.9.0/lib/jackson-xc-1.8.8.jar
/home/hadoop/hive-0.9.0/lib/datanucleus-enhancer-2.0.3.jar /home/hadoop/hive-0.9.0/lib/JavaEWAH-0.3.2.jar
/home/hadoop/hive-0.9.0/lib/datanucleus-rdbms-2.0.3.jar /home/hadoop/hive-0.9.0/lib/jdo2-api-2.3-ec.jar
/home/hadoop/hive-0.9.0/lib/derby-10.4.2.0.jar /home/hadoop/hive-0.9.0/lib/jline-0.9.94.jar
/home/hadoop/hive-0.9.0/lib/guava-r09.jar /home/hadoop/hive-0.9.0/lib/json-20090211.jar
/home/hadoop/hive-0.9.0/lib/hbase-0.92.0.jar /home/hadoop/hive-0.9.0/lib/libfb303-0.7.0.jar
/home/hadoop/hive-0.9.0/lib/hbase-0.92.0-tests.jar /home/hadoop/hive-0.9.0/lib/libfb303.jar
/home/hadoop/hive-0.9.0/lib/hive-builtins-0.9.0.jar /home/hadoop/hive-0.9.0/lib/libthrift-0.7.0.jar
/home/hadoop/hive-0.9.0/lib/hive-cli-0.9.0.jar /home/hadoop/hive-0.9.0/lib/libthrift.jar
/home/hadoop/hive-0.9.0/lib/hive-common-0.9.0.jar /home/hadoop/hive-0.9.0/lib/log4j-1.2.16.jar
/home/hadoop/hive-0.9.0/lib/hive-contrib-0.9.0.jar /home/hadoop/hive-0.9.0/lib/slf4j-api-1.6.1.jar
/home/hadoop/hive-0.9.0/lib/hive_contrib.jar /home/hadoop/hive-0.9.0/lib/slf4j-log4j12-1.6.1.jar
/home/hadoop/hive-0.9.0/lib/hive-exec-0.9.0.jar /home/hadoop/hive-0.9.0/lib/stringtemplate-3.1-b1.jar
/home/hadoop/hive-0.9.0/lib/hive-hbase-handler-0.9.0.jar /home/hadoop/hive-0.9.0/lib/zookeeper-3.4.3.jar
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
grunt> store pairs into 'from_to_week' using org.apache.hcatalog.pig.HCatStorer();
2012-06-22 19:31:07,498 [main] INFO org.apache.hadoop.fs.s3native.NativeS3FileSystem - Opening 's3://rjurney.public/enron.avro' for reading
2012-06-22 19:31:08,143 [main] INFO org.apache.hadoop.fs.s3native.NativeS3FileSystem - Opening 's3://rjurney.public/enron.avro' for reading
2012-06-22 19:31:08,573 [main] INFO org.apache.pig.tools.pigstats.ScriptState - Pig features used in the script: FILTER
2012-06-22 19:31:08,738 [main] INFO org.apache.pig.newplan.logical.rules.ColumnPruneVisitor - Columns pruned for emails: $0, $3, $4, $6, $7
2012-06-22 19:31:08,755 [main] ERROR org.apache.pig.tools.grunt.Grunt - ERROR 2998: Unhandled internal error. org/apache/hadoop/hive/ql/metadata/HiveStorageHandler
2012-06-22 19:31:08,755 [main] ERROR org.apache.pig.tools.grunt.Grunt - java.lang.NoClassDefFoundError: org/apache/hadoop/hive/ql/metadata/HiveStorageHandler
at java.lang.ClassLoader.defineClass1(Native Method)
at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631)
at java.lang.ClassLoader.defineClass(ClassLoader.java:615)
at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
at org.apache.hcatalog.pig.HCatStorer.setStoreLocation(HCatStorer.java:98)
at org.apache.pig.newplan.logical.rules.InputOutputFileValidator$InputOutputFileVisitor.visit(InputOutputFileValidator.java:68)
at org.apache.pig.newplan.logical.relational.LOStore.accept(LOStore.java:77)
at org.apache.pig.newplan.DepthFirstWalker.depthFirst(DepthFirstWalker.java:64)
at org.apache.pig.newplan.DepthFirstWalker.depthFirst(DepthFirstWalker.java:66)
at org.apache.pig.newplan.DepthFirstWalker.depthFirst(DepthFirstWalker.java:66)
at org.apache.pig.newplan.DepthFirstWalker.depthFirst(DepthFirstWalker.java:66)
at org.apache.pig.newplan.DepthFirstWalker.depthFirst(DepthFirstWalker.java:66)
at org.apache.pig.newplan.DepthFirstWalker.walk(DepthFirstWalker.java:53)
at org.apache.pig.newplan.PlanVisitor.visit(PlanVisitor.java:50)
at org.apache.pig.newplan.logical.rules.InputOutputFileValidator.validate(InputOutputFileValidator.java:45)
at org.apache.pig.backend.hadoop.executionengine.HExecutionEngine.compile(HExecutionEngine.java:293)
at org.apache.pig.PigServer.compilePp(PigServer.java:1316)
at org.apache.pig.PigServer.executeCompiledLogicalPlan(PigServer.java:1253)
at org.apache.pig.PigServer.execute(PigServer.java:1245)
at org.apache.pig.PigServer.access$400(PigServer.java:127)
at org.apache.pig.PigServer$Graph.registerQuery(PigServer.java:1547)
at org.apache.pig.PigServer.registerQuery(PigServer.java:540)
at org.apache.pig.tools.grunt.GruntParser.processPig(GruntParser.java:970)
at org.apache.pig.tools.pigscript.parser.PigScriptParser.parse(PigScriptParser.java:386)
at org.apache.pig.tools.grunt.GruntParser.parseStopOnError(GruntParser.java:189)
at org.apache.pig.tools.grunt.GruntParser.parseStopOnError(GruntParser.java:165)
at org.apache.pig.tools.grunt.Grunt.run(Grunt.java:69)
at org.apache.pig.Main.run(Main.java:490)
at org.apache.pig.Main.main(Main.java:111)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.hadoop.util.RunJar.main(RunJar.java:156)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.hive.ql.metadata.HiveStorageHandler
at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
... 42 more
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Alternative to PIG_CLASSPATH: hand the HCatalog/Hive jars to Pig via
# -Dpig.additional.jars.  Assumes HCAT_HOME and HIVE_HOME are exported
# (see the env block above).  -l /tmp puts the pig log in /tmp;
# -v -w enable verbose output and warning aggregation.
bin/pig \
  -Dpig.additional.jars="$HCAT_HOME/share/hcatalog/hcatalog-0.4.0.jar:$HIVE_HOME/lib/hive-metastore-0.9.0.jar:$HIVE_HOME/lib/libthrift-0.7.0.jar:$HIVE_HOME/lib/hive-exec-0.9.0.jar:$HIVE_HOME/lib/libfb303-0.7.0.jar:$HIVE_HOME/lib/jdo2-api-2.3-ec.jar:$HIVE_HOME/lib/slf4j-api-1.6.1.jar" \
  -l /tmp -v -w
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment