Created
April 19, 2018 11:27
-
-
Save mshtelma/c5ee8206200533fc1d606964dd5a30e2 to your computer and use it in GitHub Desktop.
Stacktrace: ANALYZE TABLE with spark.sql.statistics.histogram.enabled set to true
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
org.apache.spark.sql.AnalysisException: org.apache.hadoop.hive.ql.metadata.HiveException: Unable to alter table. Put request failed : INSERT INTO TABLE_PARAMS (PARAM_VALUE,TBL_ID,PARAM_KEY) VALUES (?,?,?) | |
org.datanucleus.exceptions.NucleusDataStoreException: Put request failed : INSERT INTO TABLE_PARAMS (PARAM_VALUE,TBL_ID,PARAM_KEY) VALUES (?,?,?) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.putAll(JoinMapStore.java:230) | |
at org.datanucleus.store.types.backed.HashMap.initialise(HashMap.java:203) | |
at org.datanucleus.store.types.SCOUtils.createSCOWrapper(SCOUtils.java:247) | |
at org.datanucleus.store.types.SCOUtils.newSCOInstance(SCOUtils.java:138) | |
at org.datanucleus.state.JDOStateManager.wrapSCOField(JDOStateManager.java:3627) | |
at org.datanucleus.state.JDOStateManager.setObjectField(JDOStateManager.java:1958) | |
at org.apache.hadoop.hive.metastore.model.MTable.jdoSetparameters(MTable.java) | |
at org.apache.hadoop.hive.metastore.model.MTable.setParameters(MTable.java:126) | |
at org.apache.hadoop.hive.metastore.ObjectStore.alterTable(ObjectStore.java:2767) | |
at sun.reflect.GeneratedMethodAccessor103.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114) | |
at com.sun.proxy.$Proxy31.alterTable(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveAlterHandler.alterTable(HiveAlterHandler.java:243) | |
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.alter_table_core(HiveMetaStore.java:3408) | |
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.alter_table_with_cascade(HiveMetaStore.java:3380) | |
at sun.reflect.GeneratedMethodAccessor102.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107) | |
at com.sun.proxy.$Proxy33.alter_table_with_cascade(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.alter_table(HiveMetaStoreClient.java:340) | |
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.alter_table(SessionHiveMetaStoreClient.java:251) | |
at sun.reflect.GeneratedMethodAccessor101.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156) | |
at com.sun.proxy.$Proxy34.alter_table(Unknown Source) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:496) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:484) | |
at sun.reflect.GeneratedMethodAccessor100.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.spark.sql.hive.client.Shim_v0_12.alterTable(HiveShim.scala:401) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply$mcV$sp(HiveClientImpl.scala:492) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:272) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:210) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:209) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:255) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClient$class.alterTable(HiveClient.scala:95) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:83) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply$mcV$sp(HiveExternalCatalog.scala:680) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.doAlterTableStats(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.catalyst.catalog.ExternalCatalog.alterTableStats(ExternalCatalog.scala:187) | |
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.alterTableStats(SessionCatalog.scala:373) | |
at org.apache.spark.sql.execution.command.AnalyzeColumnCommand.run(AnalyzeColumnCommand.scala:60) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3253) | |
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77) | |
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3252) | |
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:75) | |
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:638) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.lambda$computeStatistics$1(AbstractCheckpointStrategy.java:80) | |
at java.util.Optional.ifPresent(Optional.java:159) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.computeStatistics(AbstractCheckpointStrategy.java:80) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.persist(AbstractCheckpointStrategy.java:49) | |
at com.bearingpoint.abacus.x1onspark.exec.Instruction.persist(Instruction.java:72) | |
at com.bearingpoint.abacus.x1onspark.exec.ProvideMissingColumns.execute(ProvideMissingColumns.java:134) | |
at com.bearingpoint.abacus.x1onspark.exec.ParallelInstruction.lambda$execute$0(ParallelInstruction.java:45) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) | |
at java.lang.Thread.run(Thread.java:745) | |
Caused by: org.datanucleus.store.rdbms.exceptions.MappedDatastoreException: INSERT INTO TABLE_PARAMS (PARAM_VALUE,TBL_ID,PARAM_KEY) VALUES (?,?,?) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1078) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.putAll(JoinMapStore.java:220) | |
... 73 more | |
Caused by: java.sql.SQLDataException: A truncation error was encountered trying to shrink VARCHAR 'TFo0QmxvY2smMQwAAOAXAABMl6MI8TlBBw+MWLFixgAAAP7Bn9+7oD1wpMEv&' to length 4000. | |
at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.Util.generateCsSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.TransactionResourceImpl.wrapInSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.TransactionResourceImpl.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedConnection.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.ConnectionChild.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedStatement.executeStatement(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeStatement(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeLargeUpdate(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeUpdate(Unknown Source) | |
at com.jolbox.bonecp.PreparedStatementHandle.executeUpdate(PreparedStatementHandle.java:205) | |
at org.datanucleus.store.rdbms.ParamLoggingPreparedStatement.executeUpdate(ParamLoggingPreparedStatement.java:399) | |
at org.datanucleus.store.rdbms.SQLController.executeStatementUpdate(SQLController.java:439) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1069) | |
... 74 more | |
Caused by: ERROR 22001: A truncation error was encountered trying to shrink VARCHAR 'TFo0QmxvY2smMQwAAOAXAABMl6MI8TlBBw+MWLFixgAAAP7Bn9+7oD1wpMEv&' to length 4000. | |
at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) | |
at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) | |
at org.apache.derby.iapi.types.SQLChar.hasNonBlankChars(Unknown Source) | |
at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source) | |
at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source) | |
at org.apache.derby.iapi.types.DataTypeDescriptor.normalize(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeColumn(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeRow(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.DMLWriteResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.InsertResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.InsertResultSet.open(Unknown Source) | |
at org.apache.derby.impl.sql.GenericPreparedStatement.executeStmt(Unknown Source) | |
at org.apache.derby.impl.sql.GenericPreparedStatement.execute(Unknown Source) | |
... 82 more | |
Nested Throwables StackTrace: | |
org.datanucleus.store.rdbms.exceptions.MappedDatastoreException: INSERT INTO TABLE_PARAMS (PARAM_VALUE,TBL_ID,PARAM_KEY) VALUES (?,?,?) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1078) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.putAll(JoinMapStore.java:220) | |
at org.datanucleus.store.types.backed.HashMap.initialise(HashMap.java:203) | |
at org.datanucleus.store.types.SCOUtils.createSCOWrapper(SCOUtils.java:247) | |
at org.datanucleus.store.types.SCOUtils.newSCOInstance(SCOUtils.java:138) | |
at org.datanucleus.state.JDOStateManager.wrapSCOField(JDOStateManager.java:3627) | |
at org.datanucleus.state.JDOStateManager.setObjectField(JDOStateManager.java:1958) | |
at org.apache.hadoop.hive.metastore.model.MTable.jdoSetparameters(MTable.java) | |
at org.apache.hadoop.hive.metastore.model.MTable.setParameters(MTable.java:126) | |
at org.apache.hadoop.hive.metastore.ObjectStore.alterTable(ObjectStore.java:2767) | |
at sun.reflect.GeneratedMethodAccessor103.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114) | |
at com.sun.proxy.$Proxy31.alterTable(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveAlterHandler.alterTable(HiveAlterHandler.java:243) | |
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.alter_table_core(HiveMetaStore.java:3408) | |
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.alter_table_with_cascade(HiveMetaStore.java:3380) | |
at sun.reflect.GeneratedMethodAccessor102.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107) | |
at com.sun.proxy.$Proxy33.alter_table_with_cascade(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.alter_table(HiveMetaStoreClient.java:340) | |
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.alter_table(SessionHiveMetaStoreClient.java:251) | |
at sun.reflect.GeneratedMethodAccessor101.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156) | |
at com.sun.proxy.$Proxy34.alter_table(Unknown Source) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:496) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:484) | |
at sun.reflect.GeneratedMethodAccessor100.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.spark.sql.hive.client.Shim_v0_12.alterTable(HiveShim.scala:401) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply$mcV$sp(HiveClientImpl.scala:492) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:272) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:210) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:209) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:255) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClient$class.alterTable(HiveClient.scala:95) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:83) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply$mcV$sp(HiveExternalCatalog.scala:680) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.doAlterTableStats(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.catalyst.catalog.ExternalCatalog.alterTableStats(ExternalCatalog.scala:187) | |
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.alterTableStats(SessionCatalog.scala:373) | |
at org.apache.spark.sql.execution.command.AnalyzeColumnCommand.run(AnalyzeColumnCommand.scala:60) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3253) | |
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77) | |
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3252) | |
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:75) | |
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:638) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.lambda$computeStatistics$1(AbstractCheckpointStrategy.java:80) | |
at java.util.Optional.ifPresent(Optional.java:159) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.computeStatistics(AbstractCheckpointStrategy.java:80) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.persist(AbstractCheckpointStrategy.java:49) | |
at com.bearingpoint.abacus.x1onspark.exec.Instruction.persist(Instruction.java:72) | |
at com.bearingpoint.abacus.x1onspark.exec.ProvideMissingColumns.execute(ProvideMissingColumns.java:134) | |
at com.bearingpoint.abacus.x1onspark.exec.ParallelInstruction.lambda$execute$0(ParallelInstruction.java:45) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) | |
at java.lang.Thread.run(Thread.java:745) | |
Caused by: java.sql.SQLDataException: A truncation error was encountered trying to shrink VARCHAR 'TFo0QmxvY2smMQwAAOAXAABMl6MI8TlBBw+MWLFixgAAAP7Bn9+7oD1wpMEv&' to length 4000. | |
at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.Util.generateCsSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.TransactionResourceImpl.wrapInSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.TransactionResourceImpl.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedConnection.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.ConnectionChild.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedStatement.executeStatement(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeStatement(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeLargeUpdate(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeUpdate(Unknown Source) | |
at com.jolbox.bonecp.PreparedStatementHandle.executeUpdate(PreparedStatementHandle.java:205) | |
at org.datanucleus.store.rdbms.ParamLoggingPreparedStatement.executeUpdate(ParamLoggingPreparedStatement.java:399) | |
at org.datanucleus.store.rdbms.SQLController.executeStatementUpdate(SQLController.java:439) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1069) | |
... 74 more | |
Caused by: ERROR 22001: A truncation error was encountered trying to shrink VARCHAR 'TFo0QmxvY2smMQwAAOAXAABMl6MI8TlBBw+MWLFixgAAAP7Bn9+7oD1wpMEv&' to length 4000. | |
at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) | |
at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) | |
at org.apache.derby.iapi.types.SQLChar.hasNonBlankChars(Unknown Source) | |
at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source) | |
at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source) | |
at org.apache.derby.iapi.types.DataTypeDescriptor.normalize(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeColumn(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeRow(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.DMLWriteResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.InsertResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.InsertResultSet.open(Unknown Source) | |
at org.apache.derby.impl.sql.GenericPreparedStatement.executeStmt(Unknown Source) | |
at org.apache.derby.impl.sql.GenericPreparedStatement.execute(Unknown Source) | |
... 82 more | |
; | |
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:106) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.doAlterTableStats(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.catalyst.catalog.ExternalCatalog.alterTableStats(ExternalCatalog.scala:187) | |
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.alterTableStats(SessionCatalog.scala:373) | |
at org.apache.spark.sql.execution.command.AnalyzeColumnCommand.run(AnalyzeColumnCommand.scala:60) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3253) | |
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77) | |
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3252) | |
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:75) | |
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:638) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.lambda$computeStatistics$1(AbstractCheckpointStrategy.java:80) | |
at java.util.Optional.ifPresent(Optional.java:159) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.computeStatistics(AbstractCheckpointStrategy.java:80) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.persist(AbstractCheckpointStrategy.java:49) | |
at com.bearingpoint.abacus.x1onspark.exec.Instruction.persist(Instruction.java:72) | |
at com.bearingpoint.abacus.x1onspark.exec.ProvideMissingColumns.execute(ProvideMissingColumns.java:134) | |
at com.bearingpoint.abacus.x1onspark.exec.ParallelInstruction.lambda$execute$0(ParallelInstruction.java:45) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) | |
at java.lang.Thread.run(Thread.java:745) | |
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Unable to alter table. Put request failed : INSERT INTO TABLE_PARAMS (PARAM_VALUE,TBL_ID,PARAM_KEY) VALUES (?,?,?) | |
org.datanucleus.exceptions.NucleusDataStoreException: Put request failed : INSERT INTO TABLE_PARAMS (PARAM_VALUE,TBL_ID,PARAM_KEY) VALUES (?,?,?) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.putAll(JoinMapStore.java:230) | |
at org.datanucleus.store.types.backed.HashMap.initialise(HashMap.java:203) | |
at org.datanucleus.store.types.SCOUtils.createSCOWrapper(SCOUtils.java:247) | |
at org.datanucleus.store.types.SCOUtils.newSCOInstance(SCOUtils.java:138) | |
at org.datanucleus.state.JDOStateManager.wrapSCOField(JDOStateManager.java:3627) | |
at org.datanucleus.state.JDOStateManager.setObjectField(JDOStateManager.java:1958) | |
at org.apache.hadoop.hive.metastore.model.MTable.jdoSetparameters(MTable.java) | |
at org.apache.hadoop.hive.metastore.model.MTable.setParameters(MTable.java:126) | |
at org.apache.hadoop.hive.metastore.ObjectStore.alterTable(ObjectStore.java:2767) | |
at sun.reflect.GeneratedMethodAccessor103.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114) | |
at com.sun.proxy.$Proxy31.alterTable(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveAlterHandler.alterTable(HiveAlterHandler.java:243) | |
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.alter_table_core(HiveMetaStore.java:3408) | |
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.alter_table_with_cascade(HiveMetaStore.java:3380) | |
at sun.reflect.GeneratedMethodAccessor102.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107) | |
at com.sun.proxy.$Proxy33.alter_table_with_cascade(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.alter_table(HiveMetaStoreClient.java:340) | |
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.alter_table(SessionHiveMetaStoreClient.java:251) | |
at sun.reflect.GeneratedMethodAccessor101.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156) | |
at com.sun.proxy.$Proxy34.alter_table(Unknown Source) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:496) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:484) | |
at sun.reflect.GeneratedMethodAccessor100.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.spark.sql.hive.client.Shim_v0_12.alterTable(HiveShim.scala:401) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply$mcV$sp(HiveClientImpl.scala:492) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:272) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:210) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:209) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:255) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClient$class.alterTable(HiveClient.scala:95) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:83) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply$mcV$sp(HiveExternalCatalog.scala:680) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.doAlterTableStats(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.catalyst.catalog.ExternalCatalog.alterTableStats(ExternalCatalog.scala:187) | |
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.alterTableStats(SessionCatalog.scala:373) | |
at org.apache.spark.sql.execution.command.AnalyzeColumnCommand.run(AnalyzeColumnCommand.scala:60) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3253) | |
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77) | |
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3252) | |
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:75) | |
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:638) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.lambda$computeStatistics$1(AbstractCheckpointStrategy.java:80) | |
at java.util.Optional.ifPresent(Optional.java:159) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.computeStatistics(AbstractCheckpointStrategy.java:80) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.persist(AbstractCheckpointStrategy.java:49) | |
at com.bearingpoint.abacus.x1onspark.exec.Instruction.persist(Instruction.java:72) | |
at com.bearingpoint.abacus.x1onspark.exec.ProvideMissingColumns.execute(ProvideMissingColumns.java:134) | |
at com.bearingpoint.abacus.x1onspark.exec.ParallelInstruction.lambda$execute$0(ParallelInstruction.java:45) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) | |
at java.lang.Thread.run(Thread.java:745) | |
Caused by: org.datanucleus.store.rdbms.exceptions.MappedDatastoreException: INSERT INTO TABLE_PARAMS (PARAM_VALUE,TBL_ID,PARAM_KEY) VALUES (?,?,?) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1078) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.putAll(JoinMapStore.java:220) | |
... 73 more | |
Caused by: java.sql.SQLDataException: A truncation error was encountered trying to shrink VARCHAR 'TFo0QmxvY2smMQwAAOAXAABMl6MI8TlBBw+MWLFixgAAAP7Bn9+7oD1wpMEv&' to length 4000. | |
at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.Util.generateCsSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.TransactionResourceImpl.wrapInSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.TransactionResourceImpl.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedConnection.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.ConnectionChild.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedStatement.executeStatement(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeStatement(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeLargeUpdate(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeUpdate(Unknown Source) | |
at com.jolbox.bonecp.PreparedStatementHandle.executeUpdate(PreparedStatementHandle.java:205) | |
at org.datanucleus.store.rdbms.ParamLoggingPreparedStatement.executeUpdate(ParamLoggingPreparedStatement.java:399) | |
at org.datanucleus.store.rdbms.SQLController.executeStatementUpdate(SQLController.java:439) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1069) | |
... 74 more | |
Caused by: ERROR 22001: A truncation error was encountered trying to shrink VARCHAR 'TFo0QmxvY2smMQwAAOAXAABMl6MI8TlBBw+MWLFixgAAAP7Bn9+7oD1wpMEv&' to length 4000. | |
at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) | |
at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) | |
at org.apache.derby.iapi.types.SQLChar.hasNonBlankChars(Unknown Source) | |
at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source) | |
at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source) | |
at org.apache.derby.iapi.types.DataTypeDescriptor.normalize(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeColumn(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeRow(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.DMLWriteResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.InsertResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.InsertResultSet.open(Unknown Source) | |
at org.apache.derby.impl.sql.GenericPreparedStatement.executeStmt(Unknown Source) | |
at org.apache.derby.impl.sql.GenericPreparedStatement.execute(Unknown Source) | |
... 82 more | |
Nested Throwables StackTrace: | |
org.datanucleus.store.rdbms.exceptions.MappedDatastoreException: INSERT INTO TABLE_PARAMS (PARAM_VALUE,TBL_ID,PARAM_KEY) VALUES (?,?,?) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1078) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.putAll(JoinMapStore.java:220) | |
at org.datanucleus.store.types.backed.HashMap.initialise(HashMap.java:203) | |
at org.datanucleus.store.types.SCOUtils.createSCOWrapper(SCOUtils.java:247) | |
at org.datanucleus.store.types.SCOUtils.newSCOInstance(SCOUtils.java:138) | |
at org.datanucleus.state.JDOStateManager.wrapSCOField(JDOStateManager.java:3627) | |
at org.datanucleus.state.JDOStateManager.setObjectField(JDOStateManager.java:1958) | |
at org.apache.hadoop.hive.metastore.model.MTable.jdoSetparameters(MTable.java) | |
at org.apache.hadoop.hive.metastore.model.MTable.setParameters(MTable.java:126) | |
at org.apache.hadoop.hive.metastore.ObjectStore.alterTable(ObjectStore.java:2767) | |
at sun.reflect.GeneratedMethodAccessor103.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114) | |
at com.sun.proxy.$Proxy31.alterTable(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveAlterHandler.alterTable(HiveAlterHandler.java:243) | |
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.alter_table_core(HiveMetaStore.java:3408) | |
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.alter_table_with_cascade(HiveMetaStore.java:3380) | |
at sun.reflect.GeneratedMethodAccessor102.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107) | |
at com.sun.proxy.$Proxy33.alter_table_with_cascade(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.alter_table(HiveMetaStoreClient.java:340) | |
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.alter_table(SessionHiveMetaStoreClient.java:251) | |
at sun.reflect.GeneratedMethodAccessor101.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156) | |
at com.sun.proxy.$Proxy34.alter_table(Unknown Source) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:496) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:484) | |
at sun.reflect.GeneratedMethodAccessor100.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.spark.sql.hive.client.Shim_v0_12.alterTable(HiveShim.scala:401) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply$mcV$sp(HiveClientImpl.scala:492) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:272) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:210) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:209) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:255) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClient$class.alterTable(HiveClient.scala:95) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:83) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply$mcV$sp(HiveExternalCatalog.scala:680) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.doAlterTableStats(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.catalyst.catalog.ExternalCatalog.alterTableStats(ExternalCatalog.scala:187) | |
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.alterTableStats(SessionCatalog.scala:373) | |
at org.apache.spark.sql.execution.command.AnalyzeColumnCommand.run(AnalyzeColumnCommand.scala:60) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3253) | |
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77) | |
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3252) | |
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:75) | |
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:638) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.lambda$computeStatistics$1(AbstractCheckpointStrategy.java:80) | |
at java.util.Optional.ifPresent(Optional.java:159) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.computeStatistics(AbstractCheckpointStrategy.java:80) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.persist(AbstractCheckpointStrategy.java:49) | |
at com.bearingpoint.abacus.x1onspark.exec.Instruction.persist(Instruction.java:72) | |
at com.bearingpoint.abacus.x1onspark.exec.ProvideMissingColumns.execute(ProvideMissingColumns.java:134) | |
at com.bearingpoint.abacus.x1onspark.exec.ParallelInstruction.lambda$execute$0(ParallelInstruction.java:45) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) | |
at java.lang.Thread.run(Thread.java:745) | |
Caused by: java.sql.SQLDataException: A truncation error was encountered trying to shrink VARCHAR 'TFo0QmxvY2smMQwAAOAXAABMl6MI8TlBBw+MWLFixgAAAP7Bn9+7oD1wpMEv&' to length 4000. | |
at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.Util.generateCsSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.TransactionResourceImpl.wrapInSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.TransactionResourceImpl.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedConnection.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.ConnectionChild.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedStatement.executeStatement(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeStatement(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeLargeUpdate(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeUpdate(Unknown Source) | |
at com.jolbox.bonecp.PreparedStatementHandle.executeUpdate(PreparedStatementHandle.java:205) | |
at org.datanucleus.store.rdbms.ParamLoggingPreparedStatement.executeUpdate(ParamLoggingPreparedStatement.java:399) | |
at org.datanucleus.store.rdbms.SQLController.executeStatementUpdate(SQLController.java:439) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1069) | |
... 74 more | |
Caused by: ERROR 22001: A truncation error was encountered trying to shrink VARCHAR 'TFo0QmxvY2smMQwAAOAXAABMl6MI8TlBBw+MWLFixgAAAP7Bn9+7oD1wpMEv&' to length 4000. | |
at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) | |
at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) | |
at org.apache.derby.iapi.types.SQLChar.hasNonBlankChars(Unknown Source) | |
at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source) | |
at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source) | |
at org.apache.derby.iapi.types.DataTypeDescriptor.normalize(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeColumn(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeRow(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.DMLWriteResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.InsertResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.InsertResultSet.open(Unknown Source) | |
at org.apache.derby.impl.sql.GenericPreparedStatement.executeStmt(Unknown Source) | |
at org.apache.derby.impl.sql.GenericPreparedStatement.execute(Unknown Source) | |
... 82 more | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:498) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:484) | |
at sun.reflect.GeneratedMethodAccessor100.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.spark.sql.hive.client.Shim_v0_12.alterTable(HiveShim.scala:401) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply$mcV$sp(HiveClientImpl.scala:492) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:272) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:210) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:209) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:255) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClient$class.alterTable(HiveClient.scala:95) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:83) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply$mcV$sp(HiveExternalCatalog.scala:680) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) | |
... 25 more | |
Caused by: MetaException(message:Put request failed : INSERT INTO TABLE_PARAMS (PARAM_VALUE,TBL_ID,PARAM_KEY) VALUES (?,?,?) | |
org.datanucleus.exceptions.NucleusDataStoreException: Put request failed : INSERT INTO TABLE_PARAMS (PARAM_VALUE,TBL_ID,PARAM_KEY) VALUES (?,?,?) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.putAll(JoinMapStore.java:230) | |
at org.datanucleus.store.types.backed.HashMap.initialise(HashMap.java:203) | |
at org.datanucleus.store.types.SCOUtils.createSCOWrapper(SCOUtils.java:247) | |
at org.datanucleus.store.types.SCOUtils.newSCOInstance(SCOUtils.java:138) | |
at org.datanucleus.state.JDOStateManager.wrapSCOField(JDOStateManager.java:3627) | |
at org.datanucleus.state.JDOStateManager.setObjectField(JDOStateManager.java:1958) | |
at org.apache.hadoop.hive.metastore.model.MTable.jdoSetparameters(MTable.java) | |
at org.apache.hadoop.hive.metastore.model.MTable.setParameters(MTable.java:126) | |
at org.apache.hadoop.hive.metastore.ObjectStore.alterTable(ObjectStore.java:2767) | |
at sun.reflect.GeneratedMethodAccessor103.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114) | |
at com.sun.proxy.$Proxy31.alterTable(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveAlterHandler.alterTable(HiveAlterHandler.java:243) | |
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.alter_table_core(HiveMetaStore.java:3408) | |
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.alter_table_with_cascade(HiveMetaStore.java:3380) | |
at sun.reflect.GeneratedMethodAccessor102.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107) | |
at com.sun.proxy.$Proxy33.alter_table_with_cascade(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.alter_table(HiveMetaStoreClient.java:340) | |
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.alter_table(SessionHiveMetaStoreClient.java:251) | |
at sun.reflect.GeneratedMethodAccessor101.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156) | |
at com.sun.proxy.$Proxy34.alter_table(Unknown Source) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:496) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:484) | |
at sun.reflect.GeneratedMethodAccessor100.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.spark.sql.hive.client.Shim_v0_12.alterTable(HiveShim.scala:401) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply$mcV$sp(HiveClientImpl.scala:492) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:272) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:210) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:209) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:255) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClient$class.alterTable(HiveClient.scala:95) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:83) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply$mcV$sp(HiveExternalCatalog.scala:680) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.doAlterTableStats(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.catalyst.catalog.ExternalCatalog.alterTableStats(ExternalCatalog.scala:187) | |
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.alterTableStats(SessionCatalog.scala:373) | |
at org.apache.spark.sql.execution.command.AnalyzeColumnCommand.run(AnalyzeColumnCommand.scala:60) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3253) | |
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77) | |
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3252) | |
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:75) | |
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:638) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.lambda$computeStatistics$1(AbstractCheckpointStrategy.java:80) | |
at java.util.Optional.ifPresent(Optional.java:159) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.computeStatistics(AbstractCheckpointStrategy.java:80) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.persist(AbstractCheckpointStrategy.java:49) | |
at com.bearingpoint.abacus.x1onspark.exec.Instruction.persist(Instruction.java:72) | |
at com.bearingpoint.abacus.x1onspark.exec.ProvideMissingColumns.execute(ProvideMissingColumns.java:134) | |
at com.bearingpoint.abacus.x1onspark.exec.ParallelInstruction.lambda$execute$0(ParallelInstruction.java:45) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) | |
at java.lang.Thread.run(Thread.java:745) | |
Caused by: org.datanucleus.store.rdbms.exceptions.MappedDatastoreException: INSERT INTO TABLE_PARAMS (PARAM_VALUE,TBL_ID,PARAM_KEY) VALUES (?,?,?) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1078) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.putAll(JoinMapStore.java:220) | |
... 73 more | |
Caused by: java.sql.SQLDataException: A truncation error was encountered trying to shrink VARCHAR 'TFo0QmxvY2smMQwAAOAXAABMl6MI8TlBBw+MWLFixgAAAP7Bn9+7oD1wpMEv&' to length 4000. | |
at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.Util.generateCsSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.TransactionResourceImpl.wrapInSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.TransactionResourceImpl.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedConnection.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.ConnectionChild.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedStatement.executeStatement(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeStatement(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeLargeUpdate(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeUpdate(Unknown Source) | |
at com.jolbox.bonecp.PreparedStatementHandle.executeUpdate(PreparedStatementHandle.java:205) | |
at org.datanucleus.store.rdbms.ParamLoggingPreparedStatement.executeUpdate(ParamLoggingPreparedStatement.java:399) | |
at org.datanucleus.store.rdbms.SQLController.executeStatementUpdate(SQLController.java:439) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1069) | |
... 74 more | |
Caused by: ERROR 22001: A truncation error was encountered trying to shrink VARCHAR 'TFo0QmxvY2smMQwAAOAXAABMl6MI8TlBBw+MWLFixgAAAP7Bn9+7oD1wpMEv&' to length 4000. | |
at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) | |
at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) | |
at org.apache.derby.iapi.types.SQLChar.hasNonBlankChars(Unknown Source) | |
at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source) | |
at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source) | |
at org.apache.derby.iapi.types.DataTypeDescriptor.normalize(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeColumn(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeRow(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.DMLWriteResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.InsertResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.InsertResultSet.open(Unknown Source) | |
at org.apache.derby.impl.sql.GenericPreparedStatement.executeStmt(Unknown Source) | |
at org.apache.derby.impl.sql.GenericPreparedStatement.execute(Unknown Source) | |
... 82 more | |
Nested Throwables StackTrace: | |
org.datanucleus.store.rdbms.exceptions.MappedDatastoreException: INSERT INTO TABLE_PARAMS (PARAM_VALUE,TBL_ID,PARAM_KEY) VALUES (?,?,?) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1078) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.putAll(JoinMapStore.java:220) | |
at org.datanucleus.store.types.backed.HashMap.initialise(HashMap.java:203) | |
at org.datanucleus.store.types.SCOUtils.createSCOWrapper(SCOUtils.java:247) | |
at org.datanucleus.store.types.SCOUtils.newSCOInstance(SCOUtils.java:138) | |
at org.datanucleus.state.JDOStateManager.wrapSCOField(JDOStateManager.java:3627) | |
at org.datanucleus.state.JDOStateManager.setObjectField(JDOStateManager.java:1958) | |
at org.apache.hadoop.hive.metastore.model.MTable.jdoSetparameters(MTable.java) | |
at org.apache.hadoop.hive.metastore.model.MTable.setParameters(MTable.java:126) | |
at org.apache.hadoop.hive.metastore.ObjectStore.alterTable(ObjectStore.java:2767) | |
at sun.reflect.GeneratedMethodAccessor103.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114) | |
at com.sun.proxy.$Proxy31.alterTable(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveAlterHandler.alterTable(HiveAlterHandler.java:243) | |
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.alter_table_core(HiveMetaStore.java:3408) | |
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.alter_table_with_cascade(HiveMetaStore.java:3380) | |
at sun.reflect.GeneratedMethodAccessor102.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107) | |
at com.sun.proxy.$Proxy33.alter_table_with_cascade(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.alter_table(HiveMetaStoreClient.java:340) | |
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.alter_table(SessionHiveMetaStoreClient.java:251) | |
at sun.reflect.GeneratedMethodAccessor101.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156) | |
at com.sun.proxy.$Proxy34.alter_table(Unknown Source) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:496) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:484) | |
at sun.reflect.GeneratedMethodAccessor100.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.spark.sql.hive.client.Shim_v0_12.alterTable(HiveShim.scala:401) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply$mcV$sp(HiveClientImpl.scala:492) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$alterTable$1.apply(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:272) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:210) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:209) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:255) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:482) | |
at org.apache.spark.sql.hive.client.HiveClient$class.alterTable(HiveClient.scala:95) | |
at org.apache.spark.sql.hive.client.HiveClientImpl.alterTable(HiveClientImpl.scala:83) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply$mcV$sp(HiveExternalCatalog.scala:680) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$doAlterTableStats$1.apply(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97) | |
at org.apache.spark.sql.hive.HiveExternalCatalog.doAlterTableStats(HiveExternalCatalog.scala:662) | |
at org.apache.spark.sql.catalyst.catalog.ExternalCatalog.alterTableStats(ExternalCatalog.scala:187) | |
at org.apache.spark.sql.catalyst.catalog.SessionCatalog.alterTableStats(SessionCatalog.scala:373) | |
at org.apache.spark.sql.execution.command.AnalyzeColumnCommand.run(AnalyzeColumnCommand.scala:60) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68) | |
at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$6.apply(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$$anonfun$52.apply(Dataset.scala:3253) | |
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77) | |
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3252) | |
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:190) | |
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:75) | |
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:638) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.lambda$computeStatistics$1(AbstractCheckpointStrategy.java:80) | |
at java.util.Optional.ifPresent(Optional.java:159) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.computeStatistics(AbstractCheckpointStrategy.java:80) | |
at com.bearingpoint.abacus.x1onspark.exec.caching.AbstractCheckpointStrategy.persist(AbstractCheckpointStrategy.java:49) | |
at com.bearingpoint.abacus.x1onspark.exec.Instruction.persist(Instruction.java:72) | |
at com.bearingpoint.abacus.x1onspark.exec.ProvideMissingColumns.execute(ProvideMissingColumns.java:134) | |
at com.bearingpoint.abacus.x1onspark.exec.ParallelInstruction.lambda$execute$0(ParallelInstruction.java:45) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) | |
at java.lang.Thread.run(Thread.java:745) | |
Caused by: java.sql.SQLDataException: A truncation error was encountered trying to shrink VARCHAR 'TFo0QmxvY2smMQwAAOAXAABMl6MI8TlBBw+MWLFixgAAAP7Bn9+7oD1wpMEv&' to length 4000. | |
at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.Util.generateCsSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.TransactionResourceImpl.wrapInSQLException(Unknown Source) | |
at org.apache.derby.impl.jdbc.TransactionResourceImpl.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedConnection.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.ConnectionChild.handleException(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedStatement.executeStatement(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeStatement(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeLargeUpdate(Unknown Source) | |
at org.apache.derby.impl.jdbc.EmbedPreparedStatement.executeUpdate(Unknown Source) | |
at com.jolbox.bonecp.PreparedStatementHandle.executeUpdate(PreparedStatementHandle.java:205) | |
at org.datanucleus.store.rdbms.ParamLoggingPreparedStatement.executeUpdate(ParamLoggingPreparedStatement.java:399) | |
at org.datanucleus.store.rdbms.SQLController.executeStatementUpdate(SQLController.java:439) | |
at org.datanucleus.store.rdbms.scostore.JoinMapStore.internalPut(JoinMapStore.java:1069) | |
... 74 more | |
Caused by: ERROR 22001: A truncation error was encountered trying to shrink VARCHAR 'TFo0QmxvY2smMQwAAOAXAABMl6MI8TlBBw+MWLFixgAAAP7Bn9+7oD1wpMEv&' to length 4000. | |
at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) | |
at org.apache.derby.iapi.error.StandardException.newException(Unknown Source) | |
at org.apache.derby.iapi.types.SQLChar.hasNonBlankChars(Unknown Source) | |
at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source) | |
at org.apache.derby.iapi.types.SQLVarchar.normalize(Unknown Source) | |
at org.apache.derby.iapi.types.DataTypeDescriptor.normalize(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeColumn(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.normalizeRow(Unknown Source) | |
at org.apache.derby.impl.sql.execute.NormalizeResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.DMLWriteResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.InsertResultSet.getNextRowCore(Unknown Source) | |
at org.apache.derby.impl.sql.execute.InsertResultSet.open(Unknown Source) | |
at org.apache.derby.impl.sql.GenericPreparedStatement.executeStmt(Unknown Source) | |
at org.apache.derby.impl.sql.GenericPreparedStatement.execute(Unknown Source) | |
... 82 more | |
) | |
at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:168) | |
at com.sun.proxy.$Proxy33.alter_table_with_cascade(Unknown Source) | |
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.alter_table(HiveMetaStoreClient.java:340) | |
at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.alter_table(SessionHiveMetaStoreClient.java:251) | |
at sun.reflect.GeneratedMethodAccessor101.invoke(Unknown Source) | |
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) | |
at java.lang.reflect.Method.invoke(Method.java:498) | |
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156) | |
at com.sun.proxy.$Proxy34.alter_table(Unknown Source) | |
at org.apache.hadoop.hive.ql.metadata.Hive.alterTable(Hive.java:496) | |
... 44 more | |
Exception in thread "pool-21-thread-2" java.lang.NullPointerException | |
at com.bearingpoint.abacus.x1onspark.util.ExceptionHandlingExecutor.afterExecute(ExceptionHandlingExecutor.java:54) | |
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1150) | |
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) | |
at java.lang.Thread.run(Thread.java:745) |
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.