Grokbase Groups: Hive user, July 2011
Failures with DELETEME tables

Has anyone encountered the following exception? It is causing our SELECT queries to return incorrect results infrequently.

2011-07-06 13:46:40,225 WARN DataNucleus.Query (Log4JLogger.java:warn(106)) - Query for candidates of org.apache.hadoop.hive.metastore.model.MPartition and subclasses resulted in no possible candidates
Exception thrown obtaining schema column information from datastore
org.datanucleus.exceptions.NucleusDataStoreException: Exception thrown obtaining schema column information from datastore
at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.refreshTableData(RDBMSSchemaHandler.java:991)
at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getRDBMSTableInfoForTable(RDBMSSchemaHandler.java:823)
at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getRDBMSTableInfoForTable(RDBMSSchemaHandler.java:772)
at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getSchemaData(RDBMSSchemaHandler.java:207)
at org.datanucleus.store.rdbms.RDBMSStoreManager.getColumnInfoForTable(RDBMSStoreManager.java:1742)
at org.datanucleus.store.rdbms.table.TableImpl.initializeColumnInfoFromDatastore(TableImpl.java:330)
at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.performTablesValidation(RDBMSStoreManager.java:2768)
at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.addClassTablesAndValidate(RDBMSStoreManager.java:2546)
at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.run(RDBMSStoreManager.java:2191)
at org.datanucleus.store.rdbms.AbstractSchemaTransaction.execute(AbstractSchemaTransaction.java:113)
at org.datanucleus.store.rdbms.RDBMSStoreManager.addClasses(RDBMSStoreManager.java:966)
at org.datanucleus.store.AbstractStoreManager.addClass(AbstractStoreManager.java:914)
at org.datanucleus.store.mapped.MappedStoreManager.getDatastoreClass(MappedStoreManager.java:356)
at org.datanucleus.store.rdbms.query.RDBMSQueryUtils.getStatementForCandidates(RDBMSQueryUtils.java:298)
at org.datanucleus.store.rdbms.query.JDOQLQuery.compileQueryFull(JDOQLQuery.java:797)
at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:296)
at org.datanucleus.store.query.Query.executeQuery(Query.java:1643)
at org.datanucleus.store.query.Query.executeWithArray(Query.java:1514)
at org.datanucleus.jdo.JDOQuery.execute(JDOQuery.java:266)
at org.apache.hadoop.hive.metastore.ObjectStore.listPartitionNames(ObjectStore.java:921)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler$22.run(HiveMetaStore.java:1363)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler$22.run(HiveMetaStore.java:1360)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.executeWithRetry(HiveMetaStore.java:234)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_partition_names(HiveMetaStore.java:1360)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.listPartitionNames(HiveMetaStoreClient.java:653)
at org.apache.hadoop.hive.ql.metadata.Hive.getPartitionNames(Hive.java:1244)
at org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner.prune(PartitionPruner.java:178)
at org.apache.hadoop.hive.ql.optimizer.pcr.PcrOpProcFactory$FilterPCR.process(PcrOpProcFactory.java:112)
at org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher.dispatch(DefaultRuleDispatcher.java:89)
at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatch(DefaultGraphWalker.java:88)
at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.walk(DefaultGraphWalker.java:128)
at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.startWalking(DefaultGraphWalker.java:102)
at org.apache.hadoop.hive.ql.optimizer.pcr.PartitionConditionRemover.transform(PartitionConditionRemover.java:78)
at org.apache.hadoop.hive.ql.optimizer.Optimizer.optimize(Optimizer.java:85)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:6609)
at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:238)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:332)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:686)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:149)
at org.apache.hadoop.hive.cli.CliDriver.processLineInternal(CliDriver.java:228)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:209)
at org.apache.hadoop.hive.cli.CliDriver.processReader(CliDriver.java:256)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:368)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.hadoop.util.RunJar.main(RunJar.java:156)
Caused by: com.mysql.jdbc.exceptions.MySQLSyntaxErrorException: Table 'dataoven_prod_hadoop.DELETEME1309959999747' doesn't exist
at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:1026)
at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:956)
at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3491)
at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3423)
at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:1936)
at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2060)
at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2536)
at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2465)
at com.mysql.jdbc.StatementImpl.executeQuery(StatementImpl.java:1386)
at com.mysql.jdbc.DatabaseMetaData$2.forEach(DatabaseMetaData.java:2471)
at com.mysql.jdbc.IterateBlock.doForAll(IterateBlock.java:50)
at com.mysql.jdbc.DatabaseMetaData.getColumns(DatabaseMetaData.java:2589)
at org.apache.commons.dbcp.DelegatingDatabaseMetaData.getColumns(DelegatingDatabaseMetaData.java:218)
at org.datanucleus.store.rdbms.adapter.DatabaseAdapter.getColumns(DatabaseAdapter.java:1461)
at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.refreshTableData(RDBMSSchemaHandler.java:924)
... 47 more
Nested Throwables StackTrace:
com.mysql.jdbc.exceptions.MySQLSyntaxErrorException: Table 'dataoven_prod_hadoop.DELETEME1309959999747' doesn't exist
at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:1026)
at com.mysql.jdbc.SQLError.createSQLException(SQLError.java:956)
at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3491)
at com.mysql.jdbc.MysqlIO.checkErrorPacket(MysqlIO.java:3423)
at com.mysql.jdbc.MysqlIO.sendCommand(MysqlIO.java:1936)
at com.mysql.jdbc.MysqlIO.sqlQueryDirect(MysqlIO.java:2060)
at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2536)
at com.mysql.jdbc.ConnectionImpl.execSQL(ConnectionImpl.java:2465)
at com.mysql.jdbc.StatementImpl.executeQuery(StatementImpl.java:1386)
at com.mysql.jdbc.DatabaseMetaData$2.forEach(DatabaseMetaData.java:2471)
at com.mysql.jdbc.IterateBlock.doForAll(IterateBlock.java:50)
at com.mysql.jdbc.DatabaseMetaData.getColumns(DatabaseMetaData.java:2589)
at org.apache.commons.dbcp.DelegatingDatabaseMetaData.getColumns(DelegatingDatabaseMetaData.java:218)
at org.datanucleus.store.rdbms.adapter.DatabaseAdapter.getColumns(DatabaseAdapter.java:1461)
at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.refreshTableData(RDBMSSchemaHandler.java:924)
at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getRDBMSTableInfoForTable(RDBMSSchemaHandler.java:823)
at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getRDBMSTableInfoForTable(RDBMSSchemaHandler.java:772)
at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getSchemaData(RDBMSSchemaHandler.java:207)
at org.datanucleus.store.rdbms.RDBMSStoreManager.getColumnInfoForTable(RDBMSStoreManager.java:1742)
at org.datanucleus.store.rdbms.table.TableImpl.initializeColumnInfoFromDatastore(TableImpl.java:330)
at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.performTablesValidation(RDBMSStoreManager.java:2768)
at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.addClassTablesAndValidate(RDBMSStoreManager.java:2546)
at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.run(RDBMSStoreManager.java:2191)
at org.datanucleus.store.rdbms.AbstractSchemaTransaction.execute(AbstractSchemaTransaction.java:113)
at org.datanucleus.store.rdbms.RDBMSStoreManager.addClasses(RDBMSStoreManager.java:966)
at org.datanucleus.store.AbstractStoreManager.addClass(AbstractStoreManager.java:914)
at org.datanucleus.store.mapped.MappedStoreManager.getDatastoreClass(MappedStoreManager.java:356)
at org.datanucleus.store.rdbms.query.RDBMSQueryUtils.getStatementForCandidates(RDBMSQueryUtils.java:298)
at org.datanucleus.store.rdbms.query.JDOQLQuery.compileQueryFull(JDOQLQuery.java:797)
at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:296)
at org.datanucleus.store.query.Query.executeQuery(Query.java:1643)
at org.datanucleus.store.query.Query.executeWithArray(Query.java:1514)
at org.datanucleus.jdo.JDOQuery.execute(JDOQuery.java:266)
at org.apache.hadoop.hive.metastore.ObjectStore.listPartitionNames(ObjectStore.java:921)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler$22.run(HiveMetaStore.java:1363)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler$22.run(HiveMetaStore.java:1360)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.executeWithRetry(HiveMetaStore.java:234)
at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_partition_names(HiveMetaStore.java:1360)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.listPartitionNames(HiveMetaStoreClient.java:653)
at org.apache.hadoop.hive.ql.metadata.Hive.getPartitionNames(Hive.java:1244)
at org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner.prune(PartitionPruner.java:178)
at org.apache.hadoop.hive.ql.optimizer.pcr.PcrOpProcFactory$FilterPCR.process(PcrOpProcFactory.java:112)
at org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher.dispatch(DefaultRuleDispatcher.java:89)
at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.dispatch(DefaultGraphWalker.java:88)
at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.walk(DefaultGraphWalker.java:128)
at org.apache.hadoop.hive.ql.lib.DefaultGraphWalker.startWalking(DefaultGraphWalker.java:102)
at org.apache.hadoop.hive.ql.optimizer.pcr.PartitionConditionRemover.transform(PartitionConditionRemover.java:78)
at org.apache.hadoop.hive.ql.optimizer.Optimizer.optimize(Optimizer.java:85)
at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:6609)
at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:238)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:332)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:686)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:149)
at org.apache.hadoop.hive.cli.CliDriver.processLineInternal(CliDriver.java:228)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:209)
at org.apache.hadoop.hive.cli.CliDriver.processReader(CliDriver.java:256)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:368)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.hadoop.util.RunJar.main(RunJar.java:156)

  • Edward Capriolo at Jul 10, 2011 at 2:42 am

    On Sat, Jul 9, 2011 at 10:24 PM, Steven Wong wrote:

    [original question and full stack trace quoted in full; snipped, identical to the post above]

    DataNucleus creates these DELETEME tables to test the waters: during the
    session it creates and then drops a table. It could be that your DB user
    does not have the proper permissions, or that transactional/InnoDB support
    is not enabled.
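    For reference, a minimal sketch of the MySQL grants such a metastore user
    would need so DataNucleus can create and drop its probe tables; the
    database and account names (metastore_db, 'hive'@'localhost') are
    placeholders, not values taken from this thread:

    -- Placeholder database and user; adjust to your metastore setup.
    -- CREATE and DROP cover the DELETEME probe tables; the rest are the
    -- ordinary metastore read/write privileges.
    GRANT SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, ALTER, INDEX
        ON metastore_db.* TO 'hive'@'localhost';
    FLUSH PRIVILEGES;
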
  • Hadoopman at Jul 11, 2011 at 3:02 am
    So we're seeing the following error during some of our Hive loads:

    2011-07-05 12:26:52,927 Stage-2 map = 100%, reduce = 100%
    Ended Job = job_201106302113_3864
    Loading data to table default.merged_weblogs partition (day=null)
    Failed with exception Number of dynamic partitions created is 1013,
    which is more than 1000. To solve this try to set
    hive.exec.max.dynamic.partitions to at least 1013.
    FAILED: Execution Error, return code 1 from
    org.apache.hadoop.hive.ql.exec.MoveTask

    Here is a sample script we're running:

    SET hive.exec.dynamic.partition=true;
    SET hive.exec.dynamic.partition.mode=nonstrict;
    SET hive.exec.max.dynamic.partitions.pernode=10000;
    SET hive.exec.max.dynamic.partitions=10000;
    SET hive.exec.max.created.files=150000;

    SET hive.exec.compress.intermediate=true;
    SET hive.intermediate.compression.codec=com.hadoop.compression.lzo.LzoCodec;
    SET hive.intermediate.compression.type=BLOCK;
    SET mapred.map.output.compression.codec=com.hadoop.compression.lzo.LzoCodec;

    SET hive.exec.compress.output=true;
    SET mapred.output.compress=true;
    SET mapred.output.compression.codec=org.apache.hadoop.io.compress.GzipCodec;
    SET mapred.output.compression.type=BLOCK;

    FROM (
    SELECT hostname, name, ip, day
    FROM logsStaging
    UNION ALL
    SELECT hostname, name, ip, day
    FROM logs
    ) a

    INSERT OVERWRITE TABLE logs PARTITION(day)
    SELECT DISTINCT hostname, name, ip, day
    DISTRIBUTE BY day;

    QUIT;

    Has anyone run into this problem before? I've noticed that increasing the
    partition limits hasn't been working. I've been looking for a config.xml
    setting already marked 'final' in the properties, but no luck so far. I
    believe the default is 100 partitions, and the job (when running) does
    show the 10000-partition limits from the script above.

    Thoughts on what else to look at?
    Thanks!
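    For reference, a property that an administrator has locked down in a
    Hadoop-style configuration file looks like the sketch below; a property
    marked final in one configuration resource cannot be overridden by values
    from later resources such as per-job configuration, which is one way SET
    commands can appear to have no effect. The value shown is hypothetical,
    not confirmed from this thread:

    <!-- Sketch of a locked-down entry in a cluster *-site.xml (hypothetical value). -->
    <property>
      <name>hive.exec.max.dynamic.partitions</name>
      <value>1000</value>
      <final>true</final>
    </property>

    In the Hive CLI, running SET hive.exec.max.dynamic.partitions; with no
    value should print the setting actually in effect for the session.
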
  • Steven Wong at Jul 12, 2011 at 6:14 am
    Bugs in DataNucleus and MySQL Connector! Any Hive user can potentially hit this problem.

    My SELECT query got incorrect results because JDOQLQuery.compileQueryFull swallowed the fatal datastore exception when it called RDBMSQueryUtils.getStatementForCandidates. This is a bug in RDBMSQueryUtils.getStatementForCandidates or its caller(s).

    The datastore exception was thrown because com.mysql.jdbc.DatabaseMetaData.getColumns has a race condition. It was called with tableNamePattern == null, so it fetched a list of all tables and then executed a per-table query to fetch each table's info. If some other process drops one of those tables before all the per-table fetches are done, the fetch for that table fails and getColumns throws an exception. That is the race condition bug.
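    A sketch of that sequence in SQL terms; the exact statements the driver
    issues are an assumption here (the stack trace only shows a per-table
    metadata query failing), but the ordering is the point:

    -- Session A: metastore client validating schema (tableNamePattern == null)
    SHOW TABLES;                                    -- list includes DELETEME1309959999747
    -- Session B: another client drops its probe table in the meantime
    DROP TABLE DELETEME1309959999747;
    -- Session A: keeps iterating over its now-stale table list
    SHOW FULL COLUMNS FROM DELETEME1309959999747;   -- fails: table doesn't exist
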

    For now, I have enabled datanucleus.fixedDatastore in hive-site.xml to eliminate the use of DELETEME tables, which virtually eliminates the problem. But the problem can still happen from non-DELETEME causes, due to the two bugs described above, or on the occasions when datanucleus.fixedDatastore needs to be disabled.
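    A minimal hive-site.xml sketch of that workaround; datanucleus.fixedDatastore
    is the property named above, and pairing it with datanucleus.autoCreateSchema
    set to false is an assumption, not something stated in this thread:

    <!-- Tell DataNucleus the metastore schema is fixed: no probe/ALTER activity. -->
    <property>
      <name>datanucleus.fixedDatastore</name>
      <value>true</value>
    </property>
    <!-- Assumed companion setting: do not auto-create missing schema objects. -->
    <property>
      <name>datanucleus.autoCreateSchema</name>
      <value>false</value>
    </property>
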

    Someone familiar with DataNucleus or MySQL Connector should confirm or refute this....


    From: Edward Capriolo
    Sent: Saturday, July 09, 2011 7:42 PM
    To: user@hive.apache.org
    Subject: Re: Failures with DELETEME tables


    [Edward Capriolo's reply, including the quoted original question and full stack trace, snipped; see above]

Discussion Overview
group: user@hive.apache.org
categories: hive, hadoop
posted: Jul 10, '11 at 2:25a
active: Jul 12, '11 at 6:14a
posts: 4
users: 3
website: hive.apache.org
