Error reading a table

Hi all,

I ran a Spark job that creates an “external table” and then inserts the data incrementally (there are roughly 25,000 files). When I read the table, I get this error:

2020-07-27 22:52:15,454 WARN io.hops.transaction.handler.RequestHandler: GET_LISTING TX Failed. TX Time: 1357 ms, RetryCount: 0, TX Stats -- Setup: 0ms, Acqui
reLocks: -1ms, InMemoryProcessing: -1ms, CommitTime: -1ms. Locks: INodeLock {paths=[/Projects/test/Resources/Warehouse/data/test/D_TABLE],
lockType=READ }. java.lang.OutOfMemoryError: Direct buffer memory
java.lang.OutOfMemoryError: Direct buffer memory
        at java.nio.Bits.reserveMemory(Bits.java:695)
        at java.nio.DirectByteBuffer.<init>(DirectByteBuffer.java:123)
        at java.nio.ByteBuffer.allocateDirect(ByteBuffer.java:311)
        at com.mysql.clusterj.tie.FixedByteBufferPoolImpl.borrowBuffer(FixedByteBufferPoolImpl.java:102)
        at com.mysql.clusterj.tie.NdbRecordImpl.newBuffer(NdbRecordImpl.java:299)
        at com.mysql.clusterj.tie.NdbRecordOperationImpl.allocateValueBuffer(NdbRecordOperationImpl.java:329)
        at com.mysql.clusterj.tie.NdbRecordScanOperationImpl.nextResultCopyOut(NdbRecordScanOperationImpl.java:232)
        at com.mysql.clusterj.tie.NdbRecordScanResultDataImpl.next(NdbRecordScanResultDataImpl.java:128)
        at com.mysql.clusterj.core.query.QueryDomainTypeImpl.getResultList(QueryDomainTypeImpl.java:183)
        at com.mysql.clusterj.core.query.QueryImpl.getResultList(QueryImpl.java:146)
        at io.hops.metadata.ndb.wrapper.HopsQuery.getResultList(HopsQuery.java:46)
        at io.hops.metadata.ndb.dalimpl.hdfs.INodeClusterj.findInodesByParentIdAndPartitionIdPPIS(INodeClusterj.java:332)
        at io.hops.metadata.adaptor.INodeDALAdaptor.findInodesByParentIdAndPartitionIdPPIS(INodeDALAdaptor.java:84)
        at io.hops.transaction.context.INodeContext.findByParentIdAndPartitionIdPPIS(INodeContext.java:311)
        at io.hops.transaction.context.INodeContext.findList(INodeContext.java:92)
        at io.hops.transaction.context.TransactionContext.findList(TransactionContext.java:150)
        at io.hops.transaction.EntityManager.findList(EntityManager.java:93)
        at org.apache.hadoop.hdfs.server.namenode.INodeDirectory.getChildren(INodeDirectory.java:589)
        at org.apache.hadoop.hdfs.server.namenode.INodeDirectory.getChildrenList(INodeDirectory.java:570)
        at io.hops.transaction.lock.INodeLock.findImmediateChildren(INodeLock.java:317)
        at io.hops.transaction.lock.INodeLock.acquirePathsINodeLocks(INodeLock.java:169)
        at io.hops.transaction.lock.INodeLock.acquire(INodeLock.java:107)
        at io.hops.transaction.lock.HdfsTransactionalLockAcquirer.acquire(HdfsTransactionalLockAcquirer.java:32)
        at io.hops.transaction.handler.TransactionalRequestHandler.execute(TransactionalRequestHandler.java:88)
        at io.hops.transaction.handler.HopsTransactionalRequestHandler.execute(HopsTransactionalRequestHandler.java:50)
        at io.hops.transaction.handler.RequestHandler.handle(RequestHandler.java:68)
        at io.hops.transaction.handler.RequestHandler.handle(RequestHandler.java:63)
        at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getListingInt(FSDirStatAndListingOp.java:110)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListing(FSNamesystem.java:4043)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getListing(NameNodeRpcServer.java:868)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getListing(ClientNamenodeProtocolServerSideTranslatorPB.java:726)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:447)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:996)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:850)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:793)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1929)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2786)
2020-07-27 22:52:15,457 WARN org.apache.hadoop.ipc.Server: RPC IPC 111, call Call#12430 Retry#0 org.apache.hadoop.hdfs.protocol.ClientProtocol.getListing from 172.16.112.95:46586
java.lang.OutOfMemoryError: Direct buffer memory
        at java.nio.Bits.reserveMemory(Bits.java:695)
        at java.nio.DirectByteBuffer.<init>(DirectByteBuffer.java:123)
        at java.nio.DirectByteBuffer.<init>(DirectByteBuffer.java:123)
        at java.nio.ByteBuffer.allocateDirect(ByteBuffer.java:311)
        at com.mysql.clusterj.tie.FixedByteBufferPoolImpl.borrowBuffer(FixedByteBufferPoolImpl.java:102)
        at com.mysql.clusterj.tie.NdbRecordImpl.newBuffer(NdbRecordImpl.java:299)
        at com.mysql.clusterj.tie.NdbRecordOperationImpl.allocateValueBuffer(NdbRecordOperationImpl.java:329)
        at com.mysql.clusterj.tie.NdbRecordScanOperationImpl.nextResultCopyOut(NdbRecordScanOperationImpl.java:232)
        at com.mysql.clusterj.tie.NdbRecordScanResultDataImpl.next(NdbRecordScanResultDataImpl.java:128)
        at com.mysql.clusterj.core.query.QueryDomainTypeImpl.getResultList(QueryDomainTypeImpl.java:183)
        at com.mysql.clusterj.core.query.QueryImpl.getResultList(QueryImpl.java:146)
        at io.hops.metadata.ndb.wrapper.HopsQuery.getResultList(HopsQuery.java:46)
        at io.hops.metadata.ndb.dalimpl.hdfs.INodeClusterj.findInodesByParentIdAndPartitionIdPPIS(INodeClusterj.java:332)
        at io.hops.metadata.adaptor.INodeDALAdaptor.findInodesByParentIdAndPartitionIdPPIS(INodeDALAdaptor.java:84)
        at io.hops.transaction.context.INodeContext.findByParentIdAndPartitionIdPPIS(INodeContext.java:311)
        at io.hops.transaction.context.INodeContext.findList(INodeContext.java:92)
        at io.hops.transaction.context.TransactionContext.findList(TransactionContext.java:150)
        at io.hops.transaction.EntityManager.findList(EntityManager.java:93)
        at org.apache.hadoop.hdfs.server.namenode.INodeDirectory.getChildren(INodeDirectory.java:589)
        at org.apache.hadoop.hdfs.server.namenode.INodeDirectory.getChildrenList(INodeDirectory.java:570)
        at io.hops.transaction.lock.INodeLock.findImmediateChildren(INodeLock.java:317)
        at io.hops.transaction.lock.INodeLock.acquirePathsINodeLocks(INodeLock.java:169)
        at io.hops.transaction.lock.INodeLock.acquire(INodeLock.java:107)
        at io.hops.transaction.lock.HdfsTransactionalLockAcquirer.acquire(HdfsTransactionalLockAcquirer.java:32)
        at io.hops.transaction.handler.TransactionalRequestHandler.execute(TransactionalRequestHandler.java:88)
        at io.hops.transaction.handler.HopsTransactionalRequestHandler.execute(HopsTransactionalRequestHandler.java:50)
        at io.hops.transaction.handler.RequestHandler.handle(RequestHandler.java:68)
        at io.hops.transaction.handler.RequestHandler.handle(RequestHandler.java:63)
        at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getListingInt(FSDirStatAndListingOp.java:110)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListing(FSNamesystem.java:4043)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getListing(NameNodeRpcServer.java:868)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getListing(ClientNamenodeProtocolServerSideTranslatorPB.java:726)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:447)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:996)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:850)
        at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:793)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:422)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1929)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2786)

Does anyone know how to resolve this error?

Thanks so much,
regards

Antony

I have just noticed that my post is a duplicate of this one:

Thanks a lot,
Antony

1 Like