Details

Type: Bug
Status: Resolved
Priority: Major
Resolution: Fixed
Affects Version/s: 1.1.3
Component/s: None
Environment: hbase version 1.1.3
Hadoop Flags: Reviewed
Release Note: Every value in the hbase.bucketcache.bucket.sizes configuration must be a multiple of 256. If that is not the case, instantiation of the L2 bucket cache itself fails with an IllegalArgumentException.
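The fix makes this constraint fail fast at cache construction instead of corrupting reads later. A minimal sketch of that validation as a standalone helper (class and method names are illustrative, not the actual HBase BucketAllocator code):

public class BucketSizeValidator {
  private static final int ALIGNMENT = 256; // bucket offsets assume 256-byte alignment

  static void validate(int[] bucketSizes) {
    for (int size : bucketSizes) {
      if (size % ALIGNMENT != 0) {
        throw new IllegalArgumentException("Illegal bucket size " + size
            + ": hbase.bucketcache.bucket.sizes values must be multiples of " + ALIGNMENT);
      }
    }
  }

  public static void main(String[] args) {
    // Fails on 46000, the one misaligned value in the configuration below.
    validate(new int[] {16384, 32768, 40960, 46000, 49152, 51200, 65536, 131072, 524288});
  }
}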
Description
hbase-site.xml settings used to reproduce the issue:
<property>
<name>hbase.bucketcache.bucket.sizes</name>
<value>16384,32768,40960, 46000,49152,51200,65536,131072,524288</value>
</property>
<property>
<name>hbase.bucketcache.size</name>
<value>16384</value>
</property>
<property>
<name>hbase.bucketcache.ioengine</name>
<value>offheap</value>
</property>
<property>
<name>hfile.block.cache.size</name>
<value>0.3</value>
</property>
<property>
<name>hfile.block.bloom.cacheonwrite</name>
<value>true</value>
</property>
<property>
<name>hbase.rs.cacheblocksonwrite</name>
<value>true</value>
</property>
<property>
<name>hfile.block.index.cacheonwrite</name>
<value>true</value>
</property>
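Every size in the hbase.bucketcache.bucket.sizes list above is a multiple of 256 except 46000 (46000 = 179 * 256 + 176), which is what triggers this bug. A standalone check (not HBase code) over the exact configured value confirms it:

import java.util.Arrays;

public class FindMisalignedBucketSizes {
  public static void main(String[] args) {
    // The exact string from hbase-site.xml above, stray space included.
    String configured = "16384,32768,40960, 46000,49152,51200,65536,131072,524288";
    Arrays.stream(configured.split(","))
        .map(String::trim)
        .mapToInt(Integer::parseInt)
        .filter(size -> size % 256 != 0)
        .forEach(size -> System.out.println(
            size + " % 256 = " + (size % 256) + " -> not a multiple of 256"));
  }
}

Output: 46000 % 256 = 176 -> not a multiple of 256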
create table
n_splits = 200
create 'usertable', {NAME => 'family', COMPRESSION => 'snappy', VERSIONS => 1, DATA_BLOCK_ENCODING => 'DIFF', CONFIGURATION => {'hbase.hregion.memstore.block.multiplier' => 5}}, {SPLITS => (1..n_splits).map {|i| "user#{1000+i*(9999-1000)/n_splits}"}}
load data
bin/ycsb load hbase10 -P workloads/workloada -p table=usertable -p columnfamily=family -p fieldcount=10 -p fieldlength=100 -p recordcount=200000000 -p insertorder=hashed -p insertstart=0 -p clientbuffering=true -p durability=SKIP_WAL -threads 20 -s
run
bin/ycsb run hbase10 -P workloads/workloadb -p table=usertable -p columnfamily=family -p fieldcount=10 -p fieldlength=100 -p operationcount=20000000 -p readallfields=true -p clientbuffering=true -p requestdistribution=zipfian -threads 10 -s
log output
2016-11-02 20:20:20,261 ERROR [RW.default.readRpcServer.handler=36,queue=21,port=6020] bucket.BucketCache: Failed reading block fdcc7ed6f3b2498b9ef316cc8206c233_44819759 from bucket cache
java.io.IOException: Invalid HFile block magic: \x00\x00\x00\x00\x00\x00\x00\x00
at org.apache.hadoop.hbase.io.hfile.BlockType.parse(BlockType.java:154)
at org.apache.hadoop.hbase.io.hfile.BlockType.read(BlockType.java:167)
at org.apache.hadoop.hbase.io.hfile.HFileBlock.<init>(HFileBlock.java:273)
at org.apache.hadoop.hbase.io.hfile.HFileBlock$1.deserialize(HFileBlock.java:134)
at org.apache.hadoop.hbase.io.hfile.HFileBlock$1.deserialize(HFileBlock.java:121)
at org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.getBlock(BucketCache.java:427)
at org.apache.hadoop.hbase.io.hfile.CombinedBlockCache.getBlock(CombinedBlockCache.java:85)
at org.apache.hadoop.hbase.io.hfile.HFileReaderV2.getCachedBlock(HFileReaderV2.java:266)
at org.apache.hadoop.hbase.io.hfile.HFileReaderV2.readBlock(HFileReaderV2.java:403)
at org.apache.hadoop.hbase.io.hfile.HFileBlockIndex$BlockIndexReader.loadDataBlockWithScanInfo(HFileBlockIndex.java:269)
at org.apache.hadoop.hbase.io.hfile.HFileReaderV2$AbstractScannerV2.seekTo(HFileReaderV2.java:634)
at org.apache.hadoop.hbase.io.hfile.HFileReaderV2$AbstractScannerV2.seekTo(HFileReaderV2.java:584)
at org.apache.hadoop.hbase.regionserver.StoreFileScanner.seekAtOrAfter(StoreFileScanner.java:247)
at org.apache.hadoop.hbase.regionserver.StoreFileScanner.seek(StoreFileScanner.java:156)
at org.apache.hadoop.hbase.regionserver.StoreScanner.seekScanners(StoreScanner.java:363)
at org.apache.hadoop.hbase.regionserver.StoreScanner.<init>(StoreScanner.java:217)
at org.apache.hadoop.hbase.regionserver.HStore.getScanner(HStore.java:2071)
at org.apache.hadoop.hbase.regionserver.HRegion$RegionScannerImpl.<init>(HRegion.java:5369)
at org.apache.hadoop.hbase.regionserver.HRegion.instantiateRegionScanner(HRegion.java:2546)
at org.apache.hadoop.hbase.regionserver.HRegion.getScanner(HRegion.java:2532)
at org.apache.hadoop.hbase.regionserver.HRegion.getScanner(HRegion.java:2514)
at org.apache.hadoop.hbase.regionserver.HRegion.get(HRegion.java:6558)
at org.apache.hadoop.hbase.regionserver.HRegion.get(HRegion.java:6537)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.get(RSRpcServices.java:1935)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32381)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2117)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
at java.lang.Thread.run(Thread.java:745)
2016-11-02 20:20:20,263 ERROR [RW.default.readRpcServer.handler=50,queue=20,port=6020] bucket.BucketCache: Failed reading block c45d6b14789546b785bae94c69c683d5_34198622 from bucket cache
java.io.IOException: Invalid HFile block magic: \x00\x00\x00\x00\x00\x00\x00\x00
at org.apache.hadoop.hbase.io.hfile.BlockType.parse(BlockType.java:154)
at org.apache.hadoop.hbase.io.hfile.BlockType.read(BlockType.java:167)
at org.apache.hadoop.hbase.io.hfile.HFileBlock.<init>(HFileBlock.java:273)
at org.apache.hadoop.hbase.io.hfile.HFileBlock$1.deserialize(HFileBlock.java:134)
at org.apache.hadoop.hbase.io.hfile.HFileBlock$1.deserialize(HFileBlock.java:121)
at org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.getBlock(BucketCache.java:427)
at org.apache.hadoop.hbase.io.hfile.CombinedBlockCache.getBlock(CombinedBlockCache.java:85)
at org.apache.hadoop.hbase.io.hfile.HFileReaderV2.getCachedBlock(HFileReaderV2.java:266)
at org.apache.hadoop.hbase.io.hfile.HFileReaderV2.readBlock(HFileReaderV2.java:403)
at org.apache.hadoop.hbase.io.hfile.HFileBlockIndex$BlockIndexReader.loadDataBlockWithScanInfo(HFileBlockIndex.java:269)
at org.apache.hadoop.hbase.io.hfile.HFileReaderV2$AbstractScannerV2.seekTo(HFileReaderV2.java:634)
at org.apache.hadoop.hbase.io.hfile.HFileReaderV2$AbstractScannerV2.seekTo(HFileReaderV2.java:584)
at org.apache.hadoop.hbase.regionserver.StoreFileScanner.seekAtOrAfter(StoreFileScanner.java:247)
at org.apache.hadoop.hbase.regionserver.StoreFileScanner.seek(StoreFileScanner.java:156)
at org.apache.hadoop.hbase.regionserver.StoreScanner.seekScanners(StoreScanner.java:363)
at org.apache.hadoop.hbase.regionserver.StoreScanner.<init>(StoreScanner.java:217)
at org.apache.hadoop.hbase.regionserver.HStore.getScanner(HStore.java:2071)
at org.apache.hadoop.hbase.regionserver.HRegion$RegionScannerImpl.<init>(HRegion.java:5369)
at org.apache.hadoop.hbase.regionserver.HRegion.instantiateRegionScanner(HRegion.java:2546)
at org.apache.hadoop.hbase.regionserver.HRegion.getScanner(HRegion.java:2532)
at org.apache.hadoop.hbase.regionserver.HRegion.getScanner(HRegion.java:2514)
at org.apache.hadoop.hbase.regionserver.HRegion.get(HRegion.java:6558)
at org.apache.hadoop.hbase.regionserver.HRegion.get(HRegion.java:6537)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.get(RSRpcServices.java:1935)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32381)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2117)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
at java.lang.Thread.run(Thread.java:745)
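For context on the error itself: every HFile block starts with an 8-byte magic (the ASCII string "DATABLK*" for data blocks), and BlockType.parse at the top of the stack above rejects anything it does not recognize. With a misaligned bucket size, the cache reads back from a wrong offset, presumably over never-written zero-filled memory, so the header is all zeros. A hedged sketch of the shape of that check (illustrative, not the real org.apache.hadoop.hbase.io.hfile.BlockType code):

import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class MagicCheck {
  static final byte[] DATA_MAGIC = "DATABLK*".getBytes(StandardCharsets.US_ASCII);

  static void parse(byte[] header) throws IOException {
    for (int i = 0; i < DATA_MAGIC.length; i++) {
      if (header[i] != DATA_MAGIC[i]) {
        throw new IOException("Invalid HFile block magic: " + toHex(header));
      }
    }
  }

  static String toHex(byte[] header) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < 8; i++) {
      sb.append(String.format("\\x%02X", header[i] & 0xFF));
    }
    return sb.toString();
  }

  public static void main(String[] args) throws IOException {
    // An all-zero header, as returned from the bucket cache in the log above.
    parse(new byte[8]); // throws: Invalid HFile block magic: \x00\x00\x00\x00\x00\x00\x00\x00
  }
}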
Issue Links
- is duplicated by: HBASE-15361 Remove unnecessary or Document constraints on BucketCache possible bucket sizes (Resolved)
- is related to: HBASE-15240 Go Big BucketCache Fixes (Resolved)