FNV-1 algorithm:
    hash = basis
    for each octet_of_data to be hashed:
        hash = hash * FNV_prime
        hash = hash xor octet_of_data
    return hash
FNV-1a algorithm (same as FNV-1 but with the xor and multiply steps swapped):
    hash = basis
    for each octet_of_data to be hashed:
        hash = hash xor octet_of_data
        hash = hash * FNV_prime
    return hash
(listing lines 44–54)
HashType hashType = HashType.valueOf(hashConf); HashFunction fun = null; switch (hashType) { case FNV: fun = new FnvHashFunction(); break; case JENKINS: fun = new JenkinsHashFunction(); break; default:
(listing lines 50–60)
this.datastore = new DynamicDataStore(dataDirectory, initLevel, segmentFileSizeMB, segmentFactory, hashLoadFactor, new FnvHashFunction()); this.locks = new StripedLock(lockStripes); } catch(Exception e) { throw new VoldemortException("Failure initializing store.", e); }
(listing lines 171–177)
StoreParams.NUM_SYNC_BATCHES_DEFAULT, StoreParams.SEGMENT_FILE_SIZE_MB_DEFAULT, segmentFactory, StoreParams.SEGMENT_COMPACT_FACTOR_DEFAULT, StoreParams.HASH_LOAD_FACTOR_DEFAULT, new FnvHashFunction()); }
(listing lines 235–241)
StoreParams.NUM_SYNC_BATCHES_DEFAULT, segmentFileSizeMB, segmentFactory, StoreParams.SEGMENT_COMPACT_FACTOR_DEFAULT, StoreParams.HASH_LOAD_FACTOR_DEFAULT, new FnvHashFunction()); }
(listing lines 303–309)
numSyncBatches, segmentFileSizeMB, segmentFactory, StoreParams.SEGMENT_COMPACT_FACTOR_DEFAULT, StoreParams.HASH_LOAD_FACTOR_DEFAULT, new FnvHashFunction()); }
(listing lines 132–138)
StoreParams.BATCH_SIZE_DEFAULT, StoreParams.NUM_SYNC_BATCHES_DEFAULT, StoreParams.SEGMENT_FILE_SIZE_MB_DEFAULT, segmentFactory, StoreParams.SEGMENT_COMPACT_FACTOR_DEFAULT, new FnvHashFunction()); }
(listing lines 162–168)
(listing lines 194–200)
batchSize, numSyncBatches, segmentFileSizeMB, segmentFactory, StoreParams.SEGMENT_COMPACT_FACTOR_DEFAULT, new FnvHashFunction()); }
(listing lines 130–136)
(listing lines 160–166)