@@ -1154,37 +1154,37 @@ private ref TEntry AddOrGetRefInEntries(K key, out bool found)
    ref var h = ref hashesAndIndexes.GetSurePresentItemRef(hashIndex);

    // 1. Skip over hashes with the bigger and equal probes. The hashes with bigger probes overlapping from the earlier ideal positions
-   var probes = 1;
-   while ((h >>> ProbeCountShift) >= probes)
+   var probe = 1;
+   while ((h >>> ProbeCountShift) >= probe)
    {
        // 2. For the equal probes check for equality the hash middle part, and update the entry if the keys are equal too
-       if (((h >>> ProbeCountShift) == probes) & ((h & hashMiddleMask) == hashMiddle))
+       if (((h >>> ProbeCountShift) == probe) & ((h & hashMiddleMask) == hashMiddle))
        {
            ref var e = ref GetSurePresentEntryRef(h & indexMask);
            if (found = default(TEq).Equals(e.Key, key))
                return ref e;
        }
        h = ref hashesAndIndexes.GetSurePresentItemRef(++hashIndex & indexMask);
-       ++probes;
+       ++probe;
    }
    found = false;

    // 3. We did not find the hash and therefore the key, so insert the new entry
    var hRobinHooded = h;
-   h = (probes << ProbeCountShift) | hashMiddle | _count;
+   h = (probe << ProbeCountShift) | hashMiddle | _count;

    // 4. If the robin hooded hash is empty then we stop
    // 5. Otherwise we steal the slot with the smaller probes
-   probes = hRobinHooded >>> ProbeCountShift;
+   probe = hRobinHooded >>> ProbeCountShift;
    while (hRobinHooded != 0)
    {
        h = ref hashesAndIndexes.GetSurePresentItemRef(++hashIndex & indexMask);
-       if ((h >>> ProbeCountShift) < ++probes)
+       if ((h >>> ProbeCountShift) < ++probe)
        {
            var tmp = h;
-           h = (probes << ProbeCountShift) | (hRobinHooded & HashAndIndexMask);
+           h = (probe << ProbeCountShift) | (hRobinHooded & HashAndIndexMask);
            hRobinHooded = tmp;
-           probes = hRobinHooded >>> ProbeCountShift;
+           probe = hRobinHooded >>> ProbeCountShift;
        }
    }

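The loop in this hunk is the core Robin Hood insertion: walk forward while the occupants have probed at least as far, insert at the first slot whose occupant is "richer" (smaller probe count), and then keep re-homing the displaced hash. Below is a minimal, self-contained sketch of that shape over a plain int[] of packed slots; the bit widths (ProbeCountShift, IndexMask) and the simplified payload are assumptions made for the sketch, not the library's actual constants.

// Sketch only: the layout (top bits = probe count, low bits = entry index) and the
// constant values below are illustrative assumptions, not the library's real ones.
static class RobinHoodInsertSketch
{
    const int ProbeCountShift = 27;
    const int IndexMask = (1 << 8) - 1;                       // sketch capacity of 256 slots
    const int HashAndIndexMask = (1 << ProbeCountShift) - 1;

    // Inserts a payload (hash-middle bits | entry index) starting from its ideal slot,
    // stealing the first slot whose occupant has a smaller probe count (steps 4-5 above).
    public static void Insert(int[] slots, int idealIndex, int payload)
    {
        int probe = 1, i = idealIndex;
        while ((slots[i] >>> ProbeCountShift) >= probe)       // skip occupants that probed at least as far
        {
            i = (i + 1) & IndexMask;
            ++probe;
        }

        var robinHooded = slots[i];                           // may be 0 (empty), then we are done
        slots[i] = (probe << ProbeCountShift) | payload;

        probe = robinHooded >>> ProbeCountShift;
        while (robinHooded != 0)                              // keep re-homing the displaced hash
        {
            i = (i + 1) & IndexMask;
            if ((slots[i] >>> ProbeCountShift) < ++probe)
            {
                var tmp = slots[i];
                slots[i] = (probe << ProbeCountShift) | (robinHooded & HashAndIndexMask);
                robinHooded = tmp;
                probe = robinHooded >>> ProbeCountShift;
            }
        }
    }
}

The payoff of this discipline is that a lookup may stop as soon as it reaches a slot with a smaller probe count than its own, which keeps probe sequences short.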
@@ -1202,29 +1202,29 @@ private void AddJustHashAndEntryIndexWithoutResizing(int hash, int index)
#endif
    // 1. Skip over hashes with the bigger and equal probes. The hashes with bigger probes overlapping from the earlier ideal positions
    ref var h = ref hashesAndIndexes.GetSurePresentItemRef(hashIndex);
-   var probes = 1;
-   while ((h >>> ProbeCountShift) >= probes)
+   var probe = 1;
+   while ((h >>> ProbeCountShift) >= probe)
    {
        h = ref hashesAndIndexes.GetSurePresentItemRef(++hashIndex & IndexMask);
-       ++probes;
+       ++probe;
    }

    // 3. We did not find the hash and therefore the key, so insert the new entry
    var hRobinHooded = h;
-   h = (probes << ProbeCountShift) | (hash & HashMask) | index;
+   h = (probe << ProbeCountShift) | (hash & HashMask) | index;

    // 4. If the robin hooded hash is empty then we stop
    // 5. Otherwise we steal the slot with the smaller probes
-   probes = hRobinHooded >>> ProbeCountShift;
+   probe = hRobinHooded >>> ProbeCountShift;
    while (hRobinHooded != 0)
    {
        h = ref hashesAndIndexes.GetSurePresentItemRef(++hashIndex & IndexMask);
-       if ((h >>> ProbeCountShift) < ++probes)
+       if ((h >>> ProbeCountShift) < ++probe)
        {
            var tmp = h;
-           h = (probes << ProbeCountShift) | (hRobinHooded & HashAndIndexMask);
+           h = (probe << ProbeCountShift) | (hRobinHooded & HashAndIndexMask);
            hRobinHooded = tmp;
-           probes = hRobinHooded >>> ProbeCountShift;
+           probe = hRobinHooded >>> ProbeCountShift;
        }
    }
}
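In this method the whole hash slot is a single int packing three fields: the probe count above ProbeCountShift, the middle hash bits under HashMask, and the entry index under the low index bits. A tiny sketch of that packing follows; the field widths are assumptions chosen for illustration, not the library's real HashMask/IndexMask values.

// The field widths here are assumptions for this sketch, not the real constants.
static class SlotLayoutSketch
{
    const int ProbeCountShift = 27;                                  // top bits: probe count
    const int IndexMask = (1 << 8) - 1;                              // low bits: entry index
    const int HashMask = ((1 << ProbeCountShift) - 1) & ~IndexMask;  // middle bits: part of the hash

    static int Pack(int probe, int hash, int index) =>
        (probe << ProbeCountShift) | (hash & HashMask) | index;

    static void Demo()
    {
        var slot = Pack(probe: 2, hash: 0x1234_5678, index: 42);
        System.Console.WriteLine($"probe = {slot >>> ProbeCountShift}, index = {slot & IndexMask}");
        // prints "probe = 2, index = 42"; the hash middle bits sit untouched in between
    }
}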
@@ -1282,25 +1282,25 @@ public ref TEntry AddOrGetEntryRef(K key, out bool found)
    Index:   0   1   2   3   4   5    6    7
    Hash:   [0] [0] [0] [0] [0] [0]  [0]  [0]

-   2. Insert that key A with the hash 13, which is 0b0000_1101. 13 & 7 Mask = 5, so the index is 5.
+   2. Insert the key A with the hash 13, which is 0b0000_1101. 13 & 7 Mask = 5, so the index is 5.

    Index:   0   1   2   3   4   5    6    7
    Hash:   [0] [0] [0] [0] [0] [13] [0]  [0]
    Probe:                       1A

-   3. Insert that key B with the hash 5, which is 0b0000_0101. 5 & 7 Mask = 5, so the index is again 5.
+   3. Insert the key B with the hash 5, which is 0b0000_0101. 5 & 7 Mask = 5, so the index is again 5.

    Index:   0   1   2   3   4   5    6    7
    Hash:   [0] [0] [0] [0] [0] [13] [5]  [0]
    Probe:                       1A   2B

-   4. Insert that key C with the hash 7, which is 0b0000_0111. 7 & 7 Mask = 7, so the index is 7.
+   4. Insert the key C with the hash 7, which is 0b0000_0111. 7 & 7 Mask = 7, so the index is 7.

    Index:   0   1   2   3   4   5    6    7
    Hash:   [0] [0] [0] [0] [0] [13] [5]  [7]
    Probe:                       1A   2B   1C

-   5. Insert that key D with the hash 21, which is 0b0001_0101. 21 & 7 Mask = 5, so the index is again 5.
+   5. Insert the key D with the hash 21, which is 0b0001_0101. 21 & 7 Mask = 5, so the index is again 5.

    Index:   0   1   2   3   4   5    6    7
    Hash:   [7] [0] [0] [0] [0] [13] [5]  [21]
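A quick check of the ideal positions used in this walkthrough, written as plain top-level C#:

int[] hashes = { 13, 5, 7, 21 };   // keys A, B, C, D from the walkthrough; capacity 8, mask 7
foreach (var h in hashes)
    System.Console.WriteLine($"{h,2} & 7 = {h & 7}");
// Prints 5, 5, 7, 5: A, B and D all want slot 5, so B lands at slot 6 with probe 2,
// and D probes 5 -> 6 -> 7 and robin-hoods C (probe 1) out of slot 7, matching the last table above.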
@@ -1325,29 +1325,29 @@ private ref TEntry AddSureAbsentDefaultAndGetRefInEntries(K key)
    ref var h = ref hashesAndIndexes.GetSurePresentItemRef(hashIndex);

    // 1. Skip over hashes with the bigger and equal probes. The hashes with bigger probes overlapping from the earlier ideal positions
-   var probes = 1;
-   while ((h >>> ProbeCountShift) >= probes)
+   var probe = 1;
+   while ((h >>> ProbeCountShift) >= probe)
    {
        h = ref hashesAndIndexes.GetSurePresentItemRef(++hashIndex & indexMask);
-       ++probes;
+       ++probe;
    }

    // 3. We did not find the hash and therefore the key, so insert the new entry
    var hRobinHooded = h;
-   h = (probes << ProbeCountShift) | (hash & HashAndIndexMask & ~indexMask) | _count;
+   h = (probe << ProbeCountShift) | (hash & HashAndIndexMask & ~indexMask) | _count;

    // 4. If the robin hooded hash is empty then we stop
    // 5. Otherwise we steal the slot with the smaller probes
-   probes = hRobinHooded >>> ProbeCountShift;
+   probe = hRobinHooded >>> ProbeCountShift;
    while (hRobinHooded != 0)
    {
        h = ref hashesAndIndexes.GetSurePresentItemRef(++hashIndex & indexMask);
-       if ((h >>> ProbeCountShift) < ++probes)
+       if ((h >>> ProbeCountShift) < ++probe)
        {
            var tmp = h;
-           h = (probes << ProbeCountShift) | (hRobinHooded & HashAndIndexMask);
+           h = (probe << ProbeCountShift) | (hRobinHooded & HashAndIndexMask);
            hRobinHooded = tmp;
-           probes = hRobinHooded >>> ProbeCountShift;
+           probe = hRobinHooded >>> ProbeCountShift;
        }
    }

@@ -1422,19 +1422,19 @@ internal ref TEntry TryGetRefInEntries(K key, out bool found)
    var h = hashesAndIndexes.GetSurePresentItem(hashIndex);

    // 1. Skip over hashes with the bigger and equal probes. The hashes with bigger probes overlapping from the earlier ideal positions
-   var probes = 1;
-   while ((h >>> ProbeCountShift) >= probes)
+   var probe = 1;
+   while ((h >>> ProbeCountShift) >= probe)
    {
        // 2. For the equal probes check for equality the hash middle part, and update the entry if the keys are equal too
-       if (((h >>> ProbeCountShift) == probes) & ((h & hashMiddleMask) == hashMiddle))
+       if (((h >>> ProbeCountShift) == probe) & ((h & hashMiddleMask) == hashMiddle))
        {
            ref var e = ref GetSurePresentEntryRef(h & indexMask);
            if (found = default(TEq).Equals(e.Key, key))
                return ref e;
        }

        h = hashesAndIndexes.GetSurePresentItem(++hashIndex & indexMask);
-       ++probes;
+       ++probe;
    }

    found = false;
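The lookup mirrors the insertion's probe discipline: it can stop as soon as it reaches a slot whose probe count is smaller than its own, and it compares the probe count and the stored middle hash bits with a single non-short-circuiting '&' before touching the entry. A minimal sketch of the same loop over the simplified packed-slot layout from the insertion sketch earlier; the parameters and constants are assumptions, not the library's API.

using System;

// Sketch only: constants and the key-equality callback are illustrative assumptions.
static class RobinHoodLookupSketch
{
    const int ProbeCountShift = 27;
    const int IndexMask = (1 << 8) - 1;

    // Returns the entry index of the matching key, or -1 when the probe discipline
    // proves the hash cannot be present further along the probe sequence.
    public static int TryFind(int[] slots, int idealIndex, int hashMiddle, int hashMiddleMask,
        Func<int, bool> entryKeyEquals)
    {
        int probe = 1, i = idealIndex;
        var slot = slots[i];
        while ((slot >>> ProbeCountShift) >= probe)            // stop at the first "richer" slot
        {
            // non-short-circuiting '&': check the probe count and the hash middle in one branch
            if (((slot >>> ProbeCountShift) == probe) & ((slot & hashMiddleMask) == hashMiddle))
            {
                var entryIndex = slot & IndexMask;
                if (entryKeyEquals(entryIndex))                 // resolve middle-hash collisions via the real key
                    return entryIndex;
            }
            i = (i + 1) & IndexMask;
            slot = slots[i];
            ++probe;
        }
        return -1;
    }
}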
@@ -1462,20 +1462,20 @@ internal ref TEntry TryGetRefInEntries2(K key, out bool found)
    var h = hashesAndIndexes.GetSurePresentItem(hashIndex);

    // 1. Skip over hashes with the bigger and equal probes. The hashes with bigger probes overlapping from the earlier ideal positions
-   var probes = 1;
+   var probe = 1;

-   while ((h >>> ProbeCountShift) >= probes)
+   while ((h >>> ProbeCountShift) >= probe)
    {
        // 2. For the equal probes check for equality the hash middle part, then check the entry
-       if (((h >>> ProbeCountShift) == probes) & ((h & hashMiddleMask) == hashMiddle))
+       if (((h >>> ProbeCountShift) == probe) & ((h & hashMiddleMask) == hashMiddle))
        {
            ref var e = ref GetSurePresentEntryRef(h & indexMask);
            if (found = default(TEq).Equals(e.Key, key))
                return ref e;
        }

        h = hashesAndIndexes.GetSurePresentItem(++hashIndex & indexMask);
-       ++probes;
+       ++probe;
    }

    found = false;
@@ -1523,14 +1523,14 @@ internal int ResizeHashes(int indexMask)
    var indexWithNextBit = (oldHash & oldCapacity) | (((i + 1) - (oldHash >>> ProbeCountShift)) & indexMask);

    // no need for robin-hooding because we already did it for the old hashes and now just filling the hashes into the new array which are already in order
-   var probes = 1;
+   var probe = 1;
    ref var newHash = ref newHashes.GetSurePresentItemRef(indexWithNextBit);
    while (newHash != 0)
    {
        newHash = ref newHashes.GetSurePresentItemRef(++indexWithNextBit & newIndexMask);
-       ++probes;
+       ++probe;
    }
-   newHash = (probes << ProbeCountShift) | (oldHash & newHashAndIndexMask);
+   newHash = (probe << ProbeCountShift) | (oldHash & newHashAndIndexMask);
    }
    if (++i >= oldCapacityWithOverflowSegment)
        break;
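The indexWithNextBit line recovers the hash's ideal position in the old table (current position minus probe count plus one, masked for wrap-around) and ORs in the hash bit equal to oldCapacity, which selects the lower or upper half of the doubled table. A small worked example with made-up numbers, as top-level C#:

// Made-up numbers for illustration: old capacity 8 (index mask 7), an old hash found at
// position 6 with probe count 2, and the hash's "next" bit (the one equal to oldCapacity) set.
const int oldCapacity = 8;
const int indexMask = oldCapacity - 1;
int i = 6;                                              // current position in the old hashes array
int probeCount = 2;                                     // taken from the top bits of the old hash

int idealIndex = ((i + 1) - probeCount) & indexMask;    // 7 - 2 = 5: where probe 1 would have put it
int nextBit = oldCapacity;                              // assumed set in this example's hash
int indexWithNextBit = nextBit | idealIndex;            // 8 | 5 = 13: the slot in the doubled table
System.Console.WriteLine(indexWithNextBit);             // 13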