Changeset 46211 in vbox for trunk/src/VBox/Runtime/common
- Timestamp: May 22, 2013, 11:00:02 AM
- File: 1 edited
Legend:
- Unmodified: context lines, prefixed with a space
- Added: lines prefixed with +
- Removed: lines prefixed with -
trunk/src/VBox/Runtime/common/string/strcache.cpp
--- trunk/src/VBox/Runtime/common/string/strcache.cpp (r46208)
+++ trunk/src/VBox/Runtime/common/string/strcache.cpp (r46211)

@@ -60 +60 @@
 #define RTSTRCACHE_COLLISION_INCR(uHashLen)     ( ((uHashLen >> 8) | 1) )
 
+/** The initial hash table size. Must be power of two. */
+#define RTSTRCACHE_INITIAL_HASH_SIZE            512
+/** The hash table growth factor. */
+#define RTSTRCACHE_HASH_GROW_FACTOR             4
+
 /**
  * The RTSTRCACHEENTRY size threshold at which we stop using our own allocator

@@ -73 +78 @@
 /** The RTSTRCACHE_HEAP_THRESHOLD_BIT as a byte limit. */
 #define RTSTRCACHE_HEAP_THRESHOLD               RT_BIT_32(RTSTRCACHE_HEAP_THRESHOLD_BIT)
+/** Big (heap) entry size alignment. */
+#define RTSTRCACHE_HEAP_ENTRY_SIZE_ALIGN        16
 
 #ifdef RTSTRCACHE_WITH_MERGED_ALLOCATOR

@@ -83 +90 @@
 /** The number of bytes (power of two) that the merged allocation lists should
  * be grown by. Must be much greater than RTSTRCACHE_MERGED_THRESHOLD. */
-# define RTSTRCACHE_MERGED_GROW_SIZE_32K
+# define RTSTRCACHE_MERGED_GROW_SIZE            _32K
 #endif
 

@@ -91 +98 @@
 
 /** The number of fixed sized lists. */
-#define RTSTRCACHE_NUM_OF_FIXED_SIZES           10
+#define RTSTRCACHE_NUM_OF_FIXED_SIZES           12
 
 

@@ -264 +271 @@
     /** List of big cache entries. */
     RTLISTANCHOR            BigEntryList;
+
+    /** @name Statistics
+     * @{ */
+    /** The total size of all chunks. */
+    size_t                  cbChunks;
+    /** The total length of all the strings, terminators included. */
+    size_t                  cbStrings;
+    /** The total size of all the big entries. */
+    size_t                  cbBigEntries;
+    /** Hash collisions. */
+    size_t                  cHashCollisions;
+    /** Secondary hash collisions. */
+    size_t                  cHashCollisions2;
+    /** The number of inserts to compare cHashCollisions to. */
+    size_t                  cHashInserts;
+    /** The number of rehashes. */
+    uint32_t                cRehashes;
+    /** @} */
+
     /** Critical section protecting the cache structures. */
     RTCRITSECT              CritSect;

@@ -278 +304 @@
 static const uint32_t g_acbFixedLists[RTSTRCACHE_NUM_OF_FIXED_SIZES] =
 {
-    16, 32, 64, 128, 192, 256, 320, 384, 448, 512
+    16, 32, 48, 64, 96, 128, 192, 256, 320, 384, 448, 512
 };
 

@@ -301 +327 @@
     if (pThis)
     {
-        pThis->cHashTab   = 512;
+        pThis->cHashTab   = RTSTRCACHE_INITIAL_HASH_SIZE;
         pThis->papHashTab = (PRTSTRCACHEENTRY*)RTMemAllocZ(sizeof(pThis->papHashTab[0]) * pThis->cHashTab);
         if (pThis->papHashTab)

@@ -370 +396 @@
 
 
-
 /**
  * Selects the fixed free list index for a given minimum entry size.

@@ -447 +472 @@
      * Allocate a new hash table two times the size of the old one.
      */
-    uint32_t          cNew   = pThis->cHashTab * 2;
+    uint32_t          cNew   = pThis->cHashTab * RTSTRCACHE_HASH_GROW_FACTOR;
     PRTSTRCACHEENTRY *papNew = (PRTSTRCACHEENTRY *)RTMemAllocZ(sizeof(papNew[0]) * cNew);
     if (papNew == NULL)

@@ -460 +485 @@
     pThis->papHashTab = papNew;
     pThis->cHashTab   = cNew;
+    pThis->cRehashes++;
 
     while (iOld-- > 0)

@@ -576 +602 @@
     pChunk->pNext = pThis->pChunkList;
     pThis->pChunkList = pChunk;
+    pThis->cbChunks += cbChunk;
     AssertCompile(sizeof(*pChunk) <= sizeof(*pFree));
 

@@ -639 +666 @@
      */
     size_t cbEntry = RT_UOFFSETOF(RTSTRCACHEBIGENTRY, Core.szString[cchString + 1]);
-    PRTSTRCACHEBIGENTRY pBigEntry = (PRTSTRCACHEBIGENTRY)RTMemAlloc(RT_ALIGN_Z(cbEntry, 64));
+    PRTSTRCACHEBIGENTRY pBigEntry = (PRTSTRCACHEBIGENTRY)RTMemAlloc(RT_ALIGN_Z(cbEntry, RTSTRCACHE_HEAP_ENTRY_SIZE_ALIGN));
     if (!pBigEntry)
         return NULL;

@@ -647 +674 @@
      */
     RTListAppend(&pThis->BigEntryList, &pBigEntry->ListEntry);
+    pThis->cbBigEntries += cbEntry;
     pBigEntry->cchString = cchString;
     pBigEntry->uHash     = uHash;

@@ -685 +713 @@
     pChunk->pNext = pThis->pChunkList;
     pThis->pChunkList = pChunk;
+    pThis->cbChunks += RTSTRCACHE_FIXED_GROW_SIZE;
 
     PRTSTRCACHEFREE pPrev = NULL;

@@ -740 +769 @@
  *                            is returned (same as what
  *                            rtStrCacheFindEmptyHashTabEntry would return).
+ * @param   pcCollisions      Where to return a collision counter.
  */
 static PRTSTRCACHEENTRY rtStrCacheLookUp(PRTSTRCACHEINT pThis, uint32_t uHashLen, uint32_t cchString, const char *pchString,
-                                         uint32_t *piFreeHashTabEntry)
+                                         uint32_t *piFreeHashTabEntry, uint32_t *pcCollisions)
 {
     *piFreeHashTabEntry = UINT32_MAX;
+    *pcCollisions       = 0;
 
     uint16_t cchStringFirst = RT_UOFFSETOF(RTSTRCACHEENTRY, szString[cchString + 1]) < RTSTRCACHE_HEAP_THRESHOLD

@@ -781 +812 @@
             }
         }
+
+        if (*piFreeHashTabEntry == UINT32_MAX)
+            *pcCollisions += 1;
 
         /* Record the first NIL index for insertion in case we don't get a hit. */

@@ -810 +844 @@
     RTSTRCACHE_CHECK(pThis);
 
+    uint32_t cCollisions;
     uint32_t iFreeHashTabEntry;
-    PRTSTRCACHEENTRY pEntry = rtStrCacheLookUp(pThis, uHashLen, cchString32, pchString, &iFreeHashTabEntry);
+    PRTSTRCACHEENTRY pEntry = rtStrCacheLookUp(pThis, uHashLen, cchString32, pchString, &iFreeHashTabEntry, &cCollisions);
     if (pEntry)
     {

@@ -851 +886 @@
             pThis->papHashTab[iFreeHashTabEntry] = pEntry;
             pThis->cStrings++;
+            pThis->cHashInserts++;
+            pThis->cHashCollisions  += cCollisions > 0;
+            pThis->cHashCollisions2 += cCollisions > 1;
+            pThis->cbStrings += cchString32 + 1;
             RTStrCacheRelease(hStrCache, pEntry->szString);
 

@@ -861 +900 @@
         pThis->papHashTab[iFreeHashTabEntry] = pEntry;
         pThis->cStrings++;
+        pThis->cHashInserts++;
+        pThis->cHashCollisions  += cCollisions > 0;
+        pThis->cHashCollisions2 += cCollisions > 1;
+        pThis->cbStrings += cchString32 + 1;
         Assert(pThis->cStrings < pThis->cHashTab && pThis->cStrings > 0);
     }

@@ -949 +992 @@
 
     /* Remove it from the hash table. */
-    uint32_t uHashLen = RT_MAKE_U32(pStr->uHash,
-                                    pStr->cchString == RTSTRCACHEENTRY_BIG_LEN
-                                    ? RT_FROM_MEMBER(pStr, RTSTRCACHEBIGENTRY, Core)->cchString
-                                    : pStr->cchString);
-    uint32_t iHash    = uHashLen % pThis->cHashTab;
+    uint32_t cchString = pStr->cchString == RTSTRCACHEENTRY_BIG_LEN
+                       ? RT_FROM_MEMBER(pStr, RTSTRCACHEBIGENTRY, Core)->cchString
+                       : pStr->cchString;
+    uint32_t uHashLen  = RT_MAKE_U32(pStr->uHash, cchString);
+    uint32_t iHash     = uHashLen % pThis->cHashTab;
     if (pThis->papHashTab[iHash] == pStr)
         pThis->papHashTab[iHash] = PRTSTRCACHEENTRY_NIL;

@@ -978 +1021 @@
 
     pThis->cStrings--;
+    pThis->cbStrings -= cchString;
     Assert(pThis->cStrings < pThis->cHashTab);
 

@@ -1087 +1131 @@
         PRTSTRCACHEBIGENTRY pBigStr = RT_FROM_MEMBER(pStr, RTSTRCACHEBIGENTRY, Core);
         RTListNodeRemove(&pBigStr->ListEntry);
+        pThis->cbBigEntries -= RT_ALIGN_32(RT_UOFFSETOF(RTSTRCACHEBIGENTRY, Core.szString[cchString + 1]),
+                                           RTSTRCACHE_HEAP_ENTRY_SIZE_ALIGN);
 
         RTSTRCACHE_CHECK(pThis);

@@ -1146 +1192 @@
 RT_EXPORT_SYMBOL(RTStrCacheIsRealImpl);
 
+
+RTDECL(uint32_t) RTStrCacheGetStats(RTSTRCACHE hStrCache, size_t *pcbStrings, size_t *pcbChunks, size_t *pcbBigEntries,
+                                    uint32_t *pcHashCollisions, uint32_t *pcHashCollisions2, uint32_t *pcHashInserts,
+                                    uint32_t *pcRehashes)
+{
+    PRTSTRCACHEINT pThis = hStrCache;
+    RTSTRCACHE_VALID_RETURN_RC(pThis, UINT32_MAX);
+
+    RTCritSectEnter(&pThis->CritSect);
+
+    if (pcbStrings)
+        *pcbStrings        = pThis->cbStrings;
+    if (pcbChunks)
+        *pcbChunks         = pThis->cbChunks;
+    if (pcbBigEntries)
+        *pcbBigEntries     = pThis->cbBigEntries;
+    if (pcHashCollisions)
+        *pcHashCollisions  = pThis->cHashCollisions;
+    if (pcHashCollisions2)
+        *pcHashCollisions2 = pThis->cHashCollisions2;
+    if (pcHashInserts)
+        *pcHashInserts     = pThis->cHashInserts;
+    if (pcRehashes)
+        *pcRehashes        = pThis->cRehashes;
+    uint32_t cStrings = pThis->cStrings;
+
+    RTCritSectLeave(&pThis->CritSect);
+    return cStrings;
+}
+RT_EXPORT_SYMBOL(RTStrCacheRelease);
+
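Usage note (not part of the changeset): the main user-visible addition above is RTStrCacheGetStats. The sketch below shows how a caller might dump the new statistics; it assumes the matching RTStrCacheGetStats declaration is exposed through iprt/strcache.h in a companion header change, and the cache name "example" and the 4096-string loop are arbitrary illustration values.

    /* Minimal caller-side sketch, assuming RTStrCacheGetStats is declared in
       iprt/strcache.h with the signature shown in the diff above. */
    #include <iprt/initterm.h>
    #include <iprt/strcache.h>
    #include <iprt/stream.h>
    #include <iprt/string.h>
    #include <iprt/err.h>

    int main(int argc, char **argv)
    {
        RTR3InitExe(argc, &argv, 0);

        RTSTRCACHE hStrCache;
        int rc = RTStrCacheCreate(&hStrCache, "example");
        if (RT_SUCCESS(rc))
        {
            /* Intern a few thousand distinct strings so the hash table has to
               grow (by RTSTRCACHE_HASH_GROW_FACTOR) at least once. */
            for (uint32_t i = 0; i < 4096; i++)
            {
                char szTmp[32];
                RTStrPrintf(szTmp, sizeof(szTmp), "string-%u", i);
                RTStrCacheEnter(hStrCache, szTmp); /* returned pointer not released here; the cache is destroyed below */
            }

            size_t   cbStrings, cbChunks, cbBigEntries;
            uint32_t cHashCollisions, cHashCollisions2, cHashInserts, cRehashes;
            uint32_t cStrings = RTStrCacheGetStats(hStrCache, &cbStrings, &cbChunks, &cbBigEntries,
                                                   &cHashCollisions, &cHashCollisions2, &cHashInserts,
                                                   &cRehashes);
            RTPrintf("strings=%u cbStrings=%zu cbChunks=%zu cbBigEntries=%zu\n"
                     "collisions=%u collisions2=%u inserts=%u rehashes=%u\n",
                     cStrings, cbStrings, cbChunks, cbBigEntries,
                     cHashCollisions, cHashCollisions2, cHashInserts, cRehashes);

            RTStrCacheDestroy(hStrCache);
        }
        return 0;
    }

Reading the counters as instrumented in the diff: cHashCollisions counts inserts that hit at least one occupied slot, cHashCollisions2 counts inserts that had to probe more than once, and cHashInserts is the denominator to compare them against, so the ratios give a quick read on hash quality as the table is rehashed (cRehashes) and grown.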