Changeset 109165 in vbox for trunk/src/VBox/Additions
- Timestamp: May 5, 2025 6:17:33 PM
- svn:sync-xref-src-repo-rev: 168723
- Location: trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm
- Files: 5 edited
trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm/VBoxMPTypes.h
r108641 → r109165

  /*
- * Copyright (C) 2011-2024 Oracle and/or its affiliates.
+ * Copyright (C) 2011-2025 Oracle and/or its affiliates.
  *
  * This file is part of VirtualBox base platform packages, as
  …
          uint32_t mobid;     /* For surfaces and shaders. */
          uint32_t SegmentId; /* Segment of the allocation. */
-         union
-         {
-             PMDL pMDL; /* Guest backing for aperture segment 2. */
-             struct
-             {
-                 struct VMSVGAMOB *pMob; /* Mob for the pages (including RTR0MEMOBJ). */
-             } gb; /** @todo remove the struct */
-         };
+         struct VMSVGAGBO *pGbo; /* Guest memory for this allocation. */
      } dx;
  #endif /* VBOX_WITH_VMSVGA3D_DX */
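The VBoxMPTypes.h change replaces the per-allocation MDL/mob union with a single pointer to a reference-counted guest buffer object (VMSVGAGBO). Below is a minimal user-mode sketch of that ownership model with hypothetical names, not the driver's actual types; it only illustrates the reference/unreference pattern that SvgaGboReference and SvgaGboUnreference implement in the changeset.

```cpp
#include <atomic>
#include <cassert>
#include <cstdint>

// Hypothetical stand-in for VMSVGAGBO: backing memory plus a reference count.
struct Gbo
{
    std::atomic<int32_t> cRefs{1};  // creator holds the first reference
    uint32_t cbGbo = 0;             // size of the backing memory in bytes
};

// Mirrors the idea of SvgaGboReference: just bump the count.
static void gboReference(Gbo *pGbo)
{
    if (pGbo)
        pGbo->cRefs.fetch_add(1, std::memory_order_relaxed);
}

// Mirrors the idea of SvgaGboUnreference: drop the count, free at zero,
// and always clear the caller's pointer so it cannot be reused.
static void gboUnreference(Gbo **ppGbo)
{
    if (*ppGbo)
    {
        int32_t cNew = (*ppGbo)->cRefs.fetch_sub(1, std::memory_order_acq_rel) - 1;
        assert(cNew >= 0);
        if (cNew == 0)
            delete *ppGbo;
        *ppGbo = nullptr;
    }
}

int main()
{
    Gbo *pGbo = new Gbo{};     // e.g. created for an allocation
    gboReference(pGbo);        // e.g. a mob takes a second reference
    Gbo *pMobRef = pGbo;

    gboUnreference(&pGbo);     // the allocation releases its reference
    gboUnreference(&pMobRef);  // the mob releases the last reference -> freed
    return 0;
}
```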
trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm/gallium/Svga.cpp
r108786 r109165 5 5 6 6 /* 7 * Copyright (C) 2016-202 4Oracle and/or its affiliates.7 * Copyright (C) 2016-2025 Oracle and/or its affiliates. 8 8 * 9 9 * This file is part of VirtualBox base platform packages, as … … 61 61 62 62 for (uint32_t i = 0; i < RT_ELEMENTS(pSvga->aOT); ++i) 63 { 64 SvgaGboFree(&pSvga->aOT[i].gbo); 65 66 RTR0MemObjFree(pSvga->aOT[i].hMemObj, true); 67 pSvga->aOT[i].hMemObj = NIL_RTR0MEMOBJ; 68 } 63 SvgaGboUnreference(pSvga, &pSvga->aOT[i].pGbo); 69 64 70 65 return Status; … … 74 69 struct VMSVGAOTFREE 75 70 { 76 VMSVGAGBO gbo; 77 RTR0MEMOBJ hMemObj; 71 PVMSVGAGBO pGbo; 78 72 }; 79 73 … … 81 75 static DECLCALLBACK(void) svgaOTFreeCb(VBOXWDDM_EXT_VMSVGA *pSvga, void *pvData, uint32_t cbData) 82 76 { 83 RT_NOREF(pSvga);84 77 AssertReturnVoid(cbData == sizeof(struct VMSVGAOTFREE)); 85 78 struct VMSVGAOTFREE *p = (struct VMSVGAOTFREE *)pvData; 86 SvgaGboFree(&p->gbo); 87 RTR0MemObjFree(p->hMemObj, true); 79 SvgaGboUnreference(pSvga, &p->pGbo); 88 80 } 89 81 … … 94 86 uint32_t cMaxEntries; 95 87 } VMSVGAOTINFO, *PVMSVGAOTINFO; 96 97 88 98 89 static VMSVGAOTINFO const s_aOTInfo[SVGA_OTABLE_DX_MAX] = … … 133 124 cbOT *= 2; 134 125 135 /* Allocate pages for the new COTable. */ 136 RTR0MEMOBJ hMemObjOT; 137 int rc = RTR0MemObjAllocPageTag(&hMemObjOT, cbOT, false /* executable R0 mapping */, "VMSVGAOT"); 138 AssertRCReturn(rc, STATUS_INSUFFICIENT_RESOURCES); 139 140 memset(RTR0MemObjAddress(hMemObjOT), 0, cbOT); 141 142 /* Allocate a new gbo. */ 143 VMSVGAGBO gbo; 144 NTSTATUS Status = SvgaGboInit(&gbo, cbOT >> PAGE_SHIFT); 145 AssertReturnStmt(NT_SUCCESS(Status), 146 RTR0MemObjFree(hMemObjOT, true), 147 Status); 148 149 Status = SvgaGboFillPageTableForMemObj(&gbo, hMemObjOT); 150 AssertReturnStmt(NT_SUCCESS(Status), 151 SvgaGboFree(&gbo); RTR0MemObjFree(hMemObjOT, true), 152 Status); 126 PVMSVGAGBO pGbo; 127 NTSTATUS Status = SvgaGboCreate(pSvga, &pGbo, cbOT, "VMSVGAOT"); 128 AssertReturn(NT_SUCCESS(Status), Status); 129 130 memset(RTR0MemObjAddress(pGbo->hMemObj), 0, cbOT); 153 131 154 132 if (pOT->cEntries == 0) … … 160 138 SVGA3dCmdSetOTableBase64 *pCmd = (SVGA3dCmdSetOTableBase64 *)pvCmd; 161 139 pCmd->type = enmType; 162 pCmd->baseAddress = gbo.base;163 pCmd->sizeInBytes = gbo.cbGbo;140 pCmd->baseAddress = pGbo->base; 141 pCmd->sizeInBytes = pGbo->cbGbo; 164 142 pCmd->validSizeInBytes = 0; 165 pCmd->ptDepth = gbo.enmMobFormat;143 pCmd->ptDepth = pGbo->enmMobFormat; 166 144 167 145 SvgaCmdBufCommit(pSvga, sizeof(*pCmd)); 168 146 } 169 147 else 170 AssertFailedReturnStmt(SvgaGbo Free(&gbo); RTR0MemObjFree(hMemObjOT, true),148 AssertFailedReturnStmt(SvgaGboUnreference(pSvga, &pGbo), 171 149 STATUS_INSUFFICIENT_RESOURCES); 172 150 } … … 179 157 SVGA3dCmdGrowOTable *pCmd = (SVGA3dCmdGrowOTable *)pvCmd; 180 158 pCmd->type = enmType; 181 pCmd->baseAddress = gbo.base;182 pCmd->sizeInBytes = gbo.cbGbo;159 pCmd->baseAddress = pGbo->base; 160 pCmd->sizeInBytes = pGbo->cbGbo; 183 161 pCmd->validSizeInBytes = pOT->cEntries * pOTInfo->cbEntry; 184 pCmd->ptDepth = gbo.enmMobFormat;162 pCmd->ptDepth = pGbo->enmMobFormat; 185 163 186 164 SvgaCmdBufCommit(pSvga, sizeof(*pCmd)); 187 165 } 188 166 else 189 AssertFailedReturnStmt(SvgaGbo Free(&gbo); RTR0MemObjFree(hMemObjOT, true),167 AssertFailedReturnStmt(SvgaGboUnreference(pSvga, &pGbo), 190 168 STATUS_INSUFFICIENT_RESOURCES); 191 169 192 /* Command buffer completion callback to free the OT. */170 /* Command buffer completion callback to free the old OT. 
*/ 193 171 struct VMSVGAOTFREE callbackData; 194 callbackData.gbo = pOT->gbo; 195 callbackData.hMemObj = pOT->hMemObj; 172 callbackData.pGbo = pOT->pGbo; 196 173 SvgaCmdBufSetCompletionCallback(pSvga, svgaOTFreeCb, &callbackData, sizeof(callbackData)); 197 174 198 memset(&pOT->gbo, 0, sizeof(pOT->gbo));199 pOT->hMemObj = NIL_RTR0MEMOBJ;200 } 201 175 pOT->pGbo = 0; 176 } 177 178 Assert(!pOT->pGbo); 202 179 SvgaCmdBufFlush(pSvga); 203 180 204 pOT->gbo = gbo; 205 pOT->hMemObj = hMemObjOT; 181 pOT->pGbo = pGbo; 206 182 pOT->cEntries = cbOT / pOTInfo->cbEntry; 207 183 … … 223 199 } 224 200 201 225 202 static NTSTATUS svgaCreateMiniportMob(VBOXWDDM_EXT_VMSVGA *pSvga) 226 203 { 227 NTSTATUS Status;228 229 204 uint32_t const cbMiniportMob = RT_ALIGN_32(sizeof(VMSVGAMINIPORTMOB), PAGE_SIZE); 230 RTR0MEMOBJ hMemObjMiniportMob; 231 int rc = RTR0MemObjAllocPageTag(&hMemObjMiniportMob, cbMiniportMob, 232 false /* executable R0 mapping */, "VMSVGAMOB0"); 233 if (RT_SUCCESS(rc)) 234 { 235 Status = SvgaMobCreate(pSvga, &pSvga->pMiniportMob, cbMiniportMob / PAGE_SIZE, 0); 205 206 NTSTATUS Status = SvgaGboCreate(pSvga, &pSvga->pMiniportGbo, cbMiniportMob, "VMSVGAMOB0"); 207 if (NT_SUCCESS(Status)) 208 { 209 Status = SvgaMobAlloc(pSvga, &pSvga->mobidMiniport, pSvga->pMiniportGbo); 236 210 if (NT_SUCCESS(Status)) 237 211 { 238 Status = SvgaMobSetMemObj(pSvga->pMiniportMob, hMemObjMiniportMob); 239 if (NT_SUCCESS(Status)) 212 uint32_t cbCmd = 0; 213 SvgaMobDefine(pSvga, SVGA3D_INVALID_ID, NULL, 0, &cbCmd); 214 void *pvCmd = SvgaCmdBufReserve(pSvga, cbCmd, SVGA3D_INVALID_ID); 215 if (pvCmd) 240 216 { 241 void *pvCmd = SvgaCmdBuf3dCmdReserve(pSvga, SVGA_3D_CMD_DEFINE_GB_MOB64, sizeof(SVGA3dCmdDefineGBMob64), SVGA3D_INVALID_ID); 242 if (pvCmd) 243 { 244 SVGA3dCmdDefineGBMob64 *pCmd = (SVGA3dCmdDefineGBMob64 *)pvCmd; 245 pCmd->mobid = VMSVGAMOB_ID(pSvga->pMiniportMob); 246 pCmd->ptDepth = pSvga->pMiniportMob->gbo.enmMobFormat; 247 pCmd->base = pSvga->pMiniportMob->gbo.base; 248 pCmd->sizeInBytes = pSvga->pMiniportMob->gbo.cbGbo; 249 SvgaCmdBufCommit(pSvga, sizeof(*pCmd)); 250 251 pSvga->pMiniportMobData = (VMSVGAMINIPORTMOB volatile *)RTR0MemObjAddress(hMemObjMiniportMob); 252 memset((void *)pSvga->pMiniportMobData, 0, cbMiniportMob); 253 RTListInit(&pSvga->listMobDeferredDestruction); 254 //pSvga->u64MobFence = 0; 255 } 256 else 257 AssertFailedStmt(Status = STATUS_INSUFFICIENT_RESOURCES); 217 SvgaMobDefine(pSvga, pSvga->mobidMiniport, pvCmd, cbCmd, &cbCmd); 218 SvgaCmdBufCommit(pSvga, cbCmd); 219 220 pSvga->pMiniportMobData = (VMSVGAMINIPORTMOB volatile *)RTR0MemObjAddress(pSvga->pMiniportGbo->hMemObj); 221 memset((void *)pSvga->pMiniportMobData, 0, cbMiniportMob); 222 RTListInit(&pSvga->listMobDeferredDestruction); 223 //pSvga->u64MobFence = 0; 258 224 } 259 } 260 } 261 else 262 AssertFailedStmt(Status = STATUS_INSUFFICIENT_RESOURCES); 225 else 226 AssertFailedStmt(SvgaMobFree(pSvga, &pSvga->mobidMiniport); STATUS_INSUFFICIENT_RESOURCES); 227 } 228 else 229 SvgaGboUnreference(pSvga, &pSvga->pMiniportGbo); 230 } 263 231 264 232 return Status; … … 275 243 /* Wait for buffers to complete. Up to 5 seconds, arbitrary. */ 276 244 int cIntervals = 0; 277 while (!SvgaCmdBufIsIdle(pSvga) && cIntervals++ < 50)245 while (!SvgaCmdBufIsIdle(pSvga) && ASMAtomicReadS32(&pSvga->cQueuedWorkItems) && cIntervals++ < 50) 278 246 { 279 247 /* Give the host some time to process them. 
*/ … … 303 271 pSvga->u32MaxWidth = SVGARegRead(pSvga, SVGA_REG_MAX_WIDTH); 304 272 pSvga->u32MaxHeight = SVGARegRead(pSvga, SVGA_REG_MAX_HEIGHT); 273 274 if (pSvga->u32Caps & SVGA_CAP_GBOBJECTS) 275 { 276 pSvga->u32MaxMobSize = SVGARegRead(pSvga, SVGA_REG_MOB_MAX_SIZE); 277 } 305 278 306 279 if (pSvga->u32Caps & SVGA_CAP_GMR2) … … 357 330 } 358 331 332 #ifdef DEBUG 333 static DECLCALLBACK(int) mobDumpCb(PAVLU32NODECORE pNode, void *pvUser); 334 #endif 335 359 336 void SvgaAdapterStop(PVBOXWDDM_EXT_VMSVGA pSvga, 360 337 DXGKRNL_INTERFACE *pDxgkInterface) … … 381 358 } 382 359 383 if (RT_BOOL(pSvga->u32Caps & SVGA_CAP_DX) )360 if (RT_BOOL(pSvga->u32Caps & SVGA_CAP_DX) && pSvga->mobidMiniport != SVGA3D_INVALID_ID) 384 361 { 385 362 /* Free the miniport mob at last. Can't use SvgaMobDestroy here because it tells the host to write a fence … … 389 366 { 390 367 SVGA3dCmdDestroyGBMob *pCmd = (SVGA3dCmdDestroyGBMob *)pvCmd; 391 pCmd->mobid = VMSVGAMOB_ID(pSvga->pMiniportMob);368 pCmd->mobid = pSvga->mobidMiniport; 392 369 SvgaCmdBufCommit(pSvga, sizeof(*pCmd)); 393 370 } … … 399 376 400 377 if (RT_BOOL(pSvga->u32Caps & SVGA_CAP_DX)) 401 SvgaMobFree(pSvga, pSvga->pMiniportMob); /* After svgaHwStop because it waits for command buffer completion. */ 378 { 379 SvgaMobFree(pSvga, &pSvga->mobidMiniport); /* After svgaHwStop because it waits for command buffer completion. */ 380 SvgaGboUnreference(pSvga, &pSvga->pMiniportGbo); 381 } 402 382 403 383 Status = pDxgkInterface->DxgkCbUnmapMemory(pDxgkInterface->DeviceHandle, 404 384 (PVOID)pSvga->pu32FIFO); 405 385 Assert(Status == STATUS_SUCCESS); RT_NOREF(Status); 386 387 #ifdef DEBUG 388 uint32_t cMobs = 0; 389 RTAvlU32DoWithAll(&pSvga->MobTree, 0, mobDumpCb, &cMobs); 390 GALOG(("cMobs = %u\n", cMobs)); 391 Assert(cMobs == 0); 392 #endif 406 393 407 394 GaMemFree(pSvga); … … 435 422 // pSvga->MobTree = NULL; 436 423 RTListInit(&pSvga->DeletedHostObjectsList); 424 pSvga->mobidMiniport = SVGA3D_INVALID_ID; 437 425 438 426 /* The port IO address is also needed for hardware access. */ … … 1868 1856 typedef struct GAWDDMREGION 1869 1857 { 1870 /* Key is GMR id (equal to u32GmrId). */1858 /* Key is GMR id (equal to 'mobid'). */ 1871 1859 AVLU32NODECORE Core; 1872 1860 /* Pointer to a graphics context device (PVBOXWDDM_DEVICE) the GMR is associated with. */ 1873 1861 void *pvOwner; 1874 /* The ring-3 mapping memory object handle (from mob). */ 1862 /* Memory. */ 1863 PVMSVGAGBO pGbo; 1864 /* A corresponding MOB, which provides the GMR id to the host. */ 1865 SVGAMobId mobid; 1866 /* The ring-3 mapping memory object handle. */ 1875 1867 RTR0MEMOBJ MapObjR3; 1876 1868 RTR3PTR pvR3; 1877 /* A corresponding MOB, which provides the GMR id and RTR0MEMOBJ for the region memory. */ 1878 PVMSVGAMOB pMob; 1879 /* The allocated size in pages. */ 1869 /* The allocated size in pages and number of elements in the aPhys array. */ 1880 1870 uint32_t u32NumPages; 1881 1871 /* Physical addresses of the pages. */ … … 1884 1874 1885 1875 1876 /* Deallocate gmr. 1877 */ 1878 static void gmrFreeMemory(VBOXWDDM_EXT_VMSVGA *pSvga, GAWDDMREGION *pRegion) 1879 { 1880 if (pRegion->MapObjR3 != NIL_RTR0MEMOBJ) 1881 { 1882 int rc = RTR0MemObjFree(pRegion->MapObjR3, false); 1883 AssertRC(rc); 1884 pRegion->MapObjR3 = NIL_RTR0MEMOBJ; 1885 } 1886 1887 SvgaMobFree(pSvga, &pRegion->mobid); 1888 1889 SvgaGboUnreference(pSvga, &pRegion->pGbo); 1890 } 1891 1886 1892 /* Allocate memory pages and the corresponding mob. 1887 1893 */ … … 1890 1896 NTSTATUS Status; 1891 1897 1892 /* Allocate memory. 
*/ 1893 RTR0MEMOBJ MemObj; 1894 int rc = RTR0MemObjAllocPageTag(&MemObj, u32NumPages << PAGE_SHIFT, 1895 false /* executable R0 mapping */, "VMSVGAGMR"); 1896 AssertRC(rc); 1897 if (RT_SUCCESS(rc)) 1898 { 1899 if (!RTR0MemObjWasZeroInitialized(MemObj)) 1900 RT_BZERO(RTR0MemObjAddress(MemObj), (size_t)u32NumPages << PAGE_SHIFT); 1901 1902 /* Allocate corresponding mob. */ 1903 Status = SvgaMobCreate(pSvga, &pRegion->pMob, u32NumPages, 0); 1904 Assert(NT_SUCCESS(Status)); 1905 if (NT_SUCCESS(Status)) 1906 { 1907 Status = SvgaMobSetMemObj(pRegion->pMob, MemObj); 1898 Status = SvgaGboCreate(pSvga, &pRegion->pGbo, u32NumPages << PAGE_SHIFT, "VMSVGAGMR"); 1899 1900 if (NT_SUCCESS(Status)) 1901 { 1902 int rc = RTR0MemObjMapUser(&pRegion->MapObjR3, pRegion->pGbo->hMemObj, (RTR3PTR)-1, 0, 1903 RTMEM_PROT_WRITE | RTMEM_PROT_READ, NIL_RTR0PROCESS); 1904 AssertRC(rc); 1905 if (RT_SUCCESS(rc)) 1906 { 1907 if (!RTR0MemObjWasZeroInitialized(pRegion->pGbo->hMemObj)) 1908 RT_BZERO(RTR0MemObjAddress(pRegion->pGbo->hMemObj), (size_t)u32NumPages << PAGE_SHIFT); 1909 1910 /* Allocate corresponding mob. */ 1911 Status = SvgaMobAlloc(pSvga, &pRegion->mobid, pRegion->pGbo); 1908 1912 Assert(NT_SUCCESS(Status)); 1909 1913 if (NT_SUCCESS(Status)) 1914 { 1915 /* 1916 * Success. 1917 */ 1910 1918 return STATUS_SUCCESS; 1911 } 1912 1913 if ( pRegion->pMob 1914 && pRegion->pMob->hMemObj == NIL_RTR0MEMOBJ) 1915 { 1916 /* The memory object has not been assigned to the mob yet. Clean up the local object. 1917 * Otherwise the caller will clean up. 1918 */ 1919 int rc2 = RTR0MemObjFree(MemObj, false); 1920 AssertRC(rc2); 1921 } 1919 } 1920 } 1921 else 1922 AssertFailedStmt(Status = STATUS_INSUFFICIENT_RESOURCES); 1923 1924 /* 1925 * Cleanup. 1926 */ 1927 gmrFreeMemory(pSvga, pRegion); 1922 1928 } 1923 1929 else … … 1934 1940 if (NT_SUCCESS(Status)) 1935 1941 { 1936 int rc = RTR0MemObjMapUser(&pRegion->MapObjR3, pRegion->pMob->hMemObj, (RTR3PTR)-1, 0, 1937 RTMEM_PROT_WRITE | RTMEM_PROT_READ, NIL_RTR0PROCESS); 1938 AssertRC(rc); 1939 if (RT_SUCCESS(rc)) 1940 { 1941 uint32_t iPage; 1942 for (iPage = 0; iPage < u32NumPages; ++iPage) 1943 pRegion->aPhys[iPage] = RTR0MemObjGetPagePhysAddr(pRegion->pMob->hMemObj, iPage); 1944 1945 pRegion->pvR3 = RTR0MemObjAddressR3(pRegion->MapObjR3); 1946 1947 pRegion->pvOwner = pvOwner; 1948 pRegion->u32NumPages = u32NumPages; 1949 } 1950 else 1951 AssertFailedStmt(Status = STATUS_INSUFFICIENT_RESOURCES); 1942 uint32_t iPage; 1943 for (iPage = 0; iPage < u32NumPages; ++iPage) 1944 pRegion->aPhys[iPage] = RTR0MemObjGetPagePhysAddr(pRegion->pGbo->hMemObj, iPage); 1945 1946 pRegion->pvR3 = RTR0MemObjAddressR3(pRegion->MapObjR3); 1947 1948 pRegion->pvOwner = pvOwner; 1949 pRegion->u32NumPages = u32NumPages; 1952 1950 } 1953 1951 … … 1983 1981 { 1984 1982 SVGAFifoCmdDefineGMR2 *pCmd = (SVGAFifoCmdDefineGMR2 *)pu8Cmd; 1985 pCmd->gmrId = VMSVGAMOB_ID(pRegion->pMob);1983 pCmd->gmrId = pRegion->mobid; 1986 1984 pCmd->numPages = pRegion->u32NumPages; 1987 1985 pu8Cmd += sizeof(*pCmd); … … 1994 1992 { 1995 1993 SVGAFifoCmdRemapGMR2 *pCmd = (SVGAFifoCmdRemapGMR2 *)pu8Cmd; 1996 pCmd->gmrId = VMSVGAMOB_ID(pRegion->pMob);1994 pCmd->gmrId = pRegion->mobid; 1997 1995 pCmd->flags = SVGA_REMAP_GMR2_PPN64; 1998 1996 pCmd->offsetPages = 0; … … 2020 2018 { 2021 2019 SVGA3dCmdDefineGBMob64 *pCmd = (SVGA3dCmdDefineGBMob64 *)pu8Cmd; 2022 pCmd->mobid = VMSVGAMOB_ID(pRegion->pMob);2023 pCmd->ptDepth = pRegion->p Mob->gbo.enmMobFormat;2024 pCmd->base = pRegion->p Mob->gbo.base;2025 pCmd->sizeInBytes = pRegion->p 
Mob->gbo.cbGbo;2020 pCmd->mobid = pRegion->mobid; 2021 pCmd->ptDepth = pRegion->pGbo->enmMobFormat; 2022 pCmd->base = pRegion->pGbo->base; 2023 pCmd->sizeInBytes = pRegion->pGbo->cbGbo; 2026 2024 pu8Cmd += sizeof(*pCmd); 2027 2025 } … … 2040 2038 AssertReturn(pRegion, STATUS_INVALID_PARAMETER); 2041 2039 2042 /* Mapping must be freed prior to the mob destruction. Otherwise, due to a race condition,2043 * SvgaMobFree could free the mapping in a system worker thread after DPC, which would not2044 * work obviously, because the mapping was created for another process.2045 */2046 if (pRegion->MapObjR3 != NIL_RTR0MEMOBJ)2047 {2048 int rc = RTR0MemObjFree(pRegion->MapObjR3, false);2049 AssertRC(rc);2050 pRegion->MapObjR3 = NIL_RTR0MEMOBJ;2051 }2052 2053 2040 /* Issue commands to delete the gmr. */ 2054 2041 uint32_t cbRequired = 0; 2055 SvgaMobDestroy(pSvga, pRegion->pMob, NULL, 0, &cbRequired); 2042 if (RT_BOOL(pSvga->u32Caps & SVGA_CAP_DX)) 2043 SvgaMobDestroy(pSvga, 0, NULL, 0, &cbRequired); 2056 2044 cbRequired += sizeof(uint32_t) + sizeof(SVGAFifoCmdDefineGMR2); 2057 2045 … … 2068 2056 2069 2057 SVGAFifoCmdDefineGMR2 *pCmd = (SVGAFifoCmdDefineGMR2 *)pu8Cmd; 2070 pCmd->gmrId = VMSVGAMOB_ID(pRegion->pMob);2058 pCmd->gmrId = pRegion->mobid; 2071 2059 pCmd->numPages = 0; 2072 2060 pu8Cmd += sizeof(*pCmd); 2073 2061 2074 uint32_t cbCmd = 0; 2075 NTSTATUS Status = SvgaMobDestroy(pSvga, pRegion->pMob, pu8Cmd, 2076 cbRequired - ((uintptr_t)pu8Cmd - (uintptr_t)pvCmd), 2077 &cbCmd); 2078 AssertReturn(NT_SUCCESS(Status), Status); 2079 pu8Cmd += cbCmd; 2062 if (RT_BOOL(pSvga->u32Caps & SVGA_CAP_DX)) 2063 { 2064 uint32_t cbCmd = 0; 2065 NTSTATUS Status = SvgaMobDestroy(pSvga, pRegion->mobid, pu8Cmd, 2066 cbRequired - ((uintptr_t)pu8Cmd - (uintptr_t)pvCmd), 2067 &cbCmd); 2068 AssertReturn(NT_SUCCESS(Status), Status); 2069 pu8Cmd += cbCmd; 2070 } 2080 2071 2081 2072 Assert(((uintptr_t)pu8Cmd - (uintptr_t)pvCmd) == cbRequired); 2082 2073 SvgaCmdBufCommit(pSvga, ((uintptr_t)pu8Cmd - (uintptr_t)pvCmd)); 2083 2084 /* The mob will be deleted in DPC routine after host reports completion of the above commands. */2085 pRegion->pMob = NULL;2086 2074 2087 2075 #ifdef DEBUG … … 2089 2077 ASMAtomicSubU32(&pSvga->cAllocatedGmrPages, pRegion->u32NumPages); 2090 2078 #endif 2079 2080 /* The mob id will be deleted in DPC routine after host reports completion of the above commands. 
*/ 2081 pRegion->mobid = SVGA3D_INVALID_ID; 2082 gmrFreeMemory(pSvga, pRegion); 2083 2091 2084 GaMemFree(pRegion); 2092 2085 return STATUS_SUCCESS; … … 2110 2103 { 2111 2104 AssertReturn(pCtx->cIds < pCtx->cMaxIds, -1); 2112 pCtx->au32Ids[pCtx->cIds++] = VMSVGAMOB_ID(pRegion->pMob);2105 pCtx->au32Ids[pCtx->cIds++] = pRegion->mobid; 2113 2106 } 2114 2107 return 0; … … 2142 2135 if (pRegion) 2143 2136 { 2144 Assert( VMSVGAMOB_ID(pRegion->pMob)== pCtx->au32Ids[i]);2137 Assert(pRegion->mobid == pCtx->au32Ids[i]); 2145 2138 GALOG(("Deallocate gmrId %d, pv %p, aPhys[0] %RHp\n", 2146 VMSVGAMOB_ID(pRegion->pMob), pRegion->pvR3, pRegion->aPhys[0]));2139 pRegion->mobid, pRegion->pvR3, pRegion->aPhys[0])); 2147 2140 2148 2141 gmrDestroy(pSvga, pRegion); … … 2168 2161 AssertReturn(pRegion, STATUS_INVALID_PARAMETER); 2169 2162 2170 Assert( VMSVGAMOB_ID(pRegion->pMob)== u32GmrId);2163 Assert(pRegion->mobid == u32GmrId); 2171 2164 GALOG(("Freed gmrId %d, pv %p, aPhys[0] %RHp\n", 2172 VMSVGAMOB_ID(pRegion->pMob), pRegion->pvR3, pRegion->aPhys[0]));2165 pRegion->mobid, pRegion->pvR3, pRegion->aPhys[0])); 2173 2166 2174 2167 return gmrDestroy(pSvga, pRegion); … … 2192 2185 AssertReturn(pRegion, STATUS_INVALID_PARAMETER); 2193 2186 2194 Assert( VMSVGAMOB_ID(pRegion->pMob)== u32GmrId);2187 Assert(pRegion->mobid == u32GmrId); 2195 2188 GALOG(("Get gmrId %d, UserAddress 0x%p\n", 2196 VMSVGAMOB_ID(pRegion->pMob), pRegion->pvR3));2189 pRegion->mobid, pRegion->pvR3)); 2197 2190 *pu64UserAddress = (uintptr_t)pRegion->pvR3; 2198 2191 *pu32Size = pRegion->u32NumPages * PAGE_SIZE; … … 2216 2209 if (pRegion) 2217 2210 { 2211 pRegion->mobid = SVGA3D_INVALID_ID; 2212 2218 2213 /* Region id and VGPU10+ mobid are the same. So a mob is always allocated for the gmr. 2219 2214 * The mob provides an id and, if SVGA_CAP_DX is available, is reported to the host on VGPU10. 
… … 2225 2220 if (NT_SUCCESS(Status)) 2226 2221 { 2227 if ( VMSVGAMOB_ID(pRegion->pMob)< pSvga->u32GmrMaxIds)2222 if (pRegion->mobid < pSvga->u32GmrMaxIds) 2228 2223 { 2229 2224 GALOG(("Allocated gmrId %d, pv %p, aPhys[0] %RHp\n", 2230 VMSVGAMOB_ID(pRegion->pMob), pRegion->pvR3, pRegion->aPhys[0]));2225 pRegion->mobid, pRegion->pvR3, pRegion->aPhys[0])); 2231 2226 2232 2227 Status = gmrReportToHost(pSvga, pRegion); … … 2234 2229 if (NT_SUCCESS(Status)) 2235 2230 { 2236 pRegion->Core.Key = VMSVGAMOB_ID(pRegion->pMob);2231 pRegion->Core.Key = pRegion->mobid; 2237 2232 2238 2233 ExAcquireFastMutex(&pSvga->SvgaMutex); … … 2240 2235 ExReleaseFastMutex(&pSvga->SvgaMutex); 2241 2236 2242 *pu32GmrId = VMSVGAMOB_ID(pRegion->pMob);2237 *pu32GmrId = pRegion->mobid; 2243 2238 *pu64UserAddress = (uint64_t)pRegion->pvR3; 2244 2239 … … 2263 2258 } 2264 2259 2265 SvgaMobFree(pSvga, pRegion->pMob); 2266 pRegion->pMob = NULL; 2260 SvgaMobFree(pSvga, &pRegion->mobid); 2267 2261 2268 2262 GaMemFree(pRegion); … … 2336 2330 */ 2337 2331 2338 void SvgaGboFree(VMSVGAGBO *pGbo) 2339 { 2332 void SvgaGboFree(VBOXWDDM_EXT_VMSVGA *pSvga, 2333 VMSVGAGBO *pGbo) 2334 { 2335 RT_NOREF(pSvga); 2336 2337 GALOG(("gbo = %p\n", pGbo)); 2338 if (!pGbo->flags.fMdl) 2339 { 2340 if (pGbo->hMemObj != NIL_RTR0MEMOBJ) 2341 { 2342 int rc = RTR0MemObjFree(pGbo->hMemObj, true); 2343 AssertRC(rc); 2344 pGbo->hMemObj = NIL_RTR0MEMOBJ; 2345 } 2346 } 2347 2340 2348 if (pGbo->hMemObjPT != NIL_RTR0MEMOBJ) 2341 2349 { … … 2344 2352 pGbo->hMemObjPT = NIL_RTR0MEMOBJ; 2345 2353 } 2354 2355 #ifdef DEBUG 2356 ASMAtomicDecU32(&pSvga->cAllocatedGbos); 2357 GALOG(("cAllocatedGbos = %u\n", pSvga->cAllocatedGbos)); 2358 #endif 2359 2346 2360 memset(pGbo, 0, sizeof(*pGbo)); 2347 } 2348 2349 NTSTATUS SvgaGboInit(VMSVGAGBO *pGbo, uint32_t cPages) 2361 GaMemFree(pGbo); 2362 } 2363 2364 static NTSTATUS svgaGboInit(VMSVGAGBO *pGbo, uint32_t cPages) 2350 2365 { 2351 2366 /* … … 2399 2414 2400 2415 2401 NTSTATUS SvgaGboFillPageTableForMDL(PVMSVGAGBO pGbo,2402 PMDL pMdl,2403 uint32_t MdlOffset)2416 static NTSTATUS svgaGboFillPageTableForMDL(PVMSVGAGBO pGbo, 2417 PMDL pMdl, 2418 uint32_t MdlOffset) 2404 2419 { 2405 2420 PPFN_NUMBER paMdlPfn = &MmGetMdlPfnArray(pMdl)[MdlOffset]; … … 2428 2443 2429 2444 2430 NTSTATUS SvgaGboFillPageTableForMemObj(PVMSVGAGBO pGbo,2431 RTR0MEMOBJ hMemObj)2445 static NTSTATUS svgaGboFillPageTableForMemObj(PVMSVGAGBO pGbo, 2446 RTR0MEMOBJ hMemObj) 2432 2447 { 2433 2448 if (pGbo->enmMobFormat == SVGA3D_MOBFMT_PTDEPTH64_0) … … 2455 2470 2456 2471 2472 NTSTATUS SvgaGboCreate(VBOXWDDM_EXT_VMSVGA *pSvga, 2473 PVMSVGAGBO *ppGbo, 2474 uint32_t cbGbo, 2475 const char *pszTag) 2476 { 2477 AssertCompile(NIL_RTR0MEMOBJ == 0); 2478 2479 NTSTATUS Status; 2480 2481 PVMSVGAGBO pGbo = (PVMSVGAGBO)GaMemAllocZero(sizeof(VMSVGAGBO)); 2482 AssertReturn(pGbo, STATUS_INSUFFICIENT_RESOURCES); 2483 2484 pGbo->cRefs = 1; 2485 2486 int rc = RTR0MemObjAllocPageTag(&pGbo->hMemObj, cbGbo, false /* executable R0 mapping */, pszTag); 2487 if (RT_SUCCESS(rc)) 2488 { 2489 size_t const cPages = RTR0MemObjSize(pGbo->hMemObj) / PAGE_SIZE; 2490 if (cPages > 0 && cPages < pSvga->u32MaxMobSize / PAGE_SIZE) 2491 { 2492 Status = svgaGboInit(pGbo, (uint32_t)cPages); 2493 if (NT_SUCCESS(Status)) 2494 { 2495 Status = svgaGboFillPageTableForMemObj(pGbo, pGbo->hMemObj); 2496 if (NT_SUCCESS(Status)) 2497 { 2498 #ifdef DEBUG 2499 ASMAtomicIncU32(&pSvga->cAllocatedGbos); 2500 #endif 2501 *ppGbo = pGbo; 2502 GALOG(("gbo = %p %s\n", pGbo, pszTag)); 2503 return STATUS_SUCCESS; 2504 } 
2505 } 2506 } 2507 else 2508 AssertFailedStmt(Status = STATUS_INVALID_PARAMETER); 2509 } 2510 else 2511 AssertFailedStmt(Status = STATUS_INSUFFICIENT_RESOURCES); 2512 2513 SvgaGboUnreference(pSvga, &pGbo); 2514 return Status; 2515 } 2516 2517 2518 NTSTATUS SvgaGboCreateForMdl(VBOXWDDM_EXT_VMSVGA *pSvga, 2519 PVMSVGAGBO *ppGbo, 2520 SIZE_T NumberOfPages, 2521 PMDL pMdl, 2522 ULONG MdlOffset) 2523 { 2524 NTSTATUS Status; 2525 2526 AssertReturn(NumberOfPages > 0 && NumberOfPages < pSvga->u32MaxMobSize / PAGE_SIZE, STATUS_INVALID_PARAMETER); 2527 2528 PVMSVGAGBO pGbo = (PVMSVGAGBO)GaMemAllocZero(sizeof(VMSVGAGBO)); 2529 AssertReturn(pGbo, STATUS_INSUFFICIENT_RESOURCES); 2530 2531 pGbo->cRefs = 1; 2532 pGbo->flags.fMdl = 1; 2533 pGbo->pMdl = pMdl; 2534 2535 Status = svgaGboInit(pGbo, (uint32_t)NumberOfPages); 2536 if (NT_SUCCESS(Status)) 2537 { 2538 Status = svgaGboFillPageTableForMDL(pGbo, pGbo->pMdl, MdlOffset); 2539 if (NT_SUCCESS(Status)) 2540 { 2541 #ifdef DEBUG 2542 ASMAtomicIncU32(&pSvga->cAllocatedGbos); 2543 #endif 2544 *ppGbo = pGbo; 2545 GALOG(("gbo = %p\n", pGbo)); 2546 return STATUS_SUCCESS; 2547 } 2548 } 2549 2550 SvgaGboUnreference(pSvga, &pGbo); 2551 return Status; 2552 } 2553 2554 2457 2555 /* 2458 2556 * … … 2460 2558 * 2461 2559 */ 2462 2463 static NTSTATUS svgaMobAlloc(VBOXWDDM_EXT_VMSVGA *pSvga, 2464 PVMSVGAMOB *ppMob) 2465 { 2466 GALOG(("[%p]\n", pSvga)); 2467 2468 NTSTATUS Status; 2469 AssertCompile(NIL_RTR0MEMOBJ == 0); 2470 2471 *ppMob = (PVMSVGAMOB)GaMemAllocZero(sizeof(VMSVGAMOB)); 2472 AssertReturn(*ppMob, STATUS_INSUFFICIENT_RESOURCES); 2473 2474 Status = SvgaMobIdAlloc(pSvga, &VMSVGAMOB_ID(*ppMob)); 2475 AssertReturnStmt(NT_SUCCESS(Status), GaMemFree(*ppMob), STATUS_INSUFFICIENT_RESOURCES); 2560 /* Memory OBject tracking structure: and id accisiated with a gbo. */ 2561 typedef struct VMSVGAMOB 2562 { 2563 AVLU32NODECORE core; /* AVL entry. Key is mobid, allocated by the miniport. */ 2564 PVMSVGAGBO pGbo; /* The mob has been created for this GBO and holds a reference to it. */ 2565 uint32_t u64MobFence; /* Mob id can be freed by the guest when the host reports this fence value. 
*/ 2566 RTLISTNODE node; /* VBOXWDDM_EXT_VMSVGA::listMobDeferredDestruction */ 2567 } VMSVGAMOB, *PVMSVGAMOB; 2568 2569 #define VMSVGAMOB_ID(a_pMob) ((a_pMob)->core.Key) 2570 2571 void SvgaMobFree(VBOXWDDM_EXT_VMSVGA *pSvga, 2572 SVGAMobId *pMobid) 2573 { 2574 GALOG(("[%p] %u\n", pSvga, pMobid)); 2575 2576 if (*pMobid != SVGA3D_INVALID_ID) 2577 { 2578 GALOG(("mobid = %u\n", *pMobid)); 2579 2580 KIRQL OldIrql; 2581 KeAcquireSpinLock(&pSvga->MobSpinLock, &OldIrql); 2582 PVMSVGAMOB pMob = (PVMSVGAMOB)RTAvlU32Remove(&pSvga->MobTree, *pMobid); 2583 KeReleaseSpinLock(&pSvga->MobSpinLock, OldIrql); 2584 2585 Assert(pMob); 2586 if (pMob) 2587 { 2588 SvgaGboUnreference(pSvga, &pMob->pGbo); 2589 #ifdef DEBUG 2590 ASMAtomicDecU32(&pSvga->cAllocatedMobs); 2591 GALOG(("cAllocatedMobs = %u\n", pSvga->cAllocatedMobs)); 2592 #endif 2593 2594 NTSTATUS Status = SvgaMobIdFree(pSvga, *pMobid); 2595 Assert(NT_SUCCESS(Status)); RT_NOREF(Status); 2596 GaMemFree(pMob); 2597 } 2598 2599 *pMobid = SVGA3D_INVALID_ID; 2600 } 2601 } 2602 2603 2604 NTSTATUS SvgaMobAlloc(VBOXWDDM_EXT_VMSVGA *pSvga, 2605 SVGAMobId *pMobid, 2606 PVMSVGAGBO pGbo) 2607 { 2608 PVMSVGAMOB pMob = (PVMSVGAMOB)GaMemAllocZero(sizeof(VMSVGAMOB)); 2609 AssertReturn(pMob, STATUS_INSUFFICIENT_RESOURCES); 2610 2611 NTSTATUS Status = SvgaMobIdAlloc(pSvga, &VMSVGAMOB_ID(pMob)); 2612 AssertReturnStmt(NT_SUCCESS(Status), GaMemFree(pMob), STATUS_INSUFFICIENT_RESOURCES); 2613 GALOG(("mobid = %u\n", VMSVGAMOB_ID(pMob))); 2614 2615 pMob->pGbo = pGbo; 2616 SvgaGboReference(pGbo); 2476 2617 2477 2618 KIRQL OldIrql; 2478 2619 KeAcquireSpinLock(&pSvga->MobSpinLock, &OldIrql); 2479 bool fInserted = RTAvlU32Insert(&pSvga->MobTree, & (*ppMob)->core);2620 bool fInserted = RTAvlU32Insert(&pSvga->MobTree, &pMob->core); 2480 2621 KeReleaseSpinLock(&pSvga->MobSpinLock, OldIrql); 2481 2622 Assert(fInserted); RT_NOREF(fInserted); 2482 2623 2483 GALOG(("mobid = %u\n", VMSVGAMOB_ID(*ppMob)));2484 return STATUS_SUCCESS;2485 }2486 2487 void SvgaMobFree(VBOXWDDM_EXT_VMSVGA *pSvga,2488 PVMSVGAMOB pMob)2489 {2490 GALOG(("[%p] %p\n", pSvga, pMob));2491 2492 if (pMob)2493 {2494 GALOG(("mobid = %u\n", VMSVGAMOB_ID(pMob)));2495 2496 KIRQL OldIrql;2497 KeAcquireSpinLock(&pSvga->MobSpinLock, &OldIrql);2498 RTAvlU32Remove(&pSvga->MobTree, pMob->core.Key);2499 KeReleaseSpinLock(&pSvga->MobSpinLock, OldIrql);2500 2501 #ifdef DEBUG2502 ASMAtomicSubU32(&pSvga->cAllocatedMobPages, pMob->gbo.cbGbo / PAGE_SIZE);2503 ASMAtomicDecU32(&pSvga->cAllocatedMobs);2504 #endif2505 2506 SvgaGboFree(&pMob->gbo);2507 2508 if (pMob->hMemObj != NIL_RTR0MEMOBJ)2509 {2510 int rc = RTR0MemObjFree(pMob->hMemObj, true);2511 AssertRC(rc);2512 pMob->hMemObj = NIL_RTR0MEMOBJ;2513 }2514 2515 NTSTATUS Status = SvgaMobIdFree(pSvga, VMSVGAMOB_ID(pMob));2516 Assert(NT_SUCCESS(Status)); RT_NOREF(Status);2517 GaMemFree(pMob);2518 }2519 }2520 2521 PVMSVGAMOB SvgaMobQuery(VBOXWDDM_EXT_VMSVGA *pSvga,2522 uint32_t mobid)2523 {2524 KIRQL OldIrql;2525 KeAcquireSpinLock(&pSvga->MobSpinLock, &OldIrql);2526 PVMSVGAMOB pMob = (PVMSVGAMOB)RTAvlU32Get(&pSvga->MobTree, mobid);2527 KeReleaseSpinLock(&pSvga->MobSpinLock, OldIrql);2528 2529 GALOG(("[%p] mobid = %u -> %p\n", pSvga, mobid, pMob));2530 return pMob;2531 }2532 2533 NTSTATUS SvgaMobCreate(VBOXWDDM_EXT_VMSVGA *pSvga,2534 PVMSVGAMOB *ppMob,2535 uint32_t cMobPages,2536 HANDLE hAllocation)2537 {2538 PVMSVGAMOB pMob;2539 NTSTATUS Status = svgaMobAlloc(pSvga, &pMob);2540 AssertReturn(NT_SUCCESS(Status), Status);2541 2542 Status = SvgaGboInit(&pMob->gbo, cMobPages);2543 
AssertReturnStmt(NT_SUCCESS(Status), SvgaMobFree(pSvga, pMob), Status);2544 2545 pMob->hAllocation = hAllocation;2546 *ppMob = pMob;2547 2548 2624 #ifdef DEBUG 2549 2625 ASMAtomicIncU32(&pSvga->cAllocatedMobs); 2550 ASMAtomicAddU32(&pSvga->cAllocatedMobPages, cMobPages);2551 2626 #endif 2552 2627 2628 *pMobid = VMSVGAMOB_ID(pMob); 2553 2629 return STATUS_SUCCESS; 2554 }2555 2556 NTSTATUS SvgaMobSetMemObj(PVMSVGAMOB pMob,2557 RTR0MEMOBJ hMemObj)2558 {2559 NTSTATUS Status = SvgaGboFillPageTableForMemObj(&pMob->gbo, hMemObj);2560 if (NT_SUCCESS(Status))2561 pMob->hMemObj = hMemObj;2562 return Status;2563 2630 } 2564 2631 … … 2584 2651 AssertReturn(id < SVGA_COTABLE_MAX_IDS, STATUS_INVALID_PARAMETER); 2585 2652 2586 /* Allocate a new larger moband inform the host. */2653 /* Allocate a new larger gbo and inform the host. */ 2587 2654 static uint32_t const s_acbEntry[] = 2588 2655 { … … 2616 2683 cbCOT *= 2; 2617 2684 2618 /* Allocate pagesfor the new COTable. */2619 RTR0MEMOBJ hMemObjCOT;2620 int rc = RTR0MemObjAllocPageTag(&hMemObjCOT, cbCOT, false /* executable R0 mapping */, "VMSVGACOT");2621 AssertR CReturn(rc, STATUS_INSUFFICIENT_RESOURCES);2622 2623 /* Allocate a new mob . */2624 PVMSVGAMOB pMob;2625 NTSTATUS Status = SvgaMobCreate(pSvga, &pMob, cbCOT >> PAGE_SHIFT, 0);2685 /* Allocate memory for the new COTable. */ 2686 PVMSVGAGBO pGbo; 2687 NTSTATUS Status = SvgaGboCreate(pSvga, &pGbo, cbCOT, "VMSVGACOT"); 2688 AssertReturn(NT_SUCCESS(Status), Status); 2689 2690 /* Allocate a new mobid. */ 2691 SVGAMobId mobid; 2692 Status = SvgaMobAlloc(pSvga, &mobid, pGbo); 2626 2693 AssertReturnStmt(NT_SUCCESS(Status), 2627 RTR0MemObjFree(hMemObjCOT, true),2694 SvgaGboUnreference(pSvga, &pGbo), 2628 2695 Status); 2629 2696 2630 Status = SvgaMobSetMemObj(pMob, hMemObjCOT); 2631 AssertReturnStmt(NT_SUCCESS(Status), 2632 SvgaMobFree(pSvga, pMob); RTR0MemObjFree(hMemObjCOT, true), 2633 Status); 2697 /* Now the new allocated mob holds a reference to the gbo. */ 2698 SvgaGboUnreference(pSvga, &pGbo); 2634 2699 2635 2700 /* Emit commands. 
*/ 2636 void *pvCmd = SvgaCmdBuf3dCmdReserve(pSvga, SVGA_3D_CMD_DEFINE_GB_MOB64, sizeof(SVGA3dCmdDefineGBMob64), SVGA3D_INVALID_ID); 2701 uint32_t cbCmd = 0; 2702 SvgaMobDefine(pSvga, SVGA3D_INVALID_ID, NULL, 0, &cbCmd); 2703 void *pvCmd = SvgaCmdBufReserve(pSvga, cbCmd, SVGA3D_INVALID_ID); 2637 2704 if (pvCmd) 2638 2705 { 2639 SVGA3dCmdDefineGBMob64 *pCmd = (SVGA3dCmdDefineGBMob64 *)pvCmd; 2640 pCmd->mobid = VMSVGAMOB_ID(pMob); 2641 pCmd->ptDepth = pMob->gbo.enmMobFormat; 2642 pCmd->base = pMob->gbo.base; 2643 pCmd->sizeInBytes = pMob->gbo.cbGbo; 2644 SvgaCmdBufCommit(pSvga, sizeof(*pCmd)); 2645 } 2646 else 2647 AssertFailedReturnStmt(SvgaMobFree(pSvga, pMob); RTR0MemObjFree(hMemObjCOT, true), 2706 SvgaMobDefine(pSvga, mobid, pvCmd, cbCmd, &cbCmd); 2707 SvgaCmdBufCommit(pSvga, cbCmd); 2708 } 2709 else 2710 AssertFailedReturnStmt(SvgaMobFree(pSvga, &mobid), 2648 2711 STATUS_INSUFFICIENT_RESOURCES); 2649 2712 … … 2656 2719 SVGA3dCmdDXSetCOTable *pCmd = (SVGA3dCmdDXSetCOTable *)pvCmd; 2657 2720 pCmd->cid = pSvgaContext->u32Cid; 2658 pCmd->mobid = VMSVGAMOB_ID(pMob);2721 pCmd->mobid = mobid; 2659 2722 pCmd->type = enmType; 2660 2723 pCmd->validSizeInBytes = pCOT->cEntries * s_acbEntry[idxCOTable]; … … 2662 2725 } 2663 2726 else 2664 AssertFailedReturnStmt(SvgaMobFree(pSvga, pMob); RTR0MemObjFree(hMemObjCOT, true),2727 AssertFailedReturnStmt(SvgaMobFree(pSvga, &mobid), 2665 2728 STATUS_INSUFFICIENT_RESOURCES); 2666 2729 } … … 2673 2736 SVGA3dCmdDXGrowCOTable *pCmd = (SVGA3dCmdDXGrowCOTable *)pvCmd; 2674 2737 pCmd->cid = pSvgaContext->u32Cid; 2675 pCmd->mobid = VMSVGAMOB_ID(pMob);2738 pCmd->mobid = mobid; 2676 2739 pCmd->type = enmType; 2677 2740 pCmd->validSizeInBytes = pCOT->cEntries * s_acbEntry[idxCOTable]; … … 2679 2742 } 2680 2743 else 2681 AssertFailedReturnStmt(SvgaMobFree(pSvga, pMob); RTR0MemObjFree(hMemObjCOT, true),2744 AssertFailedReturnStmt(SvgaMobFree(pSvga, &mobid), 2682 2745 STATUS_INSUFFICIENT_RESOURCES); 2683 2746 2747 /* Delete old mob. */ 2684 2748 uint32_t cbCmdRequired = 0; 2685 SvgaMobDestroy(pSvga, pCOT->pMob, NULL, 0, &cbCmdRequired);2749 SvgaMobDestroy(pSvga, 0, NULL, 0, &cbCmdRequired); 2686 2750 pvCmd = SvgaCmdBufReserve(pSvga, cbCmdRequired, SVGA3D_INVALID_ID); 2687 2751 if (pvCmd) 2688 2752 { 2689 SvgaMobDestroy(pSvga, pCOT-> pMob, pvCmd, cbCmdRequired, &cbCmdRequired);2753 SvgaMobDestroy(pSvga, pCOT->mobid, pvCmd, cbCmdRequired, &cbCmdRequired); 2690 2754 SvgaCmdBufCommit(pSvga, cbCmdRequired); 2691 2755 } 2692 2756 2693 pCOT-> pMob = NULL;2757 pCOT->mobid = SVGA3D_INVALID_ID; 2694 2758 } 2695 2759 2696 2760 SvgaCmdBufFlush(pSvga); 2697 2761 2698 pCOT-> pMob = pMob;2762 pCOT->mobid = mobid; 2699 2763 pCOT->cEntries = cbCOT / s_acbEntry[idxCOTable]; 2700 2764 … … 2702 2766 } 2703 2767 2768 2769 void *SvgaMobAddress(VBOXWDDM_EXT_VMSVGA *pSvga, 2770 SVGAMobId mobid) 2771 { 2772 KIRQL OldIrql; 2773 KeAcquireSpinLock(&pSvga->MobSpinLock, &OldIrql); 2774 PVMSVGAMOB pMob = (PVMSVGAMOB)RTAvlU32Get(&pSvga->MobTree, mobid); 2775 KeReleaseSpinLock(&pSvga->MobSpinLock, OldIrql); 2776 AssertReturn(pMob && pMob->pGbo, NULL); 2777 2778 return RTR0MemObjAddress(pMob->pGbo->hMemObj); 2779 } 2780 2781 2782 NTSTATUS SvgaMobDefine(VBOXWDDM_EXT_VMSVGA *pSvga, 2783 SVGAMobId mobid, 2784 void *pvCmd, 2785 uint32_t cbReserved, 2786 uint32_t *pcbCmd) 2787 { 2788 uint32_t cbRequired = sizeof(SVGA3dCmdHeader) + sizeof(SVGA3dCmdDefineGBMob64); 2789 2790 *pcbCmd = cbRequired; 2791 if (cbReserved < cbRequired) 2792 return STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER; 2793 2794 /* Find the mob. 
*/ 2795 KIRQL OldIrql; 2796 KeAcquireSpinLock(&pSvga->MobSpinLock, &OldIrql); 2797 PVMSVGAMOB pMob = (PVMSVGAMOB)RTAvlU32Get(&pSvga->MobTree, mobid); 2798 KeReleaseSpinLock(&pSvga->MobSpinLock, OldIrql); 2799 AssertReturn(pMob && pMob->pGbo, STATUS_INVALID_PARAMETER); 2800 2801 /* Generate commands. */ 2802 uint8_t *pu8Cmd = (uint8_t *)pvCmd; 2803 SVGA3dCmdHeader *pHdr; 2804 2805 pHdr = (SVGA3dCmdHeader *)pu8Cmd; 2806 pHdr->id = SVGA_3D_CMD_DEFINE_GB_MOB64; 2807 pHdr->size = sizeof(SVGA3dCmdDefineGBMob64); 2808 pu8Cmd += sizeof(*pHdr); 2809 2810 { 2811 SVGA3dCmdDefineGBMob64 *pCmd = (SVGA3dCmdDefineGBMob64 *)pu8Cmd; 2812 pCmd->mobid = mobid; 2813 pCmd->ptDepth = pMob->pGbo->enmMobFormat; 2814 pCmd->base = pMob->pGbo->base; 2815 pCmd->sizeInBytes = pMob->pGbo->cbGbo; 2816 pu8Cmd += sizeof(*pCmd); 2817 } 2818 2819 Assert((uintptr_t)pu8Cmd - (uintptr_t)pvCmd == cbRequired); 2820 2821 return STATUS_SUCCESS; 2822 } 2704 2823 2705 2824 … … 2720 2839 */ 2721 2840 NTSTATUS SvgaMobDestroy(VBOXWDDM_EXT_VMSVGA *pSvga, 2722 PVMSVGAMOB pMob,2841 SVGAMobId mobid, 2723 2842 void *pvCmd, 2724 2843 uint32_t cbReserved, … … 2732 2851 return STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER; 2733 2852 2853 /* Find the mob. */ 2854 KIRQL OldIrql; 2855 KeAcquireSpinLock(&pSvga->MobSpinLock, &OldIrql); 2856 PVMSVGAMOB pMob = (PVMSVGAMOB)RTAvlU32Get(&pSvga->MobTree, mobid); 2857 KeReleaseSpinLock(&pSvga->MobSpinLock, OldIrql); 2858 AssertReturn(pMob, STATUS_INVALID_PARAMETER); 2859 2860 /* Mob will be actually deallocated after the host processes the fence. */ 2861 pMob->u64MobFence = ASMAtomicIncU64(&pSvga->u64MobFence); 2862 2863 /* Add the mob to the deferred destruction queue. */ 2864 SvgaHostObjectsLock(pSvga, &OldIrql); 2865 RTListAppend(&pSvga->listMobDeferredDestruction, &pMob->node); 2866 SvgaHostObjectsUnlock(pSvga, OldIrql); 2867 2868 /* Generate commands. */ 2734 2869 uint8_t *pu8Cmd = (uint8_t *)pvCmd; 2735 2870 SVGA3dCmdHeader *pHdr; … … 2742 2877 { 2743 2878 SVGA3dCmdDestroyGBMob *pCmd = (SVGA3dCmdDestroyGBMob *)pu8Cmd; 2744 pCmd->mobid = VMSVGAMOB_ID(pMob);2879 pCmd->mobid = mobid; 2745 2880 pu8Cmd += sizeof(*pCmd); 2746 2881 } 2747 2748 pMob->u64MobFence = ASMAtomicIncU64(&pSvga->u64MobFence);2749 2882 2750 2883 pHdr = (SVGA3dCmdHeader *)pu8Cmd; … … 2756 2889 SVGA3dCmdDXMobFence64 *pCmd = (SVGA3dCmdDXMobFence64 *)pu8Cmd; 2757 2890 pCmd->value = pMob->u64MobFence; 2758 pCmd->mobId = VMSVGAMOB_ID(pSvga->pMiniportMob);2891 pCmd->mobId = pSvga->mobidMiniport; 2759 2892 pCmd->mobOffset = RT_OFFSETOF(VMSVGAMINIPORTMOB, u64MobFence); 2760 2893 pu8Cmd += sizeof(*pCmd); 2761 2894 } 2762 2895 2763 /* Add the mob to the deferred destruction queue. */2764 KIRQL OldIrql;2765 SvgaHostObjectsLock(pSvga, &OldIrql);2766 RTListAppend(&pSvga->listMobDeferredDestruction, &pMob->node);2767 SvgaHostObjectsUnlock(pSvga, OldIrql);2768 2769 2896 Assert((uintptr_t)pu8Cmd - (uintptr_t)pvCmd == cbRequired); 2770 2897 2771 2898 return STATUS_SUCCESS; 2772 2899 } 2900 2901 DECLINLINE(int) SvgaFenceCmp64(uint64_t u64FenceA, uint64_t u64FenceB) 2902 { 2903 if ( u64FenceA < u64FenceB 2904 || u64FenceA - u64FenceB > UINT64_MAX / 2) 2905 { 2906 return -1; /* FenceA is newer than FenceB. */ 2907 } 2908 else if (u64FenceA == u64FenceB) 2909 { 2910 /* FenceA is equal to FenceB. */ 2911 return 0; 2912 } 2913 2914 /* FenceA is older than FenceB. 
*/ 2915 return 1; 2916 } 2917 2918 void SvgaDeferredMobDestruction(PVBOXWDDM_EXT_VMSVGA pSvga) 2919 { 2920 if (pSvga->pMiniportMobData) 2921 { 2922 uint64_t const u64MobFence = ASMAtomicReadU64(&pSvga->pMiniportMobData->u64MobFence); 2923 2924 /* Move mobs which were deleted by the host to the local list under the lock. */ 2925 RTLISTANCHOR listDestroyedMobs; 2926 RTListInit(&listDestroyedMobs); 2927 2928 KIRQL OldIrql; 2929 SvgaHostObjectsLock(pSvga, &OldIrql); 2930 2931 PVMSVGAMOB pIter, pNext; 2932 RTListForEachSafe(&pSvga->listMobDeferredDestruction, pIter, pNext, VMSVGAMOB, node) 2933 { 2934 if (SvgaFenceCmp64(pIter->u64MobFence, u64MobFence) <= 0) 2935 { 2936 RTListNodeRemove(&pIter->node); 2937 RTListAppend(&listDestroyedMobs, &pIter->node); 2938 } 2939 } 2940 2941 SvgaHostObjectsUnlock(pSvga, OldIrql); 2942 2943 RTListForEachSafe(&listDestroyedMobs, pIter, pNext, VMSVGAMOB, node) 2944 { 2945 /* Delete the data. SvgaMobFree deallocates pIter. */ 2946 RTListNodeRemove(&pIter->node); 2947 SVGAMobId mobid = VMSVGAMOB_ID(pIter); 2948 SvgaMobFree(pSvga, &mobid); 2949 } 2950 } 2951 } 2952 2953 #ifdef DEBUG 2954 static DECLCALLBACK(int) mobDumpCb(PAVLU32NODECORE pNode, void *pvUser) 2955 { 2956 PVMSVGAMOB pMob = (PVMSVGAMOB)pNode; 2957 uint32_t *pcMobs = (uint32_t *)pvUser; 2958 *pcMobs += 1; 2959 2960 GALOG(("mobid = %u, refs = %d, mdl = %u, cb = %u\n", 2961 VMSVGAMOB_ID(pMob), 2962 pMob->pGbo ? pMob->pGbo->cRefs : -1, 2963 pMob->pGbo ? pMob->pGbo->flags.fMdl : 2, 2964 pMob->pGbo ? pMob->pGbo->cbGbo : 0)); 2965 return 0; 2966 } 2967 #endif /* DEBUG */ -
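The reworked Svga.cpp queues a mob for destruction together with a fence value (SvgaMobDestroy) and only releases it once the host reports that fence as completed (SvgaDeferredMobDestruction). The following is a simplified single-threaded sketch of that deferred-destruction queue, using standard containers instead of the driver's RTLIST and ignoring locking and fence wrap-around; the names are placeholders.

```cpp
#include <cstdint>
#include <iostream>
#include <list>

// A pending destruction: the object may be freed once the host has
// completed at least this fence value.
struct PendingMob
{
    uint32_t mobid;
    uint64_t fence;
};

static std::list<PendingMob> g_deferred;   // stands in for listMobDeferredDestruction
static uint64_t g_fenceCounter = 0;        // stands in for u64MobFence

// Queue a mob for destruction and return the fence written into the command stream.
static uint64_t deferMobDestruction(uint32_t mobid)
{
    uint64_t fence = ++g_fenceCounter;
    g_deferred.push_back({mobid, fence});
    return fence;
}

// Called when the miniport mob reports the last completed fence (DPC/work item).
static void processCompleted(uint64_t completedFence)
{
    for (auto it = g_deferred.begin(); it != g_deferred.end(); )
    {
        if (it->fence <= completedFence)   // host is done with this mob id
        {
            std::cout << "freeing mob " << it->mobid << "\n";
            it = g_deferred.erase(it);
        }
        else
            ++it;
    }
}

int main()
{
    deferMobDestruction(7);
    uint64_t f2 = deferMobDestruction(9);
    processCompleted(f2 - 1);  // only mob 7 is freed
    processCompleted(f2);      // mob 9 is freed as well
    return 0;
}
```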
trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm/gallium/Svga.h
r108786 r109165 5 5 6 6 /* 7 * Copyright (C) 2016-202 4Oracle and/or its affiliates.7 * Copyright (C) 2016-2025 Oracle and/or its affiliates. 8 8 * 9 9 * This file is part of VirtualBox base platform packages, as … … 34 34 #include "VBoxMPGaUtils.h" 35 35 36 #include <iprt/asm.h> 36 37 #include <iprt/assert.h> 37 38 #include <iprt/avl.h> … … 164 165 typedef struct VMSVGAGBO 165 166 { 167 int32_t volatile cRefs; /* Reference count */ 168 struct 169 { 170 uint32_t fMdl : 1; 171 uint32_t reserved : 31; 172 } flags; 166 173 uint32_t cbGbo; /* Size of gbo in bytes. */ 167 174 uint32_t cPTPages; /* How many pages are required to hold PPN64 page table. */ 168 175 SVGAMobFormat enmMobFormat; /* Page table format. */ 176 union /* Backing memory. */ 177 { 178 PMDL pMdl; 179 RTR0MEMOBJ hMemObj; 180 }; 169 181 PPN64 base; /* Page which contains the page table. */ 170 182 RTR0MEMOBJ hMemObjPT; /* Page table pages. */ … … 174 186 #define SVGA3D_MAX_MOBS (SVGA3D_MAX_CONTEXT_IDS + SVGA3D_MAX_CONTEXT_IDS + SVGA3D_MAX_SURFACE_IDS) 175 187 176 /* Memory OBject: a gbo with an id, possibly bound to an allocation. */177 typedef struct VMSVGAMOB178 {179 AVLU32NODECORE core; /* AVL entry. Key is mobid, allocated by the miniport. */180 HANDLE hAllocation; /* Allocation which is bound to the mob. */181 VMSVGAGBO gbo; /* Gbo for this mob. */182 RTR0MEMOBJ hMemObj; /* The guest memory if allocated by miniport. */183 uint32_t u64MobFence; /* Free by the guest when the host reports this fence value. */184 RTLISTNODE node; /* VBOXWDDM_EXT_VMSVGA::listMobDeferredDestruction */185 } VMSVGAMOB, *PVMSVGAMOB;186 187 #define VMSVGAMOB_ID(a_pMob) ((a_pMob)->core.Key)188 189 188 typedef struct VMSVGAOT 190 189 { 191 VMSVGAGBO gbo; 192 RTR0MEMOBJ hMemObj; 190 PVMSVGAGBO pGbo; 193 191 uint32_t cEntries; /* How many objects can be stored in the OTable. */ 194 192 } VMSVGAOT, *PVMSVGAOT; … … 213 211 uint32_t u32GmrMaxPages; /** SVGA_REG_GMRS_MAX_PAGES */ 214 212 uint32_t u32MemorySize; /** SVGA_REG_MEMORY_SIZE */ 213 uint32_t u32MaxMobSize; /** SVGA_REG_MOB_MAX_SIZE */ 215 214 uint32_t u32MaxTextureWidth; /** SVGA3D_DEVCAP_MAX_TEXTURE_WIDTH */ 216 215 uint32_t u32MaxTextureHeight; /** SVGA3D_DEVCAP_MAX_TEXTURE_HEIGHT */ … … 261 260 VMSVGAOT aOT[SVGA_OTABLE_DX_MAX]; 262 261 263 PVMSVGAMOB pMiniportMob; /* Used by miniport to communicate with the device. */ 262 /* Used by miniport to communicate with the device. */ 263 PVMSVGAGBO pMiniportGbo; 264 SVGAMobId mobidMiniport; 264 265 struct VMSVGAMINIPORTMOB volatile *pMiniportMobData; /* Pointer to the miniport mob content. */ 265 266 266 267 uint64_t volatile u64MobFence; 267 268 RTLISTANCHOR listMobDeferredDestruction; /* Mob to be deleted after. */ 269 int32_t volatile cQueuedWorkItems; 268 270 269 271 #ifdef DEBUG 270 272 /* Statistics. */ 273 uint32_t volatile cAllocatedGbos; 271 274 uint32_t volatile cAllocatedMobs; 272 uint32_t volatile cAllocatedMobPages;273 275 uint32_t volatile cAllocatedGmrs; 274 276 uint32_t volatile cAllocatedGmrPages; … … 303 305 typedef struct VMSVGACOT 304 306 { 305 PVMSVGAMOB pMob;/* COTable mob. */307 SVGAMobId mobid; /* COTable mob. */ 306 308 uint32_t cEntries; /* How many objects can be stored in the COTable. 
*/ 307 309 } VMSVGACOT, *PVMSVGACOT; … … 644 646 #endif 645 647 646 NTSTATUS SvgaGboInit(VMSVGAGBO *pGbo, uint32_t cPages); 647 void SvgaGboFree(VMSVGAGBO *pGbo); 648 NTSTATUS SvgaGboFillPageTableForMDL(PVMSVGAGBO pGbo, 649 PMDL pMdl, 650 uint32_t MdlOffset); 651 NTSTATUS SvgaGboFillPageTableForMemObj(PVMSVGAGBO pGbo, 652 RTR0MEMOBJ hMemObj); 648 NTSTATUS SvgaGboCreate(VBOXWDDM_EXT_VMSVGA *pSvga, 649 PVMSVGAGBO *ppGbo, 650 uint32_t cbGbo, 651 const char *pszTag); 652 NTSTATUS SvgaGboCreateForMdl(VBOXWDDM_EXT_VMSVGA *pSvga, 653 PVMSVGAGBO *ppGbo, 654 SIZE_T NumberOfPages, 655 PMDL pMdl, 656 ULONG MdlOffset); 657 void SvgaGboFree(VBOXWDDM_EXT_VMSVGA *pSvga, 658 VMSVGAGBO *pGbo); 659 660 DECLINLINE(void) SvgaGboReference(PVMSVGAGBO pGbo) 661 { 662 if (pGbo) 663 { 664 int32_t const c = ASMAtomicIncS32(&pGbo->cRefs); 665 Assert(c > 0); RT_NOREF(c); 666 } 667 } 668 669 DECLINLINE(void) SvgaGboUnreference(VBOXWDDM_EXT_VMSVGA *pSvga, 670 PVMSVGAGBO *ppGbo) 671 { 672 if (*ppGbo) 673 { 674 int32_t const c = ASMAtomicDecS32(&(*ppGbo)->cRefs); 675 Assert(c >= 0); 676 if (c == 0) 677 SvgaGboFree(pSvga, *ppGbo); 678 *ppGbo = NULL; 679 } 680 } 653 681 654 682 void SvgaMobFree(VBOXWDDM_EXT_VMSVGA *pSvga, 655 PVMSVGAMOB pMob); 656 PVMSVGAMOB SvgaMobQuery(VBOXWDDM_EXT_VMSVGA *pSvga, 657 uint32_t mobid); 658 NTSTATUS SvgaMobCreate(VBOXWDDM_EXT_VMSVGA *pSvga, 659 PVMSVGAMOB *ppMob, 660 uint32_t cMobPages, 661 HANDLE hAllocation); 662 NTSTATUS SvgaMobSetMemObj(PVMSVGAMOB pMob, 663 RTR0MEMOBJ hMemObj); 683 SVGAMobId *pMobid); 684 NTSTATUS SvgaMobAlloc(VBOXWDDM_EXT_VMSVGA *pSvga, 685 SVGAMobId *pMobid, 686 PVMSVGAGBO pGbo); 687 void *SvgaMobAddress(VBOXWDDM_EXT_VMSVGA *pSvga, 688 SVGAMobId mobid); 689 NTSTATUS SvgaMobDefine(VBOXWDDM_EXT_VMSVGA *pSvga, 690 SVGAMobId mobid, 691 void *pvCmd, 692 uint32_t cbReserved, 693 uint32_t *pcbCmd); 664 694 NTSTATUS SvgaMobDestroy(VBOXWDDM_EXT_VMSVGA *pSvga, 665 PVMSVGAMOB pMob,695 SVGAMobId mobid, 666 696 void *pvCmd, 667 697 uint32_t cbReserved, 668 698 uint32_t *pcbCmd); 699 void SvgaDeferredMobDestruction(PVBOXWDDM_EXT_VMSVGA pSvga); 669 700 670 701 NTSTATUS SvgaCOTNotifyId(VBOXWDDM_EXT_VMSVGA *pSvga, -
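In the new Svga.h layout a mob is reduced to an id bound to a referenced GBO, and the driver tracks the id→mob mapping in an AVL tree under a spinlock (SvgaMobAlloc, SvgaMobFree, SvgaMobAddress). A rough user-mode sketch of that mapping follows; it substitutes std::map and std::shared_ptr for the AVL tree and manual reference counting, omits locking, and uses made-up names.

```cpp
#include <cassert>
#include <cstdint>
#include <map>
#include <memory>

struct Gbo { uint32_t cbGbo = 0; };            // placeholder for VMSVGAGBO

struct Mob { std::shared_ptr<Gbo> gbo; };      // placeholder for VMSVGAMOB

static std::map<uint32_t, Mob> g_mobTree;      // stands in for the AVL tree
static uint32_t g_nextId = 1;
static const uint32_t INVALID_ID = ~0u;        // stands in for SVGA3D_INVALID_ID

// Allocate an id and bind it to the given GBO; the mob keeps the GBO alive.
static uint32_t mobAlloc(std::shared_ptr<Gbo> gbo)
{
    uint32_t id = g_nextId++;
    g_mobTree[id] = Mob{std::move(gbo)};
    return id;
}

// Free the id and drop the mob's reference to its GBO; reset the caller's id.
static void mobFree(uint32_t *pId)
{
    if (*pId != INVALID_ID)
    {
        g_mobTree.erase(*pId);    // releases the mob's reference to the GBO
        *pId = INVALID_ID;
    }
}

int main()
{
    auto gbo = std::make_shared<Gbo>();
    uint32_t mobid = mobAlloc(gbo);
    assert(g_mobTree.count(mobid) == 1);
    mobFree(&mobid);
    assert(mobid == INVALID_ID && g_mobTree.empty());
    return 0;
}
```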
trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm/gallium/VBoxMPDX.cpp
r106061 r109165 5 5 6 6 /* 7 * Copyright (C) 2022-202 4Oracle and/or its affiliates.7 * Copyright (C) 2022-2025 Oracle and/or its affiliates. 8 8 * 9 9 * This file is part of VirtualBox base platform packages, as … … 99 99 100 100 101 static void svga FreeGBMobForAllocation(VBOXWDDM_EXT_VMSVGA *pSvga, PVBOXWDDM_ALLOCATION pAllocation)102 { 103 AssertReturnVoid(pAllocation->dx.SegmentId == 3 || pAllocation->dx.desc.fPrimary);104 105 uint32_t cbRequired = 0;106 SvgaMobDestroy(pSvga, pAllocation->dx.gb.pMob, NULL, 0, &cbRequired);107 void *pvCmd = SvgaCmdBufReserve(pSvga, cbRequired, SVGA3D_INVALID_ID);108 if (pvCmd)109 {110 SvgaMobDestroy(pSvga, pAllocation->dx.gb.pMob, pvCmd, cbRequired, &cbRequired);111 SvgaCmdBufCommit(pSvga, cbRequired);112 }113 114 pAllocation->dx.gb.pMob = NULL;115 pAllocation->dx.mobid = SVGA3D_INVALID_ID;116 } 117 118 119 static NTSTATUS svga CreateGBMobForAllocation(VBOXWDDM_EXT_VMSVGA *pSvga, PVBOXWDDM_ALLOCATION pAllocation)101 static void svgaDestroyMobForAllocation(VBOXWDDM_EXT_VMSVGA *pSvga, PVBOXWDDM_ALLOCATION pAllocation) 102 { 103 if (pAllocation->dx.mobid != SVGA3D_INVALID_ID) 104 { 105 uint32_t cbRequired = 0; 106 SvgaMobDestroy(pSvga, SVGA3D_INVALID_ID, NULL, 0, &cbRequired); 107 void *pvCmd = SvgaCmdBufReserve(pSvga, cbRequired, SVGA3D_INVALID_ID); 108 if (pvCmd) 109 { 110 SvgaMobDestroy(pSvga, pAllocation->dx.mobid, pvCmd, cbRequired, &cbRequired); 111 SvgaCmdBufCommit(pSvga, cbRequired); 112 } 113 114 pAllocation->dx.mobid = SVGA3D_INVALID_ID; 115 } 116 } 117 118 119 static NTSTATUS svgaDefineMobForAllocation(VBOXWDDM_EXT_VMSVGA *pSvga, PVBOXWDDM_ALLOCATION pAllocation) 120 120 { 121 121 AssertReturn(pAllocation->dx.SegmentId == 3 || pAllocation->dx.desc.fPrimary, STATUS_INVALID_PARAMETER); 122 122 123 uint32_t const cbGB = RT_ALIGN_32(pAllocation->dx.desc.cbAllocation, PAGE_SIZE); 124 125 /* Allocate guest backing pages. */ 126 RTR0MEMOBJ hMemObjGB; 127 int rc = RTR0MemObjAllocPageTag(&hMemObjGB, cbGB, false /* executable R0 mapping */, "VMSVGAGB"); 128 AssertRCReturn(rc, STATUS_INSUFFICIENT_RESOURCES); 129 130 /* Allocate a new mob. */ 131 NTSTATUS Status = SvgaMobCreate(pSvga, &pAllocation->dx.gb.pMob, cbGB >> PAGE_SHIFT, 0); 132 Assert(NT_SUCCESS(Status)); 123 /* Allocate a mobid. */ 124 NTSTATUS Status = SvgaMobAlloc(pSvga, &pAllocation->dx.mobid, pAllocation->dx.pGbo); 133 125 if (NT_SUCCESS(Status)) 134 126 { 135 Status = SvgaMobSetMemObj(pAllocation->dx.gb.pMob, hMemObjGB); 136 Assert(NT_SUCCESS(Status)); 137 if (NT_SUCCESS(Status)) 138 { 139 pAllocation->dx.mobid = VMSVGAMOB_ID(pAllocation->dx.gb.pMob); 140 141 void *pvCmd = SvgaCmdBuf3dCmdReserve(pSvga, SVGA_3D_CMD_DEFINE_GB_MOB64, sizeof(SVGA3dCmdDefineGBMob64), SVGA3D_INVALID_ID); 142 if (pvCmd) 143 { 144 SVGA3dCmdDefineGBMob64 *pCmd = (SVGA3dCmdDefineGBMob64 *)pvCmd; 145 pCmd->mobid = VMSVGAMOB_ID(pAllocation->dx.gb.pMob); 146 pCmd->ptDepth = pAllocation->dx.gb.pMob->gbo.enmMobFormat; 147 pCmd->base = pAllocation->dx.gb.pMob->gbo.base; 148 pCmd->sizeInBytes = pAllocation->dx.gb.pMob->gbo.cbGbo; 149 SvgaCmdBufCommit(pSvga, sizeof(SVGA3dCmdDefineGBMob64)); 150 } 151 else 152 AssertFailedStmt(Status = STATUS_INSUFFICIENT_RESOURCES); 153 154 if (NT_SUCCESS(Status)) 155 return STATUS_SUCCESS; 156 } 157 } 158 159 svgaFreeGBMobForAllocation(pSvga, pAllocation); 127 /* Inform the host about the mob. 
*/ 128 uint32_t cbCmd = 0; 129 SvgaMobDefine(pSvga, SVGA3D_INVALID_ID, NULL, 0, &cbCmd); 130 void *pvCmd = SvgaCmdBufReserve(pSvga, cbCmd, SVGA3D_INVALID_ID); 131 if (pvCmd) 132 { 133 SvgaMobDefine(pSvga, pAllocation->dx.mobid, pvCmd, cbCmd, &cbCmd); 134 SvgaCmdBufCommit(pSvga, cbCmd); 135 return STATUS_SUCCESS; 136 } 137 138 AssertFailedStmt(Status = STATUS_INSUFFICIENT_RESOURCES); 139 /* Deallocate mobid. */ 140 SvgaMobFree(pSvga, &pAllocation->dx.mobid); 141 } 142 160 143 return Status; 161 144 } … … 210 193 { 211 194 /* USAGE_STAGING */ 212 /** @todo Maybe use VRAM? */ 213 pAllocationInfo->PreferredSegment.SegmentId0 = 0; 195 pAllocationInfo->PreferredSegment.Value = 0; 214 196 pAllocationInfo->SupportedReadSegmentSet = 2; /* Aperture */ 215 197 pAllocationInfo->SupportedWriteSegmentSet = 2; /* Aperture */ … … 231 213 if (pAllocation->dx.SegmentId == 3 || pAllocation->dx.desc.fPrimary) 232 214 { 233 Status = svgaCreateGBMobForAllocation(pSvga, pAllocation); 215 uint32_t const cbGB = RT_ALIGN_32(pAllocation->dx.desc.cbAllocation, PAGE_SIZE); 216 217 Status = SvgaGboCreate(pSvga, &pAllocation->dx.pGbo, cbGB, "VMSVGAGB"); 234 218 if (NT_SUCCESS(Status)) 235 219 { 236 Status = svgaCreateSurfaceForAllocation(pSvga, pAllocation); 220 Status = svgaDefineMobForAllocation(pSvga, pAllocation); 221 if (NT_SUCCESS(Status)) 222 { 223 Status = svgaCreateSurfaceForAllocation(pSvga, pAllocation); 224 if (!NT_SUCCESS(Status)) 225 svgaDestroyMobForAllocation(pSvga, pAllocation); 226 } 227 237 228 if (!NT_SUCCESS(Status)) 238 svgaFreeGBMobForAllocation(pSvga, pAllocation);229 SvgaGboUnreference(pSvga, &pAllocation->dx.pGbo); 239 230 } 240 231 } … … 273 264 { 274 265 void *pvCmd; 275 if (pAllocation->dx.SegmentId == 3 || pAllocation->dx.desc.fPrimary) 276 { 266 if (pAllocation->dx.mobid != SVGA3D_INVALID_ID) 267 { 268 /* Unbind mob */ 269 Assert(pAllocation->dx.SegmentId == 3 || pAllocation->dx.desc.fPrimary); 270 277 271 pvCmd = SvgaCmdBuf3dCmdReserve(pSvga, SVGA_3D_CMD_BIND_GB_SURFACE, sizeof(SVGA3dCmdBindGBSurface), SVGA3D_INVALID_ID); 278 272 if (pvCmd) … … 294 288 295 289 Status = SvgaSurfaceIdFree(pSvga, pAllocation->dx.sid); 296 297 if (pAllocation->dx.SegmentId == 3 || pAllocation->dx.desc.fPrimary)298 svgaFreeGBMobForAllocation(pSvga, pAllocation);299 300 290 pAllocation->dx.sid = SVGA3D_INVALID_ID; 291 292 svgaDestroyMobForAllocation(pSvga, pAllocation); 301 293 } 302 294 return Status; … … 306 298 static NTSTATUS svgaDestroyAllocationShaders(VBOXWDDM_EXT_VMSVGA *pSvga, PVBOXWDDM_ALLOCATION pAllocation) 307 299 { 308 NTSTATUS Status = STATUS_SUCCESS; 309 if (pAllocation->dx.mobid != SVGA3D_INVALID_ID) 310 { 311 void *pvCmd = SvgaCmdBuf3dCmdReserve(pSvga, SVGA_3D_CMD_DESTROY_GB_MOB, sizeof(SVGA3dCmdDestroyGBMob), SVGA3D_INVALID_ID); 312 if (pvCmd) 313 { 314 SVGA3dCmdDestroyGBMob *pCmd = (SVGA3dCmdDestroyGBMob *)pvCmd; 315 pCmd->mobid = pAllocation->dx.mobid; 316 SvgaCmdBufCommit(pSvga, sizeof(SVGA3dCmdDestroyGBMob)); 317 } 318 else 319 AssertFailedStmt(Status = STATUS_INSUFFICIENT_RESOURCES); 320 321 pAllocation->dx.mobid = SVGA3D_INVALID_ID; 322 } 323 return Status; 300 svgaDestroyMobForAllocation(pSvga, pAllocation); 301 return STATUS_SUCCESS; 324 302 } 325 303 … … 344 322 pAllocation->enmType = VBOXWDDM_ALLOC_TYPE_D3D; 345 323 pAllocation->dx.desc = *(PVBOXDXALLOCATIONDESC)pAllocationInfo->pPrivateDriverData; 346 pAllocation->dx.desc.cbAllocation = pAllocation->dx.desc.cbAllocation;347 324 pAllocation->dx.sid = SVGA3D_INVALID_ID; 348 325 pAllocation->dx.mobid = SVGA3D_INVALID_ID; 349 
pAllocation->dx.SegmentId = 0;350 pAllocation->dx.pMDL= 0;326 //pAllocation->dx.SegmentId = 0; 327 //pAllocation->dx.pGbo = 0; 351 328 352 329 KeInitializeSpinLock(&pAllocation->OpenLock); … … 396 373 else 397 374 AssertFailedReturn(STATUS_INVALID_PARAMETER); 375 376 SvgaGboUnreference(pDevExt->pGa->hw.pSvga, &pAllocation->dx.pGbo); 398 377 399 378 RT_ZERO(*pAllocation); … … 756 735 if (pBuildPagingBuffer->Fill.Destination.SegmentId == 3 || pAllocation->dx.desc.fPrimary) 757 736 { 758 AssertReturn(pAllocation->dx. gb.pMob->hMemObj != NIL_RTR0MEMOBJ, STATUS_INVALID_PARAMETER);759 pvDst = RTR0MemObjAddress(pAllocation->dx. gb.pMob->hMemObj);737 AssertReturn(pAllocation->dx.pGbo->hMemObj != NIL_RTR0MEMOBJ, STATUS_INVALID_PARAMETER); 738 pvDst = RTR0MemObjAddress(pAllocation->dx.pGbo->hMemObj); 760 739 } 761 740 else 762 741 { 763 AssertReturn(pAllocation->dx.p MDL!= NULL, STATUS_INVALID_PARAMETER);742 AssertReturn(pAllocation->dx.pGbo->flags.fMdl && pAllocation->dx.pGbo->pMdl != NULL, STATUS_INVALID_PARAMETER); 764 743 DEBUG_BREAKPOINT_TEST(); 765 pvDst = MmGetSystemAddressForMdlSafe(pAllocation->dx.p MDL, NormalPagePriority);744 pvDst = MmGetSystemAddressForMdlSafe(pAllocation->dx.pGbo->pMdl, NormalPagePriority); 766 745 AssertReturn(pvDst, STATUS_INSUFFICIENT_RESOURCES); 767 746 } … … 855 834 } 856 835 857 PVMSVGAMOB pMob; 858 NTSTATUS Status = SvgaMobCreate(pSvga, &pMob, 859 pBuildPagingBuffer->MapApertureSegment.NumberOfPages, 860 pBuildPagingBuffer->MapApertureSegment.hAllocation); 861 AssertReturn(NT_SUCCESS(Status), Status); 862 863 Status = SvgaGboFillPageTableForMDL(&pMob->gbo, pBuildPagingBuffer->MapApertureSegment.pMdl, 864 pBuildPagingBuffer->MapApertureSegment.MdlOffset); 865 AssertReturnStmt(NT_SUCCESS(Status), SvgaMobFree(pSvga, pMob), Status); 866 867 uint32_t cbRequired = sizeof(SVGA3dCmdHeader) + sizeof(SVGA3dCmdDefineGBMob64); 836 uint32_t cbRequired = 0; 837 SvgaMobDefine(pSvga, SVGA3D_INVALID_ID, NULL, 0, &cbRequired); 868 838 if (pAllocation->dx.desc.enmAllocationType == VBOXDXALLOCATIONTYPE_SURFACE) 869 839 { … … 873 843 874 844 if (pBuildPagingBuffer->DmaSize < cbRequired) 875 {876 SvgaMobFree(pSvga, pMob);877 845 return STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER; 878 } 879 880 pAllocation->dx.mobid = VMSVGAMOB_ID(pMob); 846 847 Assert(pAllocation->dx.pGbo == NULL); 848 849 NTSTATUS Status = SvgaGboCreateForMdl(pSvga, &pAllocation->dx.pGbo, 850 pBuildPagingBuffer->MapApertureSegment.NumberOfPages, 851 pBuildPagingBuffer->MapApertureSegment.pMdl, 852 pBuildPagingBuffer->MapApertureSegment.MdlOffset); 853 AssertReturn(NT_SUCCESS(Status), Status); 854 855 Status = SvgaMobAlloc(pSvga, &pAllocation->dx.mobid, pAllocation->dx.pGbo); 856 AssertReturnStmt(NT_SUCCESS(Status), SvgaGboUnreference(pSvga, &pAllocation->dx.pGbo), Status); 881 857 882 858 uint8_t *pu8Cmd = (uint8_t *)pBuildPagingBuffer->pDmaBuffer; 883 859 884 SVGA3dCmdHeader *pHdr = (SVGA3dCmdHeader *)pu8Cmd; 885 pHdr->id = SVGA_3D_CMD_DEFINE_GB_MOB64; 886 pHdr->size = sizeof(SVGA3dCmdDefineGBMob64); 887 pu8Cmd += sizeof(*pHdr); 888 889 { 890 SVGA3dCmdDefineGBMob64 *pCmd = (SVGA3dCmdDefineGBMob64 *)pu8Cmd; 891 pCmd->mobid = VMSVGAMOB_ID(pMob); 892 pCmd->ptDepth = pMob->gbo.enmMobFormat; 893 pCmd->base = pMob->gbo.base; 894 pCmd->sizeInBytes = pMob->gbo.cbGbo; 895 pu8Cmd += sizeof(*pCmd); 896 } 860 uint32_t cbCmd = 0; 861 SvgaMobDefine(pSvga, pAllocation->dx.mobid, pu8Cmd, 862 cbRequired - ((uintptr_t)pu8Cmd - (uintptr_t)pBuildPagingBuffer->pDmaBuffer), 863 &cbCmd); 864 pu8Cmd += cbCmd; 865 866 SVGA3dCmdHeader *pHdr; 
897 867 898 868 if (pAllocation->dx.desc.enmAllocationType == VBOXDXALLOCATIONTYPE_SURFACE) … … 907 877 SVGA3dCmdBindGBSurface *pCmd = (SVGA3dCmdBindGBSurface *)pu8Cmd; 908 878 pCmd->sid = pAllocation->dx.sid; 909 pCmd->mobid = VMSVGAMOB_ID(pMob);879 pCmd->mobid = pAllocation->dx.mobid; 910 880 pu8Cmd += sizeof(*pCmd); 911 881 } … … 945 915 } 946 916 947 /* Find the mob. */948 PVMSVGAMOB pMob = SvgaMobQuery(pSvga, pAllocation->dx.mobid);949 AssertReturn(pMob, STATUS_INVALID_PARAMETER);950 951 917 uint32_t cbRequired = 0; 952 SvgaMobDestroy(pSvga, pMob, NULL, 0, &cbRequired);918 SvgaMobDestroy(pSvga, SVGA3D_INVALID_ID, NULL, 0, &cbRequired); 953 919 if (pAllocation->dx.desc.enmAllocationType == VBOXDXALLOCATIONTYPE_SURFACE) 954 920 cbRequired += sizeof(SVGA3dCmdHeader) + sizeof(SVGA3dCmdBindGBSurface); … … 977 943 978 944 uint32_t cbCmd = 0; 979 NTSTATUS Status = SvgaMobDestroy(pSvga, p Mob, pu8Cmd,945 NTSTATUS Status = SvgaMobDestroy(pSvga, pAllocation->dx.mobid, pu8Cmd, 980 946 cbRequired - ((uintptr_t)pu8Cmd - (uintptr_t)pBuildPagingBuffer->pDmaBuffer), 981 947 &cbCmd); … … 984 950 985 951 pAllocation->dx.mobid = SVGA3D_INVALID_ID; 952 SvgaGboUnreference(pSvga, &pAllocation->dx.pGbo); 986 953 987 954 *pcbCommands = (uintptr_t)pu8Cmd - (uintptr_t)pBuildPagingBuffer->pDmaBuffer; -
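The VBoxMPDX.cpp paths use the same two-pass pattern as SvgaMobDefine and SvgaMobDestroy: a first call with a null buffer only reports the required command size, the caller reserves that much in the command/DMA buffer, and a second call writes the commands. The sketch below models that calling convention with an entirely made-up command layout and opcode; it is not the SVGA wire format.

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>
#include <vector>

// Hypothetical "define mob" command writer: first pass (pvCmd == nullptr)
// only reports the required size; second pass fills the reserved space.
static bool writeDefineMob(uint32_t mobid, void *pvCmd, uint32_t cbReserved, uint32_t *pcbCmd)
{
    struct Cmd
    {
        uint32_t id;     // placeholder opcode, not a real SVGA command id
        uint32_t size;
        uint32_t mobid;
    } cmd{1u, (uint32_t)sizeof(uint32_t), mobid};

    *pcbCmd = (uint32_t)sizeof(cmd);
    if (pvCmd == nullptr || cbReserved < sizeof(cmd))
        return false;                      // size query, or buffer too small

    std::memcpy(pvCmd, &cmd, sizeof(cmd)); // second pass: emit the command
    return true;
}

int main()
{
    uint32_t cbCmd = 0;
    writeDefineMob(0, nullptr, 0, &cbCmd);        // pass 1: how much space is needed?

    std::vector<uint8_t> dmaBuffer(cbCmd);        // "reserve" that much space
    bool ok = writeDefineMob(42, dmaBuffer.data(), (uint32_t)dmaBuffer.size(), &cbCmd);
    assert(ok && cbCmd == dmaBuffer.size());      // pass 2: commands were written
    return 0;
}
```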
trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm/gallium/VBoxMPGaWddm.cpp
r106061 r109165 5 5 6 6 /* 7 * Copyright (C) 2016-202 4Oracle and/or its affiliates.7 * Copyright (C) 2016-2025 Oracle and/or its affiliates. 8 8 * 9 9 * This file is part of VirtualBox base platform packages, as … … 183 183 AssertReturn(pSvgaContext, STATUS_INSUFFICIENT_RESOURCES); 184 184 185 pSvgaContext->u32Cid = SVGA3D_INVALID_ID; 186 for (unsigned i = 0; i < RT_ELEMENTS(pSvgaContext->aCOT); ++i) 187 pSvgaContext->aCOT[i].mobid = SVGA3D_INVALID_ID; 185 188 pSvgaContext->fDXContext = RT_BOOL(pInfo->u.vmsvga.u32Flags & VBOXWDDM_F_GA_CONTEXT_VGPU10); 186 189 … … 236 239 { 237 240 PVMSVGACOT pCOT = &pSvgaContext->aCOT[i]; 238 if (pCOT-> pMob)241 if (pCOT->mobid != SVGA3D_INVALID_ID) 239 242 { 240 243 void *pvCmd = SvgaCmdBuf3dCmdReserve(pSvga, SVGA_3D_CMD_DX_SET_COTABLE, sizeof(SVGA3dCmdDXSetCOTable), SVGA3D_INVALID_ID); … … 257 260 258 261 uint32_t cbRequired = 0; 259 SvgaMobDestroy(pSvga, pCOT->pMob, NULL, 0, &cbRequired);262 SvgaMobDestroy(pSvga, SVGA3D_INVALID_ID, NULL, 0, &cbRequired); 260 263 pvCmd = SvgaCmdBufReserve(pSvga, cbRequired, SVGA3D_INVALID_ID); 261 264 if (pvCmd) 262 265 { 263 SvgaMobDestroy(pSvga, pCOT-> pMob, pvCmd, cbRequired, &cbRequired);266 SvgaMobDestroy(pSvga, pCOT->mobid, pvCmd, cbRequired, &cbRequired); 264 267 SvgaCmdBufCommit(pSvga, cbRequired); 265 268 } 266 269 267 pCOT-> pMob = NULL;270 pCOT->mobid = SVGA3D_INVALID_ID; 268 271 } 269 272 } … … 1563 1566 1564 1567 PVBOXWDDM_EXT_VMSVGA pSvga = (PVBOXWDDM_EXT_VMSVGA)Context; 1565 if (pSvga->pMiniportMobData) 1566 { 1567 uint64_t const u64MobFence = ASMAtomicReadU64(&pSvga->pMiniportMobData->u64MobFence); 1568 1569 /* Move mobs which were deleted by the host to the local list under the lock. */ 1570 RTLISTANCHOR listDestroyedMobs; 1571 RTListInit(&listDestroyedMobs); 1572 1573 KIRQL OldIrql; 1574 SvgaHostObjectsLock(pSvga, &OldIrql); 1575 1576 PVMSVGAMOB pIter, pNext; 1577 RTListForEachSafe(&pSvga->listMobDeferredDestruction, pIter, pNext, VMSVGAMOB, node) 1578 { 1579 if (gaFenceCmp64(pIter->u64MobFence, u64MobFence) <= 0) 1580 { 1581 RTListNodeRemove(&pIter->node); 1582 RTListAppend(&listDestroyedMobs, &pIter->node); 1583 } 1584 } 1585 1586 SvgaHostObjectsUnlock(pSvga, OldIrql); 1587 1588 RTListForEachSafe(&listDestroyedMobs, pIter, pNext, VMSVGAMOB, node) 1589 { 1590 /* Delete the data. SvgaMobFree deallocates pIter. */ 1591 RTListNodeRemove(&pIter->node); 1592 SvgaMobFree(pSvga, pIter); 1593 } 1594 } 1568 SvgaDeferredMobDestruction(pSvga); 1569 1570 ASMAtomicDecS32(&pSvga->cQueuedWorkItems); 1595 1571 } 1596 1572 … … 1692 1668 PIO_WORKITEM pWorkItem = IoAllocateWorkItem(pDevExt->pPDO); 1693 1669 if (pWorkItem) 1670 { 1671 ASMAtomicIncS32(&pSvga->cQueuedWorkItems); 1694 1672 IoQueueWorkItemEx(pWorkItem, dxDeferredMobDestruction, DelayedWorkQueue, pSvga); 1673 } 1695 1674 } 1696 1675 }
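VBoxMPGaWddm.cpp also adds an atomic cQueuedWorkItems counter: it is incremented before the deferred-destruction work item is queued and decremented when the work item finishes, so the stop path can tell whether work items are still outstanding. A simplified sketch of that bookkeeping with std::atomic and std::thread, under the assumption that a bounded polling wait is acceptable as in the driver's stop loop:

```cpp
#include <atomic>
#include <chrono>
#include <cstdint>
#include <iostream>
#include <thread>
#include <vector>

static std::atomic<int32_t> g_cQueuedWorkItems{0};  // stands in for cQueuedWorkItems

// Stand-in for the deferred-destruction work item body.
static void workItem()
{
    std::this_thread::sleep_for(std::chrono::milliseconds(10)); // pretend to do work
    g_cQueuedWorkItems.fetch_sub(1);                            // done: drop the count
}

int main()
{
    std::vector<std::thread> threads;
    for (int i = 0; i < 4; ++i)
    {
        g_cQueuedWorkItems.fetch_add(1);        // count before queueing
        threads.emplace_back(workItem);         // "queue" the work item
    }

    // Stop path: poll until no work items are outstanding (bounded in the driver).
    while (g_cQueuedWorkItems.load() != 0)
        std::this_thread::sleep_for(std::chrono::milliseconds(1));

    for (auto &t : threads)
        t.join();
    std::cout << "all work items completed\n";
    return 0;
}
```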