Changeset 99990 in vbox

- Timestamp: May 26, 2023 1:39:41 PM
- svn:sync-xref-src-repo-rev: 157652
- Location: trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm
- Files: 7 edited
trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm/VBoxMPTypes.h
r98103 → r99990

         struct
         {
-            RTR0MEMOBJ hMemObjGB;   /* Guest backing pages for host segment 3. */
-            struct VMSVGAMOB *pMob; /* Mob for the pages. */
-        } gb;
+            struct VMSVGAMOB *pMob; /* Mob for the pages (including RTR0MEMOBJ). */
+        } gb; /** @todo remove the struct */
     };
 } dx;
trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm/gallium/Svga.cpp
r98103 → r99990

                      STATUS_INSUFFICIENT_RESOURCES);
 
-    /* Command buffer completion callback to free the COT. */
+    /* Command buffer completion callback to free the OT. */
     struct VMSVGAOTFREE callbackData;
     callbackData.gbo = pOT->gbo;
…
     SvgaObjectTablesDestroy(pSvga);
 
-    /* Give the host some time to process them. */
-    LARGE_INTEGER Interval;
-    Interval.QuadPart = -(int64_t)100 /* ms */ * 10000;
-    KeDelayExecutionThread(KernelMode, FALSE, &Interval);
+    /* Wait for buffers to complete. Up to 5 seconds, arbitrary. */
+    int cIntervals = 0;
+    while (!SvgaCmdBufIsIdle(pSvga) && cIntervals++ < 50)
+    {
+        /* Give the host some time to process them. */
+        LARGE_INTEGER Interval;
+        Interval.QuadPart = -(int64_t)100 /* ms */ * 10000;
+        KeDelayExecutionThread(KernelMode, FALSE, &Interval);
+    }
+
+    if (pSvga->u32Caps & SVGA_CAP_COMMAND_BUFFERS)
+        SvgaCmdBufDestroy(pSvga);
 
     /* Disable IRQs. */
…
     /* Disable SVGA. */
     SVGARegWrite(pSvga, SVGA_REG_ENABLE, SVGA_REG_ENABLE_DISABLE);
-
-    if (pSvga->u32Caps & SVGA_CAP_COMMAND_BUFFERS)
-        SvgaCmdBufDestroy(pSvga);
 
 }
…
     }
 
+    /* Free the miniport mob at last. Can't use SvgaMobDestroy here because it tells the host to write a fence
+     * value to this mob. */
+    void *pvCmd = SvgaCmdBuf3dCmdReserve(pSvga, SVGA_3D_CMD_DESTROY_GB_MOB, sizeof(SVGA3dCmdDestroyGBMob), SVGA3D_INVALID_ID);
+    if (pvCmd)
+    {
+        SVGA3dCmdDestroyGBMob *pCmd = (SVGA3dCmdDestroyGBMob *)pvCmd;
+        pCmd->mobid = VMSVGAMOB_ID(pSvga->pMiniportMob);
+        SvgaCmdBufCommit(pSvga, sizeof(*pCmd));
+    }
+    else
+        AssertFailed();
+
     svgaHwStop(pSvga);
+
+    SvgaMobFree(pSvga, pSvga->pMiniportMob); /* After svgaHwStop because it waits for command buffer completion. */
 
     Status = pDxgkInterface->DxgkCbUnmapMemory(pDxgkInterface->DeviceHandle,
…
         }
     }
+
+    if (NT_SUCCESS(Status))
+    {
+        uint32_t const cbMiniportMob = RT_ALIGN_32(sizeof(VMSVGAMINIPORTMOB), PAGE_SIZE);
+        RTR0MEMOBJ hMemObjMiniportMob;
+        int rc = RTR0MemObjAllocPageTag(&hMemObjMiniportMob, cbMiniportMob,
+                                        false /* executable R0 mapping */, "VMSVGAMOB0");
+        if (RT_SUCCESS(rc))
+        {
+            Status = SvgaMobCreate(pSvga, &pSvga->pMiniportMob, cbMiniportMob / PAGE_SIZE, 0);
+            if (NT_SUCCESS(Status))
+            {
+                Status = SvgaMobSetMemObj(pSvga->pMiniportMob, hMemObjMiniportMob);
+                if (NT_SUCCESS(Status))
+                {
+                    void *pvCmd = SvgaCmdBuf3dCmdReserve(pSvga, SVGA_3D_CMD_DEFINE_GB_MOB64, sizeof(SVGA3dCmdDefineGBMob64), SVGA3D_INVALID_ID);
+                    if (pvCmd)
+                    {
+                        SVGA3dCmdDefineGBMob64 *pCmd = (SVGA3dCmdDefineGBMob64 *)pvCmd;
+                        pCmd->mobid = VMSVGAMOB_ID(pSvga->pMiniportMob);
+                        pCmd->ptDepth = pSvga->pMiniportMob->gbo.enmMobFormat;
+                        pCmd->base = pSvga->pMiniportMob->gbo.base;
+                        pCmd->sizeInBytes = pSvga->pMiniportMob->gbo.cbGbo;
+                        SvgaCmdBufCommit(pSvga, sizeof(*pCmd));
+
+                        pSvga->pMiniportMobData = (VMSVGAMINIPORTMOB volatile *)RTR0MemObjAddress(hMemObjMiniportMob);
+                        memset((void *)pSvga->pMiniportMobData, 0, cbMiniportMob);
+                        RTListInit(&pSvga->listMobDeferredDestruction);
+                        //pSvga->u64MobFence = 0;
+                    }
+                    else
+                        AssertFailedStmt(Status = STATUS_INSUFFICIENT_RESOURCES);
+                }
+            }
+        }
+        else
+            AssertFailedStmt(Status = STATUS_INSUFFICIENT_RESOURCES);
+    }
     }
 }
…
 }
 
-    if (NT_SUCCESS(Status))
-    {
-        *ppSvga = pSvga;
-    }
+    /* Caller's 'cleanup on error' code needs this pointer. */
+    *ppSvga = pSvga;
 
     return Status;
…
 } GAWDDMREGION;
 
+/** @todo Deferred destruction. */
 static void svgaFreeGBMobForGMR(VBOXWDDM_EXT_VMSVGA *pSvga, PVMSVGAMOB pMob)
 {
…
     if (NT_SUCCESS(Status))
     {
-        Status = SvgaGboFillPageTableForMemObj(&pRegion->pMob->gbo, pRegion->MemObj);
+        Status = SvgaMobSetMemObj(pRegion->pMob, pRegion->MemObj);
         Assert(NT_SUCCESS(Status));
         if (NT_SUCCESS(Status))
…
         pRegion->MapObjR3 = NIL_RTR0MEMOBJ;
     }
-    if (pRegion->MemObj != NIL_RTR0MEMOBJ)
-    {
-        int rc = RTR0MemObjFree(pRegion->MemObj, true /* fFreeMappings */);
-        AssertRC(rc);
-        pRegion->MemObj = NIL_RTR0MEMOBJ;
+    if (pRegion->pMob == NULL)
+    {
+        /* Otherwise the memory will be deleted by SvgaMobFree. */
+        if (pRegion->MemObj != NIL_RTR0MEMOBJ)
+        {
+            int rc = RTR0MemObjFree(pRegion->MemObj, true /* fFreeMappings */);
+            AssertRC(rc);
+            pRegion->MemObj = NIL_RTR0MEMOBJ;
+        }
     }
 }
…
     SvgaGboFree(&pMob->gbo);
 
+    if (pMob->hMemObj != NIL_RTR0MEMOBJ)
+    {
+        int rc = RTR0MemObjFree(pMob->hMemObj, true);
+        AssertRC(rc);
+        pMob->hMemObj = NIL_RTR0MEMOBJ;
+    }
+
     NTSTATUS Status = SvgaMobIdFree(pSvga, VMSVGAMOB_ID(pMob));
     Assert(NT_SUCCESS(Status)); RT_NOREF(Status);
…
 }
 
-
-struct VMSVGACOTFREE
-{
-    PVMSVGAMOB pMob;
-    RTR0MEMOBJ hMemObj;
-};
-
-
-static DECLCALLBACK(void) svgaCOTMobFreeCb(VBOXWDDM_EXT_VMSVGA *pSvga, void *pvData, uint32_t cbData)
-{
-    AssertReturnVoid(cbData == sizeof(struct VMSVGACOTFREE));
-    struct VMSVGACOTFREE *p = (struct VMSVGACOTFREE *)pvData;
-    SvgaMobFree(pSvga, p->pMob);
-    RTR0MemObjFree(p->hMemObj, true);
+NTSTATUS SvgaMobSetMemObj(PVMSVGAMOB pMob,
+                          RTR0MEMOBJ hMemObj)
+{
+    NTSTATUS Status = SvgaGboFillPageTableForMemObj(&pMob->gbo, hMemObj);
+    if (NT_SUCCESS(Status))
+        pMob->hMemObj = hMemObj;
+    return Status;
 }
 
…
                      Status);
 
-    Status = SvgaGboFillPageTableForMemObj(&pMob->gbo, hMemObjCOT);
+    Status = SvgaMobSetMemObj(pMob, hMemObjCOT);
     AssertReturnStmt(NT_SUCCESS(Status),
                      SvgaMobFree(pSvga, pMob); RTR0MemObjFree(hMemObjCOT, true),
…
                            STATUS_INSUFFICIENT_RESOURCES);
 
-        pvCmd = SvgaCmdBuf3dCmdReserve(pSvga, SVGA_3D_CMD_DESTROY_GB_MOB, sizeof(SVGA3dCmdDestroyGBMob), SVGA3D_INVALID_ID);
+        uint32_t cbCmdRequired = 0;
+        SvgaMobDestroy(pSvga, pCOT->pMob, NULL, 0, &cbCmdRequired);
+        pvCmd = SvgaCmdBufReserve(pSvga, cbCmdRequired, SVGA3D_INVALID_ID);
         if (pvCmd)
         {
-            SVGA3dCmdDestroyGBMob *pCmd = (SVGA3dCmdDestroyGBMob *)pvCmd;
-            pCmd->mobid = VMSVGAMOB_ID(pCOT->pMob);
-            SvgaCmdBufCommit(pSvga, sizeof(*pCmd));
-        }
-        else
-            AssertFailedReturnStmt(SvgaMobFree(pSvga, pMob); RTR0MemObjFree(hMemObjCOT, true),
-                                   STATUS_INSUFFICIENT_RESOURCES);
-
-        /* Command buffer completion callback to free the COT. */
-        struct VMSVGACOTFREE callbackData;
-        callbackData.pMob = pCOT->pMob;
-        callbackData.hMemObj = pCOT->hMemObj;
-        SvgaCmdBufSetCompletionCallback(pSvga, svgaCOTMobFreeCb, &callbackData, sizeof(callbackData));
+            SvgaMobDestroy(pSvga, pCOT->pMob, pvCmd, cbCmdRequired, &cbCmdRequired);
+            SvgaCmdBufCommit(pSvga, cbCmdRequired);
+        }
 
         pCOT->pMob = NULL;
-        pCOT->hMemObj = NIL_RTR0MEMOBJ;
     }
…
     pCOT->pMob = pMob;
-    pCOT->hMemObj = hMemObjCOT;
     pCOT->cEntries = cbCOT / s_acbEntry[enmType];
 
     return STATUS_SUCCESS;
 }
+
+
+/*
+ * Place mob destruction commands into the buffer and add the mob to the deferred destruction list.
+ *
+ * Makes sure that the MOB, in particular the mobid, is deallocated by the guest only after the MOB deletion
+ * has been completed by the host.
+ *
+ * SVGA_3D_CMD_DESTROY_GB_MOB can be submitted to the host either in the miniport command buffer
+ * (VMSVGACBSTATE::pCBCurrent) or in a paging buffer due to a DXGK_OPERATION_UNMAP_APERTURE_SEGMENT operation.
+ * These two ways are not synchronized. Therefore it is possible that the guest deletes a mob for an aperture segment
+ * in a paging buffer, then allocates the same mobid and sends SVGA_3D_CMD_DEFINE_GB_MOB64 to the host for a COTable
+ * before the paging buffer is sent to the host.
+ *
+ * The driver therefore uses the SVGA_3D_CMD_DX_MOB_FENCE_64 command to learn that the host has deleted a mob,
+ * and frees deleted mobs in the DPC routine.
+ */
+NTSTATUS SvgaMobDestroy(VBOXWDDM_EXT_VMSVGA *pSvga,
+                        PVMSVGAMOB pMob,
+                        void *pvCmd,
+                        uint32_t cbReserved,
+                        uint32_t *pcbCmd)
+{
+    uint32_t cbRequired = sizeof(SVGA3dCmdHeader) + sizeof(SVGA3dCmdDestroyGBMob)
+                        + sizeof(SVGA3dCmdHeader) + sizeof(SVGA3dCmdDXMobFence64);
+
+    *pcbCmd = cbRequired;
+    if (cbReserved < cbRequired)
+        return STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER;
+
+    uint8_t *pu8Cmd = (uint8_t *)pvCmd;
+    SVGA3dCmdHeader *pHdr;
+
+    pHdr = (SVGA3dCmdHeader *)pu8Cmd;
+    pHdr->id = SVGA_3D_CMD_DESTROY_GB_MOB;
+    pHdr->size = sizeof(SVGA3dCmdDestroyGBMob);
+    pu8Cmd += sizeof(*pHdr);
+
+    {
+        SVGA3dCmdDestroyGBMob *pCmd = (SVGA3dCmdDestroyGBMob *)pu8Cmd;
+        pCmd->mobid = VMSVGAMOB_ID(pMob);
+        pu8Cmd += sizeof(*pCmd);
+    }
+
+    pMob->u64MobFence = ASMAtomicIncU64(&pSvga->u64MobFence);
+
+    pHdr = (SVGA3dCmdHeader *)pu8Cmd;
+    pHdr->id = SVGA_3D_CMD_DX_MOB_FENCE_64;
+    pHdr->size = sizeof(SVGA3dCmdDXMobFence64);
+    pu8Cmd += sizeof(*pHdr);
+
+    {
+        SVGA3dCmdDXMobFence64 *pCmd = (SVGA3dCmdDXMobFence64 *)pu8Cmd;
+        pCmd->value = pMob->u64MobFence;
+        pCmd->mobId = VMSVGAMOB_ID(pSvga->pMiniportMob);
+        pCmd->mobOffset = RT_OFFSETOF(VMSVGAMINIPORTMOB, u64MobFence);
+        pu8Cmd += sizeof(*pCmd);
+    }
+
+    /* Add the mob to the deferred destruction queue. */
+    KIRQL OldIrql;
+    SvgaHostObjectsLock(pSvga, &OldIrql);
+    RTListAppend(&pSvga->listMobDeferredDestruction, &pMob->node);
+    SvgaHostObjectsUnlock(pSvga, OldIrql);
+
+    Assert((uintptr_t)pu8Cmd - (uintptr_t)pvCmd == cbRequired);
+
+    return STATUS_SUCCESS;
+}
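Every teardown path in this changeset now follows the same two-pass pattern: call SvgaMobDestroy once with no buffer to learn how many bytes the destroy-plus-fence commands need, reserve that much space, then call it again to emit the commands and queue the mob for deferred destruction. A minimal sketch of that calling sequence, modelled on the COTable and guest-backed allocation paths above (the helper name is illustrative, not part of the source):

    /* Sketch: the two-pass SvgaMobDestroy calling pattern used throughout this changeset. */
    static void sketchDeferredMobDestroy(VBOXWDDM_EXT_VMSVGA *pSvga, PVMSVGAMOB pMob)
    {
        uint32_t cbRequired = 0;
        /* Pass 1: no buffer, only report the size of SVGA_3D_CMD_DESTROY_GB_MOB plus
         * SVGA_3D_CMD_DX_MOB_FENCE_64 (returns STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER). */
        SvgaMobDestroy(pSvga, pMob, NULL, 0, &cbRequired);

        void *pvCmd = SvgaCmdBufReserve(pSvga, cbRequired, SVGA3D_INVALID_ID);
        if (pvCmd)
        {
            /* Pass 2: write both commands and append the mob to listMobDeferredDestruction;
             * the DPC calls SvgaMobFree once the host has written the fence value
             * into the miniport mob. */
            SvgaMobDestroy(pSvga, pMob, pvCmd, cbRequired, &cbRequired);
            SvgaCmdBufCommit(pSvga, cbRequired);
        }
    }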
trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm/gallium/Svga.h
r98103 → r99990

     HANDLE hAllocation;   /* Allocation which is bound to the mob. */
     VMSVGAGBO gbo;        /* Gbo for this mob. */
+    RTR0MEMOBJ hMemObj;   /* The guest memory if allocated by the miniport. */
+    uint64_t u64MobFence; /* Freed by the guest when the host reports this fence value. */
+    RTLISTNODE node;      /* VBOXWDDM_EXT_VMSVGA::listMobDeferredDestruction */
 } VMSVGAMOB, *PVMSVGAMOB;
 
…
     VMSVGAOT aOT[SVGA_OTABLE_DX_MAX];
 
+    PVMSVGAMOB pMiniportMob;                             /* Used by the miniport to communicate with the device. */
+    struct VMSVGAMINIPORTMOB volatile *pMiniportMobData; /* Pointer to the miniport mob content. */
+
+    uint64_t volatile u64MobFence;
+    RTLISTANCHOR listMobDeferredDestruction;             /* Mobs to be deleted after the host reports their fence value. */
+
     /** Bitmap of used GMR ids. Bit 0 - GMR id 0, etc. */
     uint32_t *pu32GMRBits; /* Number of GMRs is controlled by the host (u32GmrMaxIds), so allocate the bitmap. */
…
 typedef struct VBOXWDDM_EXT_VMSVGA *PVBOXWDDM_EXT_VMSVGA;
 
+typedef struct VMSVGAMINIPORTMOB
+{
+    uint64_t u64MobFence; /* Host writes SVGA3dCmdDXMobFence64::value here. */
+} VMSVGAMINIPORTMOB;
+
 typedef struct VMSVGACOT
 {
     PVMSVGAMOB pMob;    /* COTable mob. */
-    RTR0MEMOBJ hMemObj; /* COTable pages. */
     uint32_t cEntries;  /* How many objects can be stored in the COTable. */
 } VMSVGACOT, *PVMSVGACOT;
…
                        uint32_t cMobPages,
                        HANDLE hAllocation);
+NTSTATUS SvgaMobSetMemObj(PVMSVGAMOB pMob,
+                          RTR0MEMOBJ hMemObj);
+NTSTATUS SvgaMobDestroy(VBOXWDDM_EXT_VMSVGA *pSvga,
+                        PVMSVGAMOB pMob,
+                        void *pvCmd,
+                        uint32_t cbReserved,
+                        uint32_t *pcbCmd);
 
 NTSTATUS SvgaCOTNotifyId(VBOXWDDM_EXT_VMSVGA *pSvga,
trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm/gallium/SvgaFifo.cpp
r99862 → r99990

 
 
+bool SvgaCmdBufIsIdle(PVBOXWDDM_EXT_VMSVGA pSvga)
+{
+    PVMSVGACBSTATE pCBState = pSvga->pCBState;
+
+    bool fIdle = true;
+
+    KIRQL OldIrql;
+    KeAcquireSpinLock(&pCBState->SpinLock, &OldIrql);
+    for (unsigned i = 0; i < RT_ELEMENTS(pCBState->aCBContexts); ++i)
+    {
+        PVMSVGACBCONTEXT pCBCtx = &pCBState->aCBContexts[i];
+        if (pCBCtx->cSubmitted > 0)
+        {
+            fIdle = false;
+            break;
+        }
+    }
+    KeReleaseSpinLock(&pCBState->SpinLock, OldIrql);
+
+    return fIdle;
+}
+
+
 void SvgaCmdBufSetCompletionCallback(PVBOXWDDM_EXT_VMSVGA pSvga, PFNCBCOMPLETION pfn, void const *pv, uint32_t cb)
 {
trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm/gallium/SvgaFifo.h
r98103 → r99990

 void SvgaCmdBufProcess(PVBOXWDDM_EXT_VMSVGA pSvga);
 void SvgaCmdBufSetCompletionCallback(PVBOXWDDM_EXT_VMSVGA pSvga, PFNCBCOMPLETION pfn, void const *pv, uint32_t cb);
+bool SvgaCmdBufIsIdle(PVBOXWDDM_EXT_VMSVGA pSvga);
 
 NTSTATUS SvgaCmdBufAllocUMD(PVBOXWDDM_EXT_VMSVGA pSvga, PHYSICAL_ADDRESS DmaBufferPhysicalAddress,
trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm/gallium/VBoxMPDX.cpp
r99677 → r99990

     AssertReturnVoid(pAllocation->dx.SegmentId == 3 || pAllocation->dx.desc.fPrimary);
 
-    void *pvCmd = SvgaCmdBuf3dCmdReserve(pSvga, SVGA_3D_CMD_DESTROY_GB_MOB, sizeof(SVGA3dCmdDestroyGBMob), SVGA3D_INVALID_ID);
+    uint32_t cbRequired = 0;
+    SvgaMobDestroy(pSvga, pAllocation->dx.gb.pMob, NULL, 0, &cbRequired);
+    void *pvCmd = SvgaCmdBufReserve(pSvga, cbRequired, SVGA3D_INVALID_ID);
     if (pvCmd)
     {
-        SVGA3dCmdDestroyGBMob *pCmd = (SVGA3dCmdDestroyGBMob *)pvCmd;
-        pCmd->mobid = VMSVGAMOB_ID(pAllocation->dx.gb.pMob);
-        SvgaCmdBufCommit(pSvga, sizeof(SVGA3dCmdDestroyGBMob));
-    }
-
-    if (pAllocation->dx.gb.pMob)
-    {
-        SvgaMobFree(pSvga, pAllocation->dx.gb.pMob);
-        pAllocation->dx.gb.pMob = NULL;
-    }
-
-    if (pAllocation->dx.gb.hMemObjGB != NIL_RTR0MEMOBJ)
-    {
-        RTR0MemObjFree(pAllocation->dx.gb.hMemObjGB, true);
-        pAllocation->dx.gb.hMemObjGB = NIL_RTR0MEMOBJ;
-    }
-
+        SvgaMobDestroy(pSvga, pAllocation->dx.gb.pMob, pvCmd, cbRequired, &cbRequired);
+        SvgaCmdBufCommit(pSvga, cbRequired);
+    }
+
+    pAllocation->dx.gb.pMob = NULL;
     pAllocation->dx.mobid = SVGA3D_INVALID_ID;
 }
…
 
     /* Allocate guest backing pages. */
-    int rc = RTR0MemObjAllocPageTag(&pAllocation->dx.gb.hMemObjGB, cbGB, false /* executable R0 mapping */, "VMSVGAGB");
+    RTR0MEMOBJ hMemObjGB;
+    int rc = RTR0MemObjAllocPageTag(&hMemObjGB, cbGB, false /* executable R0 mapping */, "VMSVGAGB");
     AssertRCReturn(rc, STATUS_INSUFFICIENT_RESOURCES);
 
…
     if (NT_SUCCESS(Status))
     {
-        Status = SvgaGboFillPageTableForMemObj(&pAllocation->dx.gb.pMob->gbo, pAllocation->dx.gb.hMemObjGB);
+        Status = SvgaMobSetMemObj(pAllocation->dx.gb.pMob, hMemObjGB);
         Assert(NT_SUCCESS(Status));
         if (NT_SUCCESS(Status))
…
     if (pBuildPagingBuffer->Fill.Destination.SegmentId == 3 || pAllocation->dx.desc.fPrimary)
     {
-        AssertReturn(pAllocation->dx.gb.hMemObjGB != NIL_RTR0MEMOBJ, STATUS_INVALID_PARAMETER);
-        pvDst = RTR0MemObjAddress(pAllocation->dx.gb.hMemObjGB);
+        AssertReturn(pAllocation->dx.gb.pMob->hMemObj != NIL_RTR0MEMOBJ, STATUS_INVALID_PARAMETER);
+        pvDst = RTR0MemObjAddress(pAllocation->dx.gb.pMob->hMemObj);
     }
     else
…
     AssertReturn(pMob, STATUS_INVALID_PARAMETER);
 
-    uint32_t cbRequired = sizeof(SVGA3dCmdHeader) + sizeof(SVGA3dCmdDestroyGBMob);
+    uint32_t cbRequired = 0;
+    SvgaMobDestroy(pSvga, pMob, NULL, 0, &cbRequired);
     if (pAllocation->dx.desc.enmAllocationType == VBOXDXALLOCATIONTYPE_SURFACE)
-    {
         cbRequired += sizeof(SVGA3dCmdHeader) + sizeof(SVGA3dCmdBindGBSurface);
-    }
 
     if (pBuildPagingBuffer->DmaSize < cbRequired)
-    {
         return STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER;
-    }
 
     uint8_t *pu8Cmd = (uint8_t *)pBuildPagingBuffer->pDmaBuffer;
…
     }
 
-    pHdr = (SVGA3dCmdHeader *)pu8Cmd;
-    pHdr->id = SVGA_3D_CMD_DESTROY_GB_MOB;
-    pHdr->size = sizeof(SVGA3dCmdDestroyGBMob);
-    pu8Cmd += sizeof(*pHdr);
-
-    {
-        SVGA3dCmdDestroyGBMob *pCmd = (SVGA3dCmdDestroyGBMob *)pu8Cmd;
-        pCmd->mobid = pAllocation->dx.mobid;
-        pu8Cmd += sizeof(*pCmd);
-    }
+    uint32_t cbCmd = 0;
+    NTSTATUS Status = SvgaMobDestroy(pSvga, pMob, pu8Cmd,
+                                     cbRequired - ((uintptr_t)pu8Cmd - (uintptr_t)pBuildPagingBuffer->pDmaBuffer),
+                                     &cbCmd);
+    AssertReturn(NT_SUCCESS(Status), Status);
+    pu8Cmd += cbCmd;
+
+    pAllocation->dx.mobid = SVGA3D_INVALID_ID;
 
     *pcbCommands = (uintptr_t)pu8Cmd - (uintptr_t)pBuildPagingBuffer->pDmaBuffer;
-
-    SvgaMobFree(pSvga, pMob);
-    pAllocation->dx.mobid = SVGA3D_INVALID_ID;
-
     return STATUS_SUCCESS;
 }
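In the paging-buffer path the command space comes from dxgkrnl rather than from the miniport command buffer, so the size reported by SvgaMobDestroy is checked against DmaSize up front and the operation is failed with STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER if it does not fit, so it can be retried with a larger buffer. A condensed sketch of that size negotiation (helper name and parameters are illustrative only):

    /* Sketch: DMA-buffer size check for the unmap-aperture paging operation. */
    static NTSTATUS sketchUnmapApertureSize(DXGKARG_BUILDPAGINGBUFFER *pBuildPagingBuffer,
                                            VBOXWDDM_EXT_VMSVGA *pSvga, PVMSVGAMOB pMob,
                                            bool fSurface, uint32_t *pcbRequired)
    {
        uint32_t cbRequired = 0;
        SvgaMobDestroy(pSvga, pMob, NULL, 0, &cbRequired);  /* destroy + mob-fence commands */
        if (fSurface)                                       /* surfaces are unbound from the mob first */
            cbRequired += sizeof(SVGA3dCmdHeader) + sizeof(SVGA3dCmdBindGBSurface);

        *pcbRequired = cbRequired;
        return pBuildPagingBuffer->DmaSize < cbRequired
             ? STATUS_GRAPHICS_INSUFFICIENT_DMA_BUFFER      /* ask for a retry with more space */
             : STATUS_SUCCESS;
    }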
trunk/src/VBox/Additions/WINNT/Graphics/Video/mp/wddm/gallium/VBoxMPGaWddm.cpp
r99833 → r99990

     }
 
-    pvCmd = SvgaCmdBuf3dCmdReserve(pSvga, SVGA_3D_CMD_DESTROY_GB_MOB, sizeof(SVGA3dCmdDestroyGBMob), SVGA3D_INVALID_ID);
+    uint32_t cbRequired = 0;
+    SvgaMobDestroy(pSvga, pCOT->pMob, NULL, 0, &cbRequired);
+    pvCmd = SvgaCmdBufReserve(pSvga, cbRequired, SVGA3D_INVALID_ID);
     if (pvCmd)
     {
-        SVGA3dCmdDestroyGBMob *pCmd = (SVGA3dCmdDestroyGBMob *)pvCmd;
-        pCmd->mobid = VMSVGAMOB_ID(pCOT->pMob);
-        SvgaCmdBufCommit(pSvga, sizeof(*pCmd));
+        SvgaMobDestroy(pSvga, pCOT->pMob, pvCmd, cbRequired, &cbRequired);
+        SvgaCmdBufCommit(pSvga, cbRequired);
     }
 
-    SvgaMobFree(pSvga, pCOT->pMob);
     pCOT->pMob = NULL;
-    }
-
-    if (pCOT->hMemObj != NIL_RTR0MEMOBJ)
-    {
-        RTR0MemObjFree(pCOT->hMemObj, true);
-        pCOT->hMemObj = NIL_RTR0MEMOBJ;
     }
 }
…
     }
     else if (u32FenceA == u32FenceB)
+    {
+        /* FenceA is equal to FenceB. */
+        return 0;
+    }
+
+    /* FenceA is older than FenceB. */
+    return 1;
+}
+
+
+static int gaFenceCmp64(uint64_t u64FenceA, uint64_t u64FenceB)
+{
+    if (   u64FenceA < u64FenceB
+        || u64FenceA - u64FenceB > UINT64_MAX / 2)
+    {
+        return -1; /* FenceA is newer than FenceB. */
+    }
+    else if (u64FenceA == u64FenceB)
     {
         /* FenceA is equal to FenceB. */
…
     if (ASMAtomicCmpXchgBool(&pSvga->fCommandBufferIrq, false, true) && pSvga->pCBState)
         SvgaCmdBufProcess(pSvga);
+
+    /*
+     * Deferred MOB destruction.
+     */
+    if (pSvga->pMiniportMobData)
+    {
+        uint64_t const u64MobFence = pSvga->pMiniportMobData->u64MobFence;
+
+        /* Move mobs which were deleted by the host to the local list under the lock. */
+        RTLISTANCHOR listDestroyedMobs;
+        RTListInit(&listDestroyedMobs);
+
+        SvgaHostObjectsLock(pSvga, &OldIrql);
+
+        if (!RTListIsEmpty(&pSvga->listMobDeferredDestruction))
+        {
+            PVMSVGAMOB pIter, pNext;
+            RTListForEachSafe(&pSvga->listMobDeferredDestruction, pIter, pNext, VMSVGAMOB, node)
+            {
+                if (gaFenceCmp64(pIter->u64MobFence, u64MobFence) <= 0)
+                {
+                    RTListNodeRemove(&pIter->node);
+                    RTListAppend(&listDestroyedMobs, &pIter->node);
+                }
+            }
+        }
+
+        SvgaHostObjectsUnlock(pSvga, OldIrql);
+
+        if (!RTListIsEmpty(&listDestroyedMobs))
+        {
+            PVMSVGAMOB pIter, pNext;
+            RTListForEachSafe(&listDestroyedMobs, pIter, pNext, VMSVGAMOB, node)
+            {
+                /* Delete the data. SvgaMobFree deallocates pIter. */
+                RTListNodeRemove(&pIter->node);
+                SvgaMobFree(pSvga, pIter);
+            }
+        }
+    }
 
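gaFenceCmp64 follows the same pattern as the existing 32-bit gaFenceCmp: the comparison against half the value range keeps the ordering decision correct even if the fence counter ever wrapped around. In the DPC above, a mob is freed when gaFenceCmp64(mob fence, reported fence) <= 0. A small standalone illustration of the same comparison rule (the function below merely mirrors gaFenceCmp64 and is not part of the changeset):

    #include <assert.h>
    #include <stdint.h>

    /* Same rule as gaFenceCmp64 in VBoxMPGaWddm.cpp. */
    static int fenceCmp64(uint64_t a, uint64_t b)
    {
        if (a < b || a - b > UINT64_MAX / 2)
            return -1;
        return a == b ? 0 : 1;
    }

    int main(void)
    {
        /* A mob fenced with value 10 is freed once the host reports 10 or more. */
        assert(fenceCmp64(10, 9) > 0);    /* host not there yet, keep the mob */
        assert(fenceCmp64(10, 10) == 0);  /* reached, free it */
        assert(fenceCmp64(10, 11) < 0);   /* passed, free it */

        /* The half-range test keeps this working across a (theoretical) wrap of
         * the 64-bit counter: a fence issued just before the wrap still counts
         * as reached once the counter restarts. */
        assert(fenceCmp64(UINT64_MAX, 1) < 0);
        return 0;
    }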