Changeset 51005 in vbox for trunk/src/VBox/Devices/Graphics
- Timestamp: Apr 9, 2014 8:58:50 AM
- Location: trunk/src/VBox/Devices/Graphics
- Files: 4 edited
trunk/src/VBox/Devices/Graphics/DevVGA.cpp
r50940 → r51005
     SSMR3PutBool(pSSM, true);
     int rc = vboxVBVASaveStateExec(pDevIns, pSSM);
-# ifdef VBOX_WITH_VDMA
-    vboxVDMASaveStateExecDone(pThis->pVdma, pSSM);
-# endif
 #else
     int rc = SSMR3PutBool(pSSM, false);
+#endif
+
+    AssertRCReturn(rc, rc);
+
+#ifdef VBOX_WITH_VDMA
+    rc = SSMR3PutU32(pSSM, 1);
+    AssertRCReturn(rc, rc);
+    rc = vboxVDMASaveStateExecPerform(pThis->pVdma, pSSM);
+#else
+    rc = SSMR3PutU32(pSSM, 0);
+#endif
+    AssertRCReturn(rc, rc);
+
+#ifdef VBOX_WITH_VDMA
+    vboxVDMASaveStateExecDone(pThis->pVdma, pSSM);
 #endif
 
…
 #endif
     }
+
+    if (uVersion >= VGA_SAVEDSTATE_VERSION_3D)
+    {
+        uint32_t u32;
+        rc = SSMR3GetU32(pSSM, &u32);
+        if (u32)
+        {
+#ifdef VBOX_WITH_VDMA
+            if (u32 == 1)
+            {
+                rc = vboxVDMASaveLoadExecPerform(pThis->pVdma, pSSM, uVersion);
+                AssertRCReturn(rc, rc);
+            }
+            else
+#endif
+            {
+                LogRel(("invalid CmdVbva version info\n"));
+                return VERR_VERSION_MISMATCH;
+            }
+        }
+    }
+
 #ifdef VBOX_WITH_VMSVGA
     if (   uVersion >= VGA_SAVEDSTATE_VERSION_VMSVGA_2D
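Taken together, the DevVGA.cpp hunks change the saved-state layout: the save path now always writes a 32-bit marker after the VBVA section (1 followed by the command-VBVA state when VBOX_WITH_VDMA is built in, 0 otherwise), and the load path only consumes that marker for streams of version VGA_SAVEDSTATE_VERSION_3D or newer, rejecting any value other than 0 or 1. Below is a minimal standalone C sketch of that handshake; it is illustrative only — a plain in-memory buffer and hypothetical put_u32/get_u32 helpers stand in for the real SSMR3PutU32/SSMR3GetU32 calls on a PSSMHANDLE.

/* Toy model of the new save/load marker, not VirtualBox code. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define VGA_SAVEDSTATE_VERSION_3D 13

typedef struct { uint8_t ab[64]; size_t off; } STREAM;

static void put_u32(STREAM *p, uint32_t u) { memcpy(&p->ab[p->off], &u, 4); p->off += 4; }
static uint32_t get_u32(STREAM *p) { uint32_t u; memcpy(&u, &p->ab[p->off], 4); p->off += 4; return u; }

/* Save side: write 1 if VDMA command-VBVA state follows, 0 otherwise. */
static void save(STREAM *p, int fHaveVdma)
{
    put_u32(p, fHaveVdma ? 1 : 0);
    if (fHaveVdma)
        put_u32(p, 0xffffffff); /* placeholder for what vboxVDMASaveStateExecPerform() would emit */
}

/* Load side: only streams of version >= 13 carry the marker, and only value 1 is understood. */
static int load(STREAM *p, uint32_t uVersion)
{
    if (uVersion < VGA_SAVEDSTATE_VERSION_3D)
        return 0;                 /* old stream: nothing to read */
    uint32_t u32 = get_u32(p);
    if (u32 == 0)
        return 0;                 /* saved without VDMA support */
    if (u32 != 1)
        return -1;                /* corresponds to the "invalid CmdVbva version info" branch */
    (void)get_u32(p);             /* consume the VDMA payload */
    return 0;
}

int main(void)
{
    STREAM s = { {0}, 0 };
    save(&s, 1);
    s.off = 0;
    printf("load rc=%d\n", load(&s, VGA_SAVEDSTATE_VERSION_3D));
    return 0;
}

Compiled on its own the sketch prints load rc=0; handing load() a marker value other than 0 or 1 exercises the same rejection path that DevVGA.cpp reports as "invalid CmdVbva version info".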
trunk/src/VBox/Devices/Graphics/DevVGA.h
r50940 → r51005
 int vboxVDMASaveStateExecPrep(struct VBOXVDMAHOST *pVdma, PSSMHANDLE pSSM);
 int vboxVDMASaveStateExecDone(struct VBOXVDMAHOST *pVdma, PSSMHANDLE pSSM);
+int vboxVDMASaveStateExecPerform(struct VBOXVDMAHOST *pVdma, PSSMHANDLE pSSM);
+int vboxVDMASaveLoadExecPerform(struct VBOXVDMAHOST *pVdma, PSSMHANDLE pSSM, uint32_t u32Version);
 # endif /* VBOX_WITH_VDMA */
 
trunk/src/VBox/Devices/Graphics/DevVGASavedState.h
r50518 → r51005
 #define Graphics_DevVGASavedState_h
 
-#define VGA_SAVEDSTATE_VERSION 12
+#define VGA_SAVEDSTATE_VERSION 13
+#define VGA_SAVEDSTATE_VERSION_3D 13
 #define VGA_SAVEDSTATE_VERSION_HGSMIMA 12 /* HGSMI memory allocator. */
 #define VGA_SAVEDSTATE_VERSION_VMSVGA 11
trunk/src/VBox/Devices/Graphics/DevVGA_VDMA.cpp
r50940 → r51005
 } while (0)
 
-#define VBOXVDMATHREAD_STATE_TERMINATED 0
-#define VBOXVDMATHREAD_STATE_CREATED 1
-#define VBOXVDMATHREAD_STATE_TERMINATING 2
+#define VBOXVDMATHREAD_STATE_TERMINATED 0
+#define VBOXVDMATHREAD_STATE_CREATING 1
+#define VBOXVDMATHREAD_STATE_CREATED 3
+#define VBOXVDMATHREAD_STATE_TERMINATING 4
+
+struct VBOXVDMATHREAD;
+
+typedef DECLCALLBACKPTR(void, PFNVBOXVDMATHREAD_CHANGED)(struct VBOXVDMATHREAD *pThread, int rc, void *pvThreadContext, void *pvChangeContext);
 
 typedef struct VBOXVDMATHREAD
…
     RTTHREAD hWorkerThread;
     RTSEMEVENT hEvent;
-    RTSEMEVENT hClientEvent;
     volatile uint32_t u32State;
+    PFNVBOXVDMATHREAD_CHANGED pfnChanged;
+    void *pvChanged;
 } VBOXVDMATHREAD, *PVBOXVDMATHREAD;
 
…
     VBVAEXHOSTCTL_TYPE_HH_INTERNAL_PAUSE,
     VBVAEXHOSTCTL_TYPE_HH_INTERNAL_RESUME,
-    VBVAEXHOSTCTL_TYPE_HH_ENABLE,
-    VBVAEXHOSTCTL_TYPE_HH_TERM,
-    VBVAEXHOSTCTL_TYPE_HH_ON_HGCM_TERM,
-    VBVAEXHOSTCTL_TYPE_HH_RESET,
     VBVAEXHOSTCTL_TYPE_HH_SAVESTATE,
     VBVAEXHOSTCTL_TYPE_HH_LOADSTATE,
     VBVAEXHOSTCTL_TYPE_HH_BE_OPAQUE,
+    VBVAEXHOSTCTL_TYPE_HH_ON_HGCM_UNLOAD,
     VBVAEXHOSTCTL_TYPE_GHH_BE_OPAQUE,
-    VBVAEXHOSTCTL_TYPE_GH_ENABLE_DISABLE
+    VBVAEXHOSTCTL_TYPE_GHH_ENABLE,
+    VBVAEXHOSTCTL_TYPE_GHH_DISABLE,
+    VBVAEXHOSTCTL_TYPE_GH_MIN = VBVAEXHOSTCTL_TYPE_GHH_BE_OPAQUE,
+    VBVAEXHOSTCTL_TYPE_GH_MAX = VBVAEXHOSTCTL_TYPE_GHH_DISABLE
 } VBVAEXHOSTCTL_TYPE;
…
 static VBVAEXHOSTCTL* VBoxVBVAExHPCheckHostCtlOnDisable(struct VBVAEXHOSTCONTEXT *pCmdVbva)
 {
-    bool fHostCtl;
-    return vboxVBVAExHPCheckCtl(pCmdVbva, &fHostCtl, true);
-}
-
+    bool fHostCtl = false;
+    VBVAEXHOSTCTL* pCtl = vboxVBVAExHPCheckCtl(pCmdVbva, &fHostCtl, true);
+    Assert(!pCtl || fHostCtl);
+    return pCtl;
+}
 
 static bool vboxVBVAExHPCheckProcessCtlInternal(struct VBVAEXHOSTCONTEXT *pCmdVbva, VBVAEXHOSTCTL* pCtl)
…
         if (pCmdVbva->i32EnableState > VBVAEXHOSTCONTEXT_ESTATE_PAUSED)
             ASMAtomicWriteS32(&pCmdVbva->i32EnableState, VBVAEXHOSTCONTEXT_ESTATE_PAUSED);
+        VBoxVBVAExHPDataCompleteCtl(pCmdVbva, pCtl, VINF_SUCCESS);
         return true;
     case VBVAEXHOSTCTL_TYPE_HH_INTERNAL_RESUME:
         if (pCmdVbva->i32EnableState == VBVAEXHOSTCONTEXT_ESTATE_PAUSED)
             ASMAtomicWriteS32(&pCmdVbva->i32EnableState, VBVAEXHOSTCONTEXT_ESTATE_ENABLED);
+        VBoxVBVAExHPDataCompleteCtl(pCmdVbva, pCtl, VINF_SUCCESS);
         return true;
     default:
…
                 return VBVAEXHOST_DATA_TYPE_HOSTCTL;
             }
+            continue;
         }
         else
…
 }
 
+DECLINLINE(bool) VBoxVBVAExHSIsDisabled(struct VBVAEXHOSTCONTEXT *pCmdVbva)
+{
+    return (ASMAtomicUoReadS32(&pCmdVbva->i32EnableState) == VBVAEXHOSTCONTEXT_ESTATE_DISABLED);
+}
+
 static int VBoxVBVAExHSEnable(struct VBVAEXHOSTCONTEXT *pCmdVbva, VBVABUFFER *pVBVA)
 {
     if (VBoxVBVAExHSIsEnabled(pCmdVbva))
-        return VINF_ALREADY_INITIALIZED;
+    {
+        WARN(("VBVAEx is enabled already\n"));
+        return VERR_INVALID_STATE;
+    }
 
     pCmdVbva->pVBVA = pVBVA;
…
 static int VBoxVBVAExHSDisable(struct VBVAEXHOSTCONTEXT *pCmdVbva)
 {
-    if (!VBoxVBVAExHSIsEnabled(pCmdVbva))
+    if (VBoxVBVAExHSIsDisabled(pCmdVbva))
         return VINF_SUCCESS;
 
…
 }
 
+static int vboxVBVAExHSSaveGuestCtl(struct VBVAEXHOSTCONTEXT *pCmdVbva, VBVAEXHOSTCTL* pCtl, uint8_t* pu8VramBase, PSSMHANDLE pSSM)
+{
+    if (VBVAEXHOSTCTL_TYPE_GH_MIN > pCtl->enmType || VBVAEXHOSTCTL_TYPE_GH_MAX < pCtl->enmType)
+    {
+        WARN(("unexpected command type!\n"));
+        return VERR_INTERNAL_ERROR;
+    }
+
+    int rc = SSMR3PutU32(pSSM, pCtl->enmType);
+    AssertRCReturn(rc, rc);
+    rc = SSMR3PutU32(pSSM, pCtl->u.cmd.cbCmd);
+    AssertRCReturn(rc, rc);
+    rc = SSMR3PutU32(pSSM, (uint32_t)(pCtl->u.cmd.pu8Cmd - pu8VramBase));
+    AssertRCReturn(rc, rc);
+
+    return VINF_SUCCESS;
+}
+
+static int vboxVBVAExHSSaveStateLocked(struct VBVAEXHOSTCONTEXT *pCmdVbva, uint8_t* pu8VramBase, PSSMHANDLE pSSM)
+{
+    if (ASMAtomicUoReadS32(&pCmdVbva->i32EnableState) != VBVAEXHOSTCONTEXT_ESTATE_PAUSED)
+    {
+        WARN(("vbva not paused\n"));
+        return VERR_INVALID_STATE;
+    }
+
+    VBVAEXHOSTCTL* pCtl;
+    int rc;
+    RTListForEach(&pCmdVbva->GuestCtlList, pCtl, VBVAEXHOSTCTL, Node)
+    {
+        rc = vboxVBVAExHSSaveGuestCtl(pCmdVbva, pCtl, pu8VramBase, pSSM);
+        AssertRCReturn(rc, rc);
+    }
+
+    rc = SSMR3PutU32(pSSM, 0);
+    AssertRCReturn(rc, rc);
+
+    return VINF_SUCCESS;
+}
 /* Saves state
  * @returns - same as VBoxVBVAExHSCheckCommands, or failure on load state fail
…
 static int VBoxVBVAExHSSaveState(struct VBVAEXHOSTCONTEXT *pCmdVbva, uint8_t* pu8VramBase, PSSMHANDLE pSSM)
 {
+    int rc = RTCritSectEnter(&pCmdVbva->CltCritSect);
+    if (RT_FAILURE(rc))
+    {
+        WARN(("RTCritSectEnter failed %d\n", rc));
+        return rc;
+    }
+
+    rc = vboxVBVAExHSSaveStateLocked(pCmdVbva, pu8VramBase, pSSM);
+    if (RT_FAILURE(rc))
+        WARN(("vboxVBVAExHSSaveStateLocked failed %d\n", rc));
+
+    RTCritSectLeave(&pCmdVbva->CltCritSect);
+
+    return rc;
+}
+
+static int vboxVBVAExHSLoadGuestCtl(struct VBVAEXHOSTCONTEXT *pCmdVbva, uint8_t* pu8VramBase, PSSMHANDLE pSSM, uint32_t u32Version)
+{
+    uint32_t u32;
+    int rc = SSMR3GetU32(pSSM, &u32);
+    AssertRCReturn(rc, rc);
+
+    if (!u32)
+        return VINF_EOF;
+
+    if (VBVAEXHOSTCTL_TYPE_GH_MIN > u32 || VBVAEXHOSTCTL_TYPE_GH_MAX < u32)
+    {
+        WARN(("unexpected command type!\n"));
+        return VERR_INTERNAL_ERROR;
+    }
+
+    VBVAEXHOSTCTL* pHCtl = VBoxVBVAExHCtlCreate(pCmdVbva, (VBVAEXHOSTCTL_TYPE)u32);
+    if (!pHCtl)
+    {
+        WARN(("VBoxVBVAExHCtlCreate failed\n"));
+        return VERR_NO_MEMORY;
+    }
+
+    rc = SSMR3GetU32(pSSM, &u32);
+    AssertRCReturn(rc, rc);
+    pHCtl->u.cmd.cbCmd = u32;
+
+    rc = SSMR3GetU32(pSSM, &u32);
+    AssertRCReturn(rc, rc);
+    pHCtl->u.cmd.pu8Cmd = pu8VramBase + u32;
+
+    RTListAppend(&pCmdVbva->GuestCtlList, &pHCtl->Node);
+    ++pCmdVbva->u32cCtls;
+
+    return VINF_SUCCESS;
+}
+
+
+static int vboxVBVAExHSLoadStateLocked(struct VBVAEXHOSTCONTEXT *pCmdVbva, uint8_t* pu8VramBase, PSSMHANDLE pSSM, uint32_t u32Version)
+{
+    if (ASMAtomicUoReadS32(&pCmdVbva->i32EnableState) != VBVAEXHOSTCONTEXT_ESTATE_PAUSED)
+    {
+        WARN(("vbva not stopped\n"));
+        return VERR_INVALID_STATE;
+    }
+
     int rc;
 
-    int32_t i32EnableState = ASMAtomicUoReadS32(&pCmdVbva->i32EnableState);
-    if (i32EnableState >= VBVAEXHOSTCONTEXT_ESTATE_PAUSED)
-    {
-        if (i32EnableState != VBVAEXHOSTCONTEXT_ESTATE_PAUSED)
-        {
-            WARN(("vbva not paused\n"));
-            return VERR_INVALID_STATE;
-        }
-
-        rc = SSMR3PutU32(pSSM, (uint32_t)(((uint8_t*)pCmdVbva->pVBVA) - pu8VramBase));
+    do {
+        rc = vboxVBVAExHSLoadGuestCtl(pCmdVbva, pu8VramBase, pSSM, u32Version);
         AssertRCReturn(rc, rc);
-        return VINF_SUCCESS;
-    }
-
-    rc = SSMR3PutU32(pSSM, 0xffffffff);
-    AssertRCReturn(rc, rc);
+    } while (VINF_EOF != rc);
 
     return VINF_SUCCESS;
 }
-
-typedef enum
-{
-    VBVAEXHOSTCTL_SOURCE_GUEST = 0,
-    VBVAEXHOSTCTL_SOURCE_HOST_ANY,
-    VBVAEXHOSTCTL_SOURCE_HOST_ENABLED
-} VBVAEXHOSTCTL_SOURCE;
-
-
-static int VBoxVBVAExHCtlSubmit(VBVAEXHOSTCONTEXT *pCmdVbva, VBVAEXHOSTCTL* pCtl, VBVAEXHOSTCTL_SOURCE enmSource, PFNVBVAEXHOSTCTL_COMPLETE pfnComplete, void *pvComplete)
-{
-    if ((enmSource == VBVAEXHOSTCTL_SOURCE_HOST_ENABLED) && !VBoxVBVAExHSIsEnabled(pCmdVbva))
-    {
-        Log(("cmd vbva not enabled\n"));
-        return VERR_INVALID_STATE;
-    }
-
-    pCtl->pfnComplete = pfnComplete;
-    pCtl->pvComplete = pvComplete;
-
-    int rc = RTCritSectEnter(&pCmdVbva->CltCritSect);
-    if (RT_SUCCESS(rc))
-    {
-        if (enmSource > VBVAEXHOSTCTL_SOURCE_GUEST)
-        {
-            if ((enmSource == VBVAEXHOSTCTL_SOURCE_HOST_ENABLED) && !VBoxVBVAExHSIsEnabled(pCmdVbva))
-            {
-                Log(("cmd vbva not enabled\n"));
-                RTCritSectLeave(&pCmdVbva->CltCritSect);
-                return VERR_INVALID_STATE;
-            }
-            RTListAppend(&pCmdVbva->HostCtlList, &pCtl->Node);
-        }
-        else
-            RTListAppend(&pCmdVbva->GuestCtlList, &pCtl->Node);
-
-        ASMAtomicIncU32(&pCmdVbva->u32cCtls);
-
-        RTCritSectLeave(&pCmdVbva->CltCritSect);
-
-        rc = VBoxVBVAExHSCheckCommands(pCmdVbva);
-    }
-    else
-        WARN(("RTCritSectEnter failed %d\n", rc));
-
-    return rc;
-}
-
 
 /* Loads state
…
 static int VBoxVBVAExHSLoadState(struct VBVAEXHOSTCONTEXT *pCmdVbva, uint8_t* pu8VramBase, PSSMHANDLE pSSM, uint32_t u32Version)
 {
-    AssertMsgFailed(("implement!\n"));
-    uint32_t u32;
-    int rc = SSMR3GetU32(pSSM, &u32);
-    AssertRCReturn(rc, rc);
-    if (u32 != 0xffffffff)
-    {
-        VBVABUFFER *pVBVA = (VBVABUFFER*)pu8VramBase + u32;
-        rc = VBoxVBVAExHSEnable(pCmdVbva, pVBVA);
-        AssertRCReturn(rc, rc);
-        return VBoxVBVAExHSCheckCommands(pCmdVbva);
-    }
-
-    return VINF_SUCCESS;
+    Assert(VGA_SAVEDSTATE_VERSION_3D <= u32Version);
+    int rc = RTCritSectEnter(&pCmdVbva->CltCritSect);
+    if (RT_FAILURE(rc))
+    {
+        WARN(("RTCritSectEnter failed %d\n", rc));
+        return rc;
+    }
+
+    rc = vboxVBVAExHSLoadStateLocked(pCmdVbva, pu8VramBase, pSSM, u32Version);
+    if (RT_FAILURE(rc))
+        WARN(("vboxVBVAExHSSaveStateLocked failed %d\n", rc));
+
+    RTCritSectLeave(&pCmdVbva->CltCritSect);
+
+    return rc;
+}
+
+typedef enum
+{
+    VBVAEXHOSTCTL_SOURCE_GUEST = 0,
+    VBVAEXHOSTCTL_SOURCE_HOST
+} VBVAEXHOSTCTL_SOURCE;
+
+
+static int VBoxVBVAExHCtlSubmit(VBVAEXHOSTCONTEXT *pCmdVbva, VBVAEXHOSTCTL* pCtl, VBVAEXHOSTCTL_SOURCE enmSource, PFNVBVAEXHOSTCTL_COMPLETE pfnComplete, void *pvComplete)
+{
+    if (!VBoxVBVAExHSIsEnabled(pCmdVbva))
+    {
+        Log(("cmd vbva not enabled\n"));
+        return VERR_INVALID_STATE;
+    }
+
+    pCtl->pfnComplete = pfnComplete;
+    pCtl->pvComplete = pvComplete;
+
+    int rc = RTCritSectEnter(&pCmdVbva->CltCritSect);
+    if (RT_SUCCESS(rc))
+    {
+        if (!VBoxVBVAExHSIsEnabled(pCmdVbva))
+        {
+            Log(("cmd vbva not enabled\n"));
+            RTCritSectLeave(&pCmdVbva->CltCritSect);
+            return VERR_INVALID_STATE;
+        }
+
+        if (enmSource > VBVAEXHOSTCTL_SOURCE_GUEST)
+        {
+            RTListAppend(&pCmdVbva->HostCtlList, &pCtl->Node);
+        }
+        else
+            RTListAppend(&pCmdVbva->GuestCtlList, &pCtl->Node);
+
+        ASMAtomicIncU32(&pCmdVbva->u32cCtls);
+
+        RTCritSectLeave(&pCmdVbva->CltCritSect);
+
+        rc = VBoxVBVAExHSCheckCommands(pCmdVbva);
+    }
+    else
+        WARN(("RTCritSectEnter failed %d\n", rc));
+
+    return rc;
 }
 
…
 #ifdef VBOX_WITH_CRHGSMI
 
-int VBoxVDMAThreadNotifyConstructSucceeded(PVBOXVDMATHREAD pThread)
-{
-    Assert(pThread->u32State == VBOXVDMATHREAD_STATE_TERMINATED);
-    pThread->u32State = VBOXVDMATHREAD_STATE_CREATED;
-    int rc = RTSemEventSignal(pThread->hClientEvent);
-    AssertRC(rc);
-    return VINF_SUCCESS;
-}
-
-int VBoxVDMAThreadNotifyConstructFailed(PVBOXVDMATHREAD pThread)
-{
-    Assert(pThread->u32State == VBOXVDMATHREAD_STATE_TERMINATED);
-    int rc = RTSemEventSignal(pThread->hClientEvent);
-    AssertRC(rc);
+void VBoxVDMAThreadNotifyConstructSucceeded(PVBOXVDMATHREAD pThread, void *pvThreadContext)
+{
+    Assert(pThread->u32State == VBOXVDMATHREAD_STATE_CREATING);
+    PFNVBOXVDMATHREAD_CHANGED pfnChanged = pThread->pfnChanged;
+    void *pvChanged = pThread->pvChanged;
+
+    pThread->pfnChanged = NULL;
+    pThread->pvChanged = NULL;
+
+    ASMAtomicWriteU32(&pThread->u32State, VBOXVDMATHREAD_STATE_CREATED);
+
+    if (pfnChanged)
+        pfnChanged(pThread, VINF_SUCCESS, pvThreadContext, pvChanged);
+}
+
+void VBoxVDMAThreadNotifyTerminatingSucceeded(PVBOXVDMATHREAD pThread, void *pvThreadContext)
+{
+    Assert(pThread->u32State == VBOXVDMATHREAD_STATE_TERMINATING);
+    PFNVBOXVDMATHREAD_CHANGED pfnChanged = pThread->pfnChanged;
+    void *pvChanged = pThread->pvChanged;
+
+    pThread->pfnChanged = NULL;
+    pThread->pvChanged = NULL;
+
+    if (pfnChanged)
+        pfnChanged(pThread, VINF_SUCCESS, pvThreadContext, pvChanged);
+}
+
+DECLINLINE(bool) VBoxVDMAThreadIsTerminating(PVBOXVDMATHREAD pThread)
+{
+    return ASMAtomicUoReadU32(&pThread->u32State) == VBOXVDMATHREAD_STATE_TERMINATING;
+}
+
+void VBoxVDMAThreadInit(PVBOXVDMATHREAD pThread)
+{
+    memset(pThread, 0, sizeof (*pThread));
+    pThread->u32State = VBOXVDMATHREAD_STATE_TERMINATED;
+}
+
+int VBoxVDMAThreadCleanup(PVBOXVDMATHREAD pThread)
+{
+    uint32_t u32State = ASMAtomicUoReadU32(&pThread->u32State);
+    switch (u32State)
+    {
+        case VBOXVDMATHREAD_STATE_TERMINATED:
+            return VINF_SUCCESS;
+        case VBOXVDMATHREAD_STATE_TERMINATING:
+        {
+            int rc = RTThreadWait(pThread->hWorkerThread, RT_INDEFINITE_WAIT, NULL);
+            if (!RT_SUCCESS(rc))
+            {
+                WARN(("RTThreadWait failed %d\n", rc));
+                return rc;
+            }
+
+            RTSemEventDestroy(pThread->hEvent);
+
+            ASMAtomicWriteU32(&pThread->u32State, VBOXVDMATHREAD_STATE_TERMINATED);
+            return VINF_SUCCESS;
+        }
+        default:
+            WARN(("invalid state"));
+            return VERR_INVALID_STATE;
+    }
+}
+
+int VBoxVDMAThreadCreate(PVBOXVDMATHREAD pThread, PFNRTTHREAD pfnThread, void *pvThread, PFNVBOXVDMATHREAD_CHANGED pfnCreated, void*pvCreated)
+{
+    int rc = VBoxVDMAThreadCleanup(pThread);
+    if (RT_FAILURE(rc))
+    {
+        WARN(("VBoxVDMAThreadCleanup failed %d\n", rc));
+        return rc;
+    }
+
+    rc = RTSemEventCreate(&pThread->hEvent);
     if (RT_SUCCESS(rc))
-        return VINF_SUCCESS;
-    return rc;
-}
-
-DECLINLINE(bool) VBoxVDMAThreadIsTerminating(PVBOXVDMATHREAD pThread)
-{
-    return ASMAtomicUoReadU32(&pThread->u32State) == VBOXVDMATHREAD_STATE_TERMINATING;
-}
-
-int VBoxVDMAThreadCreate(PVBOXVDMATHREAD pThread, PFNRTTHREAD pfnThread, void *pvThread)
-{
-    int rc = RTSemEventCreate(&pThread->hEvent);
-    if (RT_SUCCESS(rc))
-    {
-        rc = RTSemEventCreate(&pThread->hClientEvent);
+    {
+        pThread->u32State = VBOXVDMATHREAD_STATE_CREATING;
+        pThread->pfnChanged = pfnCreated;
+        pThread->pvChanged = pvCreated;
+        rc = RTThreadCreate(&pThread->hWorkerThread, pfnThread, pvThread, 0, RTTHREADTYPE_IO, RTTHREADFLAGS_WAITABLE, "VDMA");
         if (RT_SUCCESS(rc))
-        {
-            pThread->u32State = VBOXVDMATHREAD_STATE_TERMINATED;
-            rc = RTThreadCreate(&pThread->hWorkerThread, pfnThread, pvThread, 0, RTTHREADTYPE_IO, RTTHREADFLAGS_WAITABLE, "VDMA");
-            if (RT_SUCCESS(rc))
-            {
-                rc = RTSemEventWait(pThread->hClientEvent, RT_INDEFINITE_WAIT);
-                if (RT_SUCCESS(rc))
-                {
-                    if (pThread->u32State == VBOXVDMATHREAD_STATE_CREATED)
-                        return VINF_SUCCESS;
-                    WARN(("thread routine failed the initialization\n"));
-                    rc = VERR_INVALID_STATE;
-                }
-                else
-                    WARN(("RTSemEventWait failed %d\n", rc));
-
-                RTThreadWait(pThread->hWorkerThread, RT_INDEFINITE_WAIT, NULL);
-            }
-            else
-                WARN(("RTThreadCreate failed %d\n", rc));
-
-            RTSemEventDestroy(pThread->hClientEvent);
-        }
+            return VINF_SUCCESS;
         else
-            WARN(("RTSemEventCreate failed %d\n", rc));
+            WARN(("RTThreadCreate failed %d\n", rc));
 
         RTSemEventDestroy(pThread->hEvent);
…
     else
         WARN(("RTSemEventCreate failed %d\n", rc));
+
+    pThread->u32State = VBOXVDMATHREAD_STATE_TERMINATED;
 
     return rc;
…
 }
 
-void VBoxVDMAThreadMarkTerminating(PVBOXVDMATHREAD pThread)
-{
-    Assert(pThread->u32State == VBOXVDMATHREAD_STATE_CREATED);
-    ASMAtomicWriteU32(&pThread->u32State, VBOXVDMATHREAD_STATE_TERMINATING);
-}
-
-void VBoxVDMAThreadTerm(PVBOXVDMATHREAD pThread)
+int VBoxVDMAThreadTerm(PVBOXVDMATHREAD pThread, PFNVBOXVDMATHREAD_CHANGED pfnTerminated, void*pvTerminated, bool fNotify)
 {
     int rc;
-    if (ASMAtomicReadU32(&pThread->u32State) != VBOXVDMATHREAD_STATE_TERMINATING)
-    {
-        VBoxVDMAThreadMarkTerminating(pThread);
-        rc = VBoxVDMAThreadEventNotify(pThread);
-        AssertRC(rc);
-    }
-    rc = RTThreadWait(pThread->hWorkerThread, RT_INDEFINITE_WAIT, NULL);
-    AssertRC(rc);
-    RTSemEventDestroy(pThread->hClientEvent);
-    RTSemEventDestroy(pThread->hEvent);
+    do
+    {
+        uint32_t u32State = ASMAtomicUoReadU32(&pThread->u32State);
+        switch (u32State)
+        {
+            case VBOXVDMATHREAD_STATE_CREATED:
+                pThread->pvChanged = pfnTerminated;
+                pThread->pvChanged = pvTerminated;
+                ASMAtomicWriteU32(&pThread->u32State, VBOXVDMATHREAD_STATE_TERMINATING);
+                if (fNotify)
+                {
+                    rc = VBoxVDMAThreadEventNotify(pThread);
+                    AssertRC(rc);
+                }
+                return VINF_SUCCESS;
+            case VBOXVDMATHREAD_STATE_TERMINATING:
+            case VBOXVDMATHREAD_STATE_TERMINATED:
+            {
+                WARN(("thread is marked to termination or terminated\nn"));
+                return VERR_INVALID_STATE;
+            }
+            case VBOXVDMATHREAD_STATE_CREATING:
+            {
+                /* wait till the thread creation is completed */
+                WARN(("concurrent thread create/destron\n"));
+                RTThreadYield();
+                continue;
+            }
+            default:
+                WARN(("invalid state"));
+                return VERR_INVALID_STATE;
+        }
+    } while (1);
+
+    WARN(("should never be here\n"));
+    return VERR_INTERNAL_ERROR;
 }
 
…
 }
 
+static int vdmaVBVACtlDisableSync(PVBOXVDMAHOST pVdma)
+{
+    VBVAEXHOSTCTL HCtl;
+    HCtl.enmType = VBVAEXHOSTCTL_TYPE_GHH_DISABLE;
+    return vdmaVBVACtlSubmitSync(pVdma, &HCtl, VBVAEXHOSTCTL_SOURCE_HOST);
+}
+
 static DECLCALLBACK(uint8_t*) vboxVDMACrHgcmHandleEnableRemainingHostCommand(HVBOXCRCMDCTL_REMAINING_HOST_COMMAND hClient, uint32_t *pcbCtl, int prevCmdRc)
 {
…
     struct VBOXVDMAHOST *pVdma = hClient;
     Assert(pVdma->CmdVbva.i32State == VBVAEXHOSTCONTEXT_STATE_PROCESSING);
-    int rc = VBoxVDMAThreadEventNotify(&pVdma->Thread);
-    AssertRC(rc);
+    Assert(pVdma->Thread.u32State == VBOXVDMATHREAD_STATE_TERMINATING);
 }
 
…
 {
     struct VBOXVDMAHOST *pVdma = hClient;
-    VBVAEXHOSTCTL Ctl;
-    Ctl.enmType = VBVAEXHOSTCTL_TYPE_HH_ON_HGCM_TERM;
-    int rc = vdmaVBVACtlSubmitSync(pVdma, &Ctl, VBVAEXHOSTCTL_SOURCE_HOST_ANY);
-    if (!RT_SUCCESS(rc))
-    {
-        WARN(("vdmaVBVACtlSubmitSync failed %d\n", rc));
-        return rc;
-    }
-
-    Assert(pVdma->CmdVbva.i32State == VBVAEXHOSTCONTEXT_STATE_PROCESSING);
+    VBVAEXHOSTCTL HCtl;
+    HCtl.enmType = VBVAEXHOSTCTL_TYPE_HH_ON_HGCM_UNLOAD;
+    int rc = vdmaVBVACtlSubmitSync(pVdma, &HCtl, VBVAEXHOSTCTL_SOURCE_HOST);
 
     pHgcmEnableData->hRHCmd = pVdma;
     pHgcmEnableData->pfnRHCmd = vboxVDMACrHgcmHandleEnableRemainingHostCommand;
 
-    return VINF_SUCCESS;
+    if (RT_FAILURE(rc))
+    {
+        if (rc == VERR_INVALID_STATE)
+            rc = VINF_SUCCESS;
+        else
+            WARN(("vdmaVBVACtlSubmitSync failed %d\n", rc));
+    }
+
+    return rc;
 }
 
…
     {
         case VBVAEXHOSTCTL_TYPE_HH_SAVESTATE:
-            if (!VBoxVBVAExHSIsEnabled(&pVdma->CmdVbva))
-            {
-                WARN(("VBVAEXHOSTCTL_TYPE_HH_SAVESTATE for disabled vdma VBVA\n"));
-                return VERR_INVALID_STATE;
+        {
+            PVGASTATE pVGAState = pVdma->pVGAState;
+            uint8_t * pu8VramBase = pVGAState->vram_ptrR3;
+            int rc = VBoxVBVAExHSSaveState(&pVdma->CmdVbva, pu8VramBase, pCmd->u.state.pSSM);
+            if (RT_FAILURE(rc))
+            {
+                WARN(("VBoxVBVAExHSSaveState failed %d\n", rc));
+                return rc;
             }
             return pVdma->CrSrvInfo.pfnSaveState(pVdma->CrSrvInfo.hSvr, pCmd->u.state.pSSM);
+        }
         case VBVAEXHOSTCTL_TYPE_HH_LOADSTATE:
-            if (!VBoxVBVAExHSIsEnabled(&pVdma->CmdVbva))
-            {
-                WARN(("VBVAEXHOSTCTL_TYPE_HH_LOADSTATE for disabled vdma VBVA\n"));
-                return VERR_INVALID_STATE;
+        {
+            PVGASTATE pVGAState = pVdma->pVGAState;
+            uint8_t * pu8VramBase = pVGAState->vram_ptrR3;
+            int rc = VBoxVBVAExHSLoadState(&pVdma->CmdVbva, pu8VramBase, pCmd->u.state.pSSM, pCmd->u.state.u32Version);
+            if (RT_FAILURE(rc))
+            {
+                WARN(("VBoxVBVAExHSSaveState failed %d\n", rc));
+                return rc;
             }
             return pVdma->CrSrvInfo.pfnLoadState(pVdma->CrSrvInfo.hSvr, pCmd->u.state.pSSM, pCmd->u.state.u32Version);
+        }
         case VBVAEXHOSTCTL_TYPE_GHH_BE_OPAQUE:
             if (!VBoxVBVAExHSIsEnabled(&pVdma->CmdVbva))
…
             }
             return pVdma->CrSrvInfo.pfnHostCtl(pVdma->CrSrvInfo.hSvr, pCmd->u.cmd.pu8Cmd, pCmd->u.cmd.cbCmd);
-        case VBVAEXHOSTCTL_TYPE_HH_TERM:
+        case VBVAEXHOSTCTL_TYPE_GHH_DISABLE:
         {
             int rc = vdmaVBVADisableProcess(pVdma, true);
-            if (!RT_SUCCESS(rc))
+            if (RT_FAILURE(rc))
             {
                 WARN(("vdmaVBVADisableProcess failed %d\n", rc));
…
             }
 
-            VBoxVDMAThreadMarkTerminating(&pVdma->Thread);
-            return VINF_SUCCESS;
-        }
-        case VBVAEXHOSTCTL_TYPE_HH_ON_HGCM_TERM:
+            return VBoxVDMAThreadTerm(&pVdma->Thread, NULL, NULL, false);
+        }
+        case VBVAEXHOSTCTL_TYPE_HH_ON_HGCM_UNLOAD:
         {
             int rc = vdmaVBVADisableProcess(pVdma, false);
-            if (!RT_SUCCESS(rc))
+            if (RT_FAILURE(rc))
             {
                 WARN(("vdmaVBVADisableProcess failed %d\n", rc));
…
             }
 
+            rc = VBoxVDMAThreadTerm(&pVdma->Thread, NULL, NULL, false);
+            if (RT_FAILURE(rc))
+            {
+                WARN(("VBoxVDMAThreadTerm failed %d\n", rc));
+                return rc;
+            }
+
             *pfContinue = false;
-
-            return VINF_SUCCESS;
-        }
-        case VBVAEXHOSTCTL_TYPE_HH_RESET:
-        {
-            int rc = vdmaVBVADisableProcess(pVdma, true);
-            if (!RT_SUCCESS(rc))
-            {
-                WARN(("vdmaVBVADisableProcess failed %d\n", rc));
-                return rc;
-            }
             return VINF_SUCCESS;
         }
…
             }
             return pVdma->CrSrvInfo.pfnGuestCtl(pVdma->CrSrvInfo.hSvr, pCmd->u.cmd.pu8Cmd, pCmd->u.cmd.cbCmd);
-        case VBVAEXHOSTCTL_TYPE_GH_ENABLE_DISABLE:
+        case VBVAEXHOSTCTL_TYPE_GHH_ENABLE:
         {
             VBVAENABLE *pEnable = (VBVAENABLE *)pCmd->u.cmd.pu8Cmd;
             Assert(pCmd->u.cmd.cbCmd == sizeof (VBVAENABLE));
-            if ((pEnable->u32Flags & (VBVA_F_ENABLE | VBVA_F_DISABLE)) == VBVA_F_ENABLE)
-            {
-                uint32_t u32Offset = pEnable->u32Offset;
-                return vdmaVBVAEnableProcess(pVdma, u32Offset);
-            }
-
-            return vdmaVBVADisableProcess(pVdma, true);
+            uint32_t u32Offset = pEnable->u32Offset;
+            return vdmaVBVAEnableProcess(pVdma, u32Offset);
+        }
+        case VBVAEXHOSTCTL_TYPE_GHH_DISABLE:
+        {
+            int rc = vdmaVBVADisableProcess(pVdma, true);
+            if (RT_FAILURE(rc))
+            {
+                WARN(("vdmaVBVADisableProcess failed %d\n", rc));
+                return rc;
+            }
+
+            return VBoxVDMAThreadTerm(&pVdma->Thread, NULL, NULL, false);
         }
         default:
…
     uint8_t *pCmd;
     uint32_t cbCmd;
-
-    int rc = VBoxVDMAThreadNotifyConstructSucceeded(&pVdma->Thread);
-    if (!RT_SUCCESS(rc))
-    {
-        WARN(("VBoxVDMAThreadNotifyConstructSucceeded failed %d\n", rc));
-        return rc;
-    }
+    int rc;
+
+    VBoxVDMAThreadNotifyConstructSucceeded(&pVdma->Thread, pvUser);
 
     while (!VBoxVDMAThreadIsTerminating(&pVdma->Thread))
…
     }
 
+    VBoxVDMAThreadNotifyTerminatingSucceeded(&pVdma->Thread, pvUser);
+
     return VINF_SUCCESS;
 }
…
 
 #ifdef VBOX_VDMA_WITH_WATCHDOG
     rc = PDMDevHlpTMTimerCreate(pVGAState->pDevInsR3, TMCLOCK_REAL, vboxVDMAWatchDogTimer,
                                 pVdma, TMTIMER_FLAGS_NO_CRIT_SECT,
                                 "VDMA WatchDog Timer", &pVdma->WatchDogTimer);
     AssertRC(rc);
 #endif
 
 #ifdef VBOX_WITH_CRHGSMI
+    VBoxVDMAThreadInit(&pVdma->Thread);
+
     rc = RTSemEventMultiCreate(&pVdma->HostCrCtlCompleteEvent);
     if (RT_SUCCESS(rc))
…
         if (RT_SUCCESS(rc))
         {
-            rc = VBoxVDMAThreadCreate(&pVdma->Thread, vboxVDMAWorkerThread, pVdma);
-            if (RT_SUCCESS(rc))
-            {
-                pVGAState->pVdma = pVdma;
-                int rcIgnored = vboxVDMACrCtlHgsmiSetup(pVdma); NOREF(rcIgnored); /** @todo is this ignoring intentional? */
-                return VINF_SUCCESS;
-            }
-            else
-                WARN(("VBoxVDMAThreadCreate faile %d\n", rc));
+            pVGAState->pVdma = pVdma;
+            int rcIgnored = vboxVDMACrCtlHgsmiSetup(pVdma); NOREF(rcIgnored); /** @todo is this ignoring intentional? */
+            return VINF_SUCCESS;
 
             VBoxVBVAExHSTerm(&pVdma->CmdVbva);
…
 {
 #ifdef VBOX_WITH_CRHGSMI
-    VBVAEXHOSTCTL Ctl;
-    Ctl.enmType = VBVAEXHOSTCTL_TYPE_HH_RESET;
-    int rc = vdmaVBVACtlSubmitSync(pVdma, &Ctl, VBVAEXHOSTCTL_SOURCE_HOST_ANY);
-    if (!RT_SUCCESS(rc))
-    {
-        WARN(("vdmaVBVACtlSubmitSync failed %d\n", rc));
-        return rc;
-    }
+    vdmaVBVACtlDisableSync(pVdma);
 #endif
     return VINF_SUCCESS;
…
 {
 #ifdef VBOX_WITH_CRHGSMI
-    VBVAEXHOSTCTL Ctl;
-    Ctl.enmType = VBVAEXHOSTCTL_TYPE_HH_TERM;
-    int rc = vdmaVBVACtlSubmitSync(pVdma, &Ctl, VBVAEXHOSTCTL_SOURCE_HOST_ANY);
-    if (!RT_SUCCESS(rc))
-    {
-        WARN(("vdmaVBVACtlSubmitSync failed %d\n", rc));
-        return rc;
-    }
-    VBoxVDMAThreadTerm(&pVdma->Thread);
+    vdmaVBVACtlDisableSync(pVdma);
+    VBoxVDMAThreadCleanup(&pVdma->Thread);
     VBoxVBVAExHSTerm(&pVdma->CmdVbva);
     RTSemEventMultiDestroy(pVdma->HostCrCtlCompleteEvent);
…
     RTMemFree(pVdma);
     return VINF_SUCCESS;
-}
-
-int vboxVDMASaveStateExecPrep(struct VBOXVDMAHOST *pVdma, PSSMHANDLE pSSM)
-{
-#ifdef VBOX_WITH_CRHGSMI
-    PVGASTATE pVGAState = pVdma->pVGAState;
-    PVBOXVDMACMD_CHROMIUM_CTL pCmd = (PVBOXVDMACMD_CHROMIUM_CTL)vboxVDMACrCtlCreate(
-            VBOXVDMACMD_CHROMIUM_CTL_TYPE_SAVESTATE_BEGIN, sizeof (*pCmd));
-    Assert(pCmd);
-    if (pCmd)
-    {
-        int rc = vboxVDMACrCtlPost(pVGAState, pCmd, sizeof (*pCmd));
-        AssertRC(rc);
-        if (RT_SUCCESS(rc))
-        {
-            rc = vboxVDMACrCtlGetRc(pCmd);
-        }
-        vboxVDMACrCtlRelease(pCmd);
-        return rc;
-    }
-    return VERR_NO_MEMORY;
-#else
-    return VINF_SUCCESS;
-#endif
-}
-
-int vboxVDMASaveStateExecDone(struct VBOXVDMAHOST *pVdma, PSSMHANDLE pSSM)
-{
-#ifdef VBOX_WITH_CRHGSMI
-    PVGASTATE pVGAState = pVdma->pVGAState;
-    PVBOXVDMACMD_CHROMIUM_CTL pCmd = (PVBOXVDMACMD_CHROMIUM_CTL)vboxVDMACrCtlCreate(
-            VBOXVDMACMD_CHROMIUM_CTL_TYPE_SAVESTATE_END, sizeof (*pCmd));
-    Assert(pCmd);
-    if (pCmd)
-    {
-        int rc = vboxVDMACrCtlPost(pVGAState, pCmd, sizeof (*pCmd));
-        AssertRC(rc);
-        if (RT_SUCCESS(rc))
-        {
-            rc = vboxVDMACrCtlGetRc(pCmd);
-        }
-        vboxVDMACrCtlRelease(pCmd);
-        return rc;
-    }
-    return VERR_NO_MEMORY;
-#else
-    return VINF_SUCCESS;
-#endif
 }
 
…
 /**/
 #ifdef VBOX_WITH_CRHGSMI
+
+static DECLCALLBACK(void) vdmaVBVACtlSubmitSyncCompletion(VBVAEXHOSTCONTEXT *pVbva, struct VBVAEXHOSTCTL *pCtl, int rc, void *pvContext);
+
 static int vdmaVBVACtlSubmit(PVBOXVDMAHOST pVdma, VBVAEXHOSTCTL* pCtl, VBVAEXHOSTCTL_SOURCE enmSource, PFNVBVAEXHOSTCTL_COMPLETE pfnComplete, void *pvComplete)
 {
…
     pHCtl->u.cmd.cbCmd = cbCmd;
     int rc = vdmaVBVACtlSubmit(pVdma, pHCtl, enmSource, pfnComplete, pvComplete);
-    if (!RT_SUCCESS(rc))
-    {
+    if (RT_FAILURE(rc))
+    {
+        VBoxVBVAExHCtlFree(&pVdma->CmdVbva, pHCtl);
         Log(("vdmaVBVACtlSubmit failed rc %d\n", rc));
         return rc;;
…
     {
         pCmd->u.pfnInternal = (void(*)())pfnCompletion;
-        int rc = vdmaVBVACtlOpaqueSubmit(pVdma, VBVAEXHOSTCTL_SOURCE_HOST_ENABLED, (uint8_t*)pCmd, cbCmd, vboxCmdVBVACmdCtlHostCompletion, pvCompletion);
-        if (!RT_SUCCESS(rc))
+        int rc = vdmaVBVACtlOpaqueSubmit(pVdma, VBVAEXHOSTCTL_SOURCE_HOST, (uint8_t*)pCmd, cbCmd, vboxCmdVBVACmdCtlHostCompletion, pvCompletion);
+        if (RT_FAILURE(rc))
         {
             if (rc == VERR_INVALID_STATE)
…
 }
 
-static int vdmaVBVACtlEnableDisableSubmitInternal(PVBOXVDMAHOST pVdma, VBVAENABLE *pEnable, PFNVBVAEXHOSTCTL_COMPLETE pfnComplete, void *pvComplete)
-{
-    VBVAEXHOSTCTL* pHCtl = VBoxVBVAExHCtlCreate(&pVdma->CmdVbva, VBVAEXHOSTCTL_TYPE_GH_ENABLE_DISABLE);
+static DECLCALLBACK(void) vdmaVBVACtlThreadCreatedEnable(struct VBOXVDMATHREAD *pThread, int rc, void *pvThreadContext, void *pvContext)
+{
+    PVBOXVDMAHOST pVdma = (PVBOXVDMAHOST)pvThreadContext;
+    VBVAEXHOSTCTL* pHCtl = (VBVAEXHOSTCTL*)pvContext;
+
+    if (RT_SUCCESS(rc))
+    {
+        rc = vboxVDMACrGuestCtlProcess(pVdma, pHCtl);
+        if (RT_FAILURE(rc))
+            WARN(("vboxVDMACrGuestCtlProcess failed %d\n", rc));
+    }
+    else
+        WARN(("vdmaVBVACtlThreadCreatedEnable is passed %d\n", rc));
+
+    VBoxVBVAExHPDataCompleteCtl(&pVdma->CmdVbva, pHCtl, rc);
+}
+
+static int vdmaVBVACtlEnableSubmitInternal(PVBOXVDMAHOST pVdma, VBVAENABLE *pEnable, PFNVBVAEXHOSTCTL_COMPLETE pfnComplete, void *pvComplete)
+{
+    int rc;
+    VBVAEXHOSTCTL* pHCtl = VBoxVBVAExHCtlCreate(&pVdma->CmdVbva, VBVAEXHOSTCTL_TYPE_GHH_ENABLE);
+    if (pHCtl)
+    {
+        pHCtl->u.cmd.pu8Cmd = (uint8_t*)pEnable;
+        pHCtl->u.cmd.cbCmd = sizeof (*pEnable);
+        pHCtl->pfnComplete = pfnComplete;
+        pHCtl->pvComplete = pvComplete;
+
+        rc = VBoxVDMAThreadCreate(&pVdma->Thread, vboxVDMAWorkerThread, pVdma, vdmaVBVACtlThreadCreatedEnable, pHCtl);
+        if (RT_SUCCESS(rc))
+            return VINF_SUCCESS;
+        else
+            WARN(("VBoxVDMAThreadCreate failed %d\n", rc));
+
+        VBoxVBVAExHCtlFree(&pVdma->CmdVbva, pHCtl);
+    }
+    else
+    {
+        WARN(("VBoxVBVAExHCtlCreate failed\n"));
+        rc = VERR_NO_MEMORY;
+    }
+
+    return rc;
+}
+
+static int vdmaVBVACtlEnableSubmitSync(PVBOXVDMAHOST pVdma, uint32_t offVram)
+{
+    VBVAENABLE Enable = {0};
+    Enable.u32Flags = VBVA_F_ENABLE;
+    Enable.u32Offset = offVram;
+
+    VDMA_VBVA_CTL_CYNC_COMPLETION Data;
+    Data.rc = VERR_NOT_IMPLEMENTED;
+    int rc = RTSemEventCreate(&Data.hEvent);
+    if (!RT_SUCCESS(rc))
+    {
+        WARN(("RTSemEventCreate failed %d\n", rc));
+        return rc;
+    }
+
+    rc = vdmaVBVACtlEnableSubmitInternal(pVdma, &Enable, vdmaVBVACtlSubmitSyncCompletion, &Data);
+    if (RT_SUCCESS(rc))
+    {
+        rc = RTSemEventWait(Data.hEvent, RT_INDEFINITE_WAIT);
+        if (RT_SUCCESS(rc))
+        {
+            rc = Data.rc;
+            if (!RT_SUCCESS(rc))
+                WARN(("vdmaVBVACtlSubmitSyncCompletion returned %d\n", rc));
+        }
+        else
+            WARN(("RTSemEventWait failed %d\n", rc));
+    }
+    else
+        WARN(("vdmaVBVACtlSubmit failed %d\n", rc));
+
+    RTSemEventDestroy(Data.hEvent);
+
+    return rc;
+}
+
+static int vdmaVBVACtlDisableSubmitInternal(PVBOXVDMAHOST pVdma, VBVAENABLE *pEnable, PFNVBVAEXHOSTCTL_COMPLETE pfnComplete, void *pvComplete)
+{
+    int rc;
+    VBVAEXHOSTCTL* pHCtl;
+    if (VBoxVBVAExHSIsDisabled(&pVdma->CmdVbva))
+    {
+        WARN(("VBoxVBVAExHSIsDisabled: disabled"));
+        return VINF_SUCCESS;
+    }
+
+    pHCtl = VBoxVBVAExHCtlCreate(&pVdma->CmdVbva, VBVAEXHOSTCTL_TYPE_GHH_DISABLE);
     if (!pHCtl)
     {
…
     pHCtl->u.cmd.pu8Cmd = (uint8_t*)pEnable;
     pHCtl->u.cmd.cbCmd = sizeof (*pEnable);
-    int rc = vdmaVBVACtlSubmit(pVdma, pHCtl, VBVAEXHOSTCTL_SOURCE_GUEST, pfnComplete, pvComplete);
-    if (!RT_SUCCESS(rc))
-    {
-        WARN(("vdmaVBVACtlSubmit failed rc %d\n", rc));
-        return rc;;
-    }
-    return VINF_SUCCESS;
+    rc = vdmaVBVACtlSubmit(pVdma, pHCtl, VBVAEXHOSTCTL_SOURCE_GUEST, pfnComplete, pvComplete);
+    if (RT_SUCCESS(rc))
+        return VINF_SUCCESS;
+
+    WARN(("vdmaVBVACtlSubmit failed rc %d\n", rc));
+    VBoxVBVAExHCtlFree(&pVdma->CmdVbva, pHCtl);
+    return rc;
+}
+
+static int vdmaVBVACtlEnableDisableSubmitInternal(PVBOXVDMAHOST pVdma, VBVAENABLE *pEnable, PFNVBVAEXHOSTCTL_COMPLETE pfnComplete, void *pvComplete)
+{
+    bool fEnable = ((pEnable->u32Flags & (VBVA_F_ENABLE | VBVA_F_DISABLE)) == VBVA_F_ENABLE);
+    if (fEnable)
+        return vdmaVBVACtlEnableSubmitInternal(pVdma, pEnable, pfnComplete, pvComplete);
+    return vdmaVBVACtlDisableSubmitInternal(pVdma, pEnable, pfnComplete, pvComplete);
 }
 
…
     VBVAEXHOSTCTL Ctl;
     Ctl.enmType = VBVAEXHOSTCTL_TYPE_HH_INTERNAL_PAUSE;
-    return vdmaVBVACtlSubmitSync(pVdma, &Ctl, VBVAEXHOSTCTL_SOURCE_HOST_ANY);
+    return vdmaVBVACtlSubmitSync(pVdma, &Ctl, VBVAEXHOSTCTL_SOURCE_HOST);
 }
 
…
     VBVAEXHOSTCTL Ctl;
     Ctl.enmType = VBVAEXHOSTCTL_TYPE_HH_INTERNAL_RESUME;
-    return vdmaVBVACtlSubmitSync(pVdma, &Ctl, VBVAEXHOSTCTL_SOURCE_HOST_ANY);
+    return vdmaVBVACtlSubmitSync(pVdma, &Ctl, VBVAEXHOSTCTL_SOURCE_HOST);
 }
 
…
 
 #endif
+
+int vboxVDMASaveStateExecPrep(struct VBOXVDMAHOST *pVdma, PSSMHANDLE pSSM)
+{
+#ifdef VBOX_WITH_CRHGSMI
+    int rc = vdmaVBVAPause(pVdma);
+    if (RT_SUCCESS(rc))
+        return VINF_SUCCESS;
+
+    if (rc != VERR_INVALID_STATE)
+    {
+        WARN(("vdmaVBVAPause failed %d\n", rc));
+        return rc;
+    }
+
+#ifdef DEBUG_misha
+    WARN(("debug prep"));
+#endif
+
+    PVGASTATE pVGAState = pVdma->pVGAState;
+    PVBOXVDMACMD_CHROMIUM_CTL pCmd = (PVBOXVDMACMD_CHROMIUM_CTL)vboxVDMACrCtlCreate(
+            VBOXVDMACMD_CHROMIUM_CTL_TYPE_SAVESTATE_BEGIN, sizeof (*pCmd));
+    Assert(pCmd);
+    if (pCmd)
+    {
+        int rc = vboxVDMACrCtlPost(pVGAState, pCmd, sizeof (*pCmd));
+        AssertRC(rc);
+        if (RT_SUCCESS(rc))
+        {
+            rc = vboxVDMACrCtlGetRc(pCmd);
+        }
+        vboxVDMACrCtlRelease(pCmd);
+        return rc;
+    }
+    return VERR_NO_MEMORY;
+#else
+    return VINF_SUCCESS;
+#endif
+}
+
+int vboxVDMASaveStateExecDone(struct VBOXVDMAHOST *pVdma, PSSMHANDLE pSSM)
+{
+#ifdef VBOX_WITH_CRHGSMI
+    int rc = vdmaVBVAResume(pVdma);
+    if (RT_SUCCESS(rc))
+        return VINF_SUCCESS;
+
+    if (rc != VERR_INVALID_STATE)
+    {
+        WARN(("vdmaVBVAResume failed %d\n", rc));
+        return rc;
+    }
+
+#ifdef DEBUG_misha
+    WARN(("debug done"));
+#endif
+
+    PVGASTATE pVGAState = pVdma->pVGAState;
+    PVBOXVDMACMD_CHROMIUM_CTL pCmd = (PVBOXVDMACMD_CHROMIUM_CTL)vboxVDMACrCtlCreate(
+            VBOXVDMACMD_CHROMIUM_CTL_TYPE_SAVESTATE_END, sizeof (*pCmd));
+    Assert(pCmd);
+    if (pCmd)
+    {
+        rc = vboxVDMACrCtlPost(pVGAState, pCmd, sizeof (*pCmd));
+        AssertRC(rc);
+        if (RT_SUCCESS(rc))
+        {
+            rc = vboxVDMACrCtlGetRc(pCmd);
+        }
+        vboxVDMACrCtlRelease(pCmd);
+        return rc;
+    }
+    return VERR_NO_MEMORY;
+#else
+    return VINF_SUCCESS;
+#endif
+}
+
+int vboxVDMASaveStateExecPerform(struct VBOXVDMAHOST *pVdma, PSSMHANDLE pSSM)
+{
+    int rc;
+
+#ifdef VBOX_WITH_CRHGSMI
+    if (!VBoxVBVAExHSIsEnabled(&pVdma->CmdVbva))
+#endif
+    {
+        rc = SSMR3PutU32(pSSM, 0xffffffff);
+        AssertRCReturn(rc, rc);
+        return VINF_SUCCESS;
+    }
+
+#ifdef VBOX_WITH_CRHGSMI
+    PVGASTATE pVGAState = pVdma->pVGAState;
+    uint8_t * pu8VramBase = pVGAState->vram_ptrR3;
+
+    rc = SSMR3PutU32(pSSM, (uint32_t)(((uint8_t*)pVdma->CmdVbva.pVBVA) - pu8VramBase));
+    AssertRCReturn(rc, rc);
+
+    VBVAEXHOSTCTL HCtl;
+    HCtl.enmType = VBVAEXHOSTCTL_TYPE_HH_SAVESTATE;
+    HCtl.u.state.pSSM = pSSM;
+    HCtl.u.state.u32Version = 0;
+    return vdmaVBVACtlSubmitSync(pVdma, &HCtl, VBVAEXHOSTCTL_SOURCE_HOST);
+#endif
+}
+
+int vboxVDMASaveLoadExecPerform(struct VBOXVDMAHOST *pVdma, PSSMHANDLE pSSM, uint32_t u32Version)
+{
+    uint32_t u32;
+    int rc = SSMR3GetU32(pSSM, &u32);
+    AssertRCReturn(rc, rc);
+
+    if (u32 != 0xffffffff)
+    {
+#ifdef VBOX_WITH_CRHGSMI
+        rc = vdmaVBVACtlEnableSubmitSync(pVdma, u32);
+        AssertRCReturn(rc, rc);
+
+        rc = vdmaVBVAPause(pVdma);
+        AssertRCReturn(rc, rc);
+
+        VBVAEXHOSTCTL HCtl;
+        HCtl.enmType = VBVAEXHOSTCTL_TYPE_HH_LOADSTATE;
+        HCtl.u.state.pSSM = pSSM;
+        HCtl.u.state.u32Version = u32Version;
+        rc = vdmaVBVACtlSubmitSync(pVdma, &HCtl, VBVAEXHOSTCTL_SOURCE_HOST);
+        AssertRCReturn(rc, rc);
+
+        rc = vdmaVBVAResume(pVdma);
+        AssertRCReturn(rc, rc);
+
+        return VINF_SUCCESS;
+#else
+        WARN(("Unsupported VBVACtl info!\n"));
+        return VERR_VERSION_MISMATCH;
+#endif
+    }
+
+    return VINF_SUCCESS;
+}
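The DevVGA_VDMA.cpp rework replaces the blocking thread construction (RTSemEventWait on hClientEvent) with an explicit life cycle — TERMINATED → CREATING → CREATED → TERMINATING — plus a PFNVBOXVDMATHREAD_CHANGED callback that fires when a transition completes. The following toy C model shows the shape of that state machine; it is a sketch under stated assumptions, not VirtualBox code: the worker thread is simulated by calling the notify function inline, and the names mirror but do not reuse the real VBOXVDMATHREAD API.

/* Toy model of the reworked VDMA worker-thread life cycle. */
#include <stdio.h>
#include <stdint.h>

enum { STATE_TERMINATED = 0, STATE_CREATING = 1, STATE_CREATED = 3, STATE_TERMINATING = 4 };

typedef struct THREADCTX THREADCTX;
typedef void (*PFNCHANGED)(THREADCTX *pCtx, int rc, void *pvChange);

struct THREADCTX
{
    uint32_t   u32State;
    PFNCHANGED pfnChanged;  /* fired when CREATING -> CREATED completes */
    void      *pvChanged;
};

static void notifyConstructSucceeded(THREADCTX *pCtx)
{
    PFNCHANGED pfn = pCtx->pfnChanged;
    void *pv = pCtx->pvChanged;
    pCtx->pfnChanged = NULL;
    pCtx->pvChanged  = NULL;
    pCtx->u32State   = STATE_CREATED;
    if (pfn)
        pfn(pCtx, 0, pv);            /* the creator continues here, asynchronously */
}

static int threadCreate(THREADCTX *pCtx, PFNCHANGED pfnCreated, void *pvCreated)
{
    if (pCtx->u32State != STATE_TERMINATED)
        return -1;                   /* corresponds to VERR_INVALID_STATE */
    pCtx->u32State   = STATE_CREATING;
    pCtx->pfnChanged = pfnCreated;
    pCtx->pvChanged  = pvCreated;
    notifyConstructSucceeded(pCtx);  /* in reality the worker thread does this once it runs */
    return 0;
}

static void onCreated(THREADCTX *pCtx, int rc, void *pv)
{
    printf("thread created, rc=%d, state=%u, ctx=%s\n", rc, (unsigned)pCtx->u32State, (const char *)pv);
}

int main(void)
{
    THREADCTX Ctx = { STATE_TERMINATED, NULL, NULL };
    return threadCreate(&Ctx, onCreated, "enable request");
}

The design point is that thread creation can now return immediately and the caller (for example the GHH_ENABLE path) continues in the callback once the worker is actually up, instead of blocking the submitting thread on a semaphore.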