VirtualBox

source: vbox/trunk/src/VBox/VMM/PGMDbg.cpp@24911

Last change on this file since 24911 was 24061, checked in by vboxsync, 15 years ago

DBGF,DBGPlugInDiggers: Extended DBGFR3MemScan with an alignment restriction. Added DBGFR3CpuGetMode. Started on the WinNT debug digger - can detect the 32-bit kernel, locate the module list and report the nt version.

1/* $Id: PGMDbg.cpp 24061 2009-10-25 23:54:32Z vboxsync $ */
2/** @file
3 * PGM - Page Manager and Monitor - Debugger & Debugging APIs.
4 */
5
6/*
7 * Copyright (C) 2006-2007 Sun Microsystems, Inc.
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa
18 * Clara, CA 95054 USA or visit http://www.sun.com if you need
19 * additional information or have any questions.
20 */
21
22/*******************************************************************************
23* Header Files *
24*******************************************************************************/
25#define LOG_GROUP LOG_GROUP_PGM
26#include <VBox/pgm.h>
27#include <VBox/stam.h>
28#include "PGMInternal.h"
29#include <VBox/vm.h>
30#include <iprt/assert.h>
31#include <iprt/asm.h>
32#include <iprt/string.h>
33#include <VBox/log.h>
34#include <VBox/param.h>
35#include <VBox/err.h>
36
37/** The max needle size that we will bother searching for.
38 * This must not be more than half a page! */
39#define MAX_NEEDLE_SIZE 256
40
41
42/**
43 * Converts a R3 pointer to a GC physical address.
44 *
45 * Only for the debugger.
46 *
47 * @returns VBox status code.
48 * @retval VINF_SUCCESS on success, *pGCPhys is set.
49 * @retval VERR_INVALID_POINTER if the pointer is not within the GC physical memory.
50 *
51 * @param pVM The VM handle.
52 * @param R3Ptr The R3 pointer to convert.
53 * @param pGCPhys Where to store the GC physical address on success.
54 */
55VMMR3DECL(int) PGMR3DbgR3Ptr2GCPhys(PVM pVM, RTR3PTR R3Ptr, PRTGCPHYS pGCPhys)
56{
57 *pGCPhys = NIL_RTGCPHYS;
58 return VERR_NOT_IMPLEMENTED;
59}
60
61
62/**
63 * Converts a R3 pointer to a HC physical address.
64 *
65 * Only for the debugger.
66 *
67 * @returns VBox status code.
68 * @retval VINF_SUCCESS on success, *pHCPhys is set.
69 * @retval VERR_PGM_PHYS_PAGE_RESERVED if it's a valid GC physical page but has no physical backing.
70 * @retval VERR_INVALID_POINTER if the pointer is not within the GC physical memory.
71 *
72 * @param pVM The VM handle.
73 * @param R3Ptr The R3 pointer to convert.
74 * @param pHCPhys Where to store the HC physical address on success.
75 */
76VMMR3DECL(int) PGMR3DbgR3Ptr2HCPhys(PVM pVM, RTR3PTR R3Ptr, PRTHCPHYS pHCPhys)
77{
78 *pHCPhys = NIL_RTHCPHYS;
79 return VERR_NOT_IMPLEMENTED;
80}
81
82
83/**
84 * Converts a HC physical address to a GC physical address.
85 *
86 * Only for the debugger.
87 *
88 * @returns VBox status code
89 * @retval VINF_SUCCESS on success, *pGCPhys is set.
90 * @retval VERR_INVALID_POINTER if the HC physical address is not within the GC physical memory.
91 *
92 * @param pVM The VM handle.
93 * @param HCPhys The HC physical address to convert.
94 * @param pGCPhys Where to store the GC physical address on success.
95 */
96VMMR3DECL(int) PGMR3DbgHCPhys2GCPhys(PVM pVM, RTHCPHYS HCPhys, PRTGCPHYS pGCPhys)
97{
98 /*
99 * Validate and adjust the input a bit.
100 */
101 if (HCPhys == NIL_RTHCPHYS)
102 return VERR_INVALID_POINTER;
103 unsigned off = HCPhys & PAGE_OFFSET_MASK;
104 HCPhys &= X86_PTE_PAE_PG_MASK;
105 if (HCPhys == 0)
106 return VERR_INVALID_POINTER;
107
108 for (PPGMRAMRANGE pRam = pVM->pgm.s.CTX_SUFF(pRamRanges);
109 pRam;
110 pRam = pRam->CTX_SUFF(pNext))
111 {
112 uint32_t iPage = pRam->cb >> PAGE_SHIFT;
113 while (iPage-- > 0)
114 if (PGM_PAGE_GET_HCPHYS(&pRam->aPages[iPage]) == HCPhys)
115 {
116 *pGCPhys = pRam->GCPhys + (iPage << PAGE_SHIFT) + off;
117 return VINF_SUCCESS;
118 }
119 }
120 return VERR_INVALID_POINTER;
121}
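
/*
 * A minimal usage sketch, assuming a valid pVM; HCPhysProbe is a hypothetical
 * host physical address obtained elsewhere (note that the page offset is
 * preserved in the result):
 *
 *     RTGCPHYS GCPhys;
 *     int rc = PGMR3DbgHCPhys2GCPhys(pVM, HCPhysProbe, &GCPhys);
 *     if (RT_SUCCESS(rc))
 *         Log(("HCPhys %RHp backs guest page %RGp\n", HCPhysProbe, GCPhys));
 *     else
 *         Log(("HCPhys %RHp does not back any guest RAM page (rc=%Rrc)\n", HCPhysProbe, rc));
 */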
122
123
124/**
125 * Read physical memory API for the debugger, similar to
126 * PGMPhysSimpleReadGCPhys.
127 *
128 * @returns VBox status code.
129 *
130 * @param pVM The VM handle.
131 * @param pvDst Where to store what's read.
132 * @param GCPhysSrc Where to start reading from.
133 * @param cb The number of bytes to attempt reading.
134 * @param fFlags Flags, MBZ.
135 * @param pcbRead Where to store the actual number of bytes read, pass NULL if
136 * partial reads are unwanted.
137 * @todo Unused?
138 */
139VMMR3DECL(int) PGMR3DbgReadGCPhys(PVM pVM, void *pvDst, RTGCPHYS GCPhysSrc, size_t cb, uint32_t fFlags, size_t *pcbRead)
140{
141 /* validate */
142 AssertReturn(!fFlags, VERR_INVALID_PARAMETER);
143 AssertReturn(pVM, VERR_INVALID_PARAMETER);
144
145 /* try simple first. */
146 int rc = PGMPhysSimpleReadGCPhys(pVM, pvDst, GCPhysSrc, cb);
147 if (RT_SUCCESS(rc) || !pcbRead)
148 return rc;
149
150 /* partial read that failed, chop it up in pages. */
151 *pcbRead = 0;
152 size_t const cbReq = cb;
153 rc = VINF_SUCCESS;
154 while (cb > 0)
155 {
156 size_t cbChunk = PAGE_SIZE;
157 cbChunk -= GCPhysSrc & PAGE_OFFSET_MASK;
158 if (cbChunk > cb)
159 cbChunk = cb;
160
161 rc = PGMPhysSimpleReadGCPhys(pVM, pvDst, GCPhysSrc, cbChunk);
162
163 /* advance */
164 if (RT_FAILURE(rc))
165 break;
166 *pcbRead += cbChunk;
167 cb -= cbChunk;
168 GCPhysSrc += cbChunk;
169 pvDst = (uint8_t *)pvDst + cbChunk;
170 }
171
172 return *pcbRead && RT_FAILURE(rc) ? -rc : rc;
173}
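
/*
 * A minimal usage sketch, assuming a valid pVM; GCPhysSrc and the buffer size
 * are hypothetical, and the zero argument is fFlags. Note that a read which
 * succeeds in one go returns without touching *pcbRead, while a partial read
 * stores the byte count and returns the negated (informational) status:
 *
 *     uint8_t abBuf[32];
 *     size_t  cbRead = 0;
 *     int rc = PGMR3DbgReadGCPhys(pVM, abBuf, GCPhysSrc, sizeof(abBuf), 0, &cbRead);
 *     if (rc == VINF_SUCCESS)
 *         Log(("Read all %u bytes at %RGp\n", (unsigned)sizeof(abBuf), GCPhysSrc));
 *     else if (RT_SUCCESS(rc))
 *         Log(("Partial read, %u bytes at %RGp (%Rrc)\n", (unsigned)cbRead, GCPhysSrc, rc));
 *     else
 *         Log(("Read at %RGp failed: %Rrc\n", GCPhysSrc, rc));
 */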
174
175
176/**
177 * Write physical memory API for the debugger, similar to
178 * PGMPhysSimpleWriteGCPhys.
179 *
180 * @returns VBox status code.
181 *
182 * @param pVM The VM handle.
183 * @param GCPhysDst Where to start writing.
184 * @param pvSrc What to write.
185 * @param cb The number of bytes to attempt writing.
186 * @param fFlags Flags, MBZ.
187 * @param pcbWritten Where to store the actual number of bytes written, pass NULL
188 * if partial writes are unwanted.
189 * @todo Unused?
190 */
191VMMR3DECL(int) PGMR3DbgWriteGCPhys(PVM pVM, RTGCPHYS GCPhysDst, const void *pvSrc, size_t cb, uint32_t fFlags, size_t *pcbWritten)
192{
193 /* validate */
194 AssertReturn(!fFlags, VERR_INVALID_PARAMETER);
195 AssertReturn(pVM, VERR_INVALID_PARAMETER);
196
197 /* try simple first. */
198 int rc = PGMPhysSimpleWriteGCPhys(pVM, GCPhysDst, pvSrc, cb);
199 if (RT_SUCCESS(rc) || !pcbWritten)
200 return rc;
201
202 /* partial write that failed, chop it up in pages. */
203 *pcbWritten = 0;
204 rc = VINF_SUCCESS;
205 while (cb > 0)
206 {
207 size_t cbChunk = PAGE_SIZE;
208 cbChunk -= GCPhysDst & PAGE_OFFSET_MASK;
209 if (cbChunk > cb)
210 cbChunk = cb;
211
212 rc = PGMPhysSimpleWriteGCPhys(pVM, GCPhysDst, pvSrc, cbChunk);
213
214 /* advance */
215 if (RT_FAILURE(rc))
216 break;
217 *pcbWritten += cbChunk;
218 cb -= cbChunk;
219 GCPhysDst += cbChunk;
220 pvSrc = (uint8_t const *)pvSrc + cbChunk;
221 }
222
223 return *pcbWritten && RT_FAILURE(rc) ? -rc : rc;
224
225}
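
/*
 * A minimal usage sketch, assuming a valid pVM; GCPhysPatch and the byte
 * values are hypothetical (think of a debugger command patching guest RAM).
 * The zero argument is fFlags and the NULL means no partial writes:
 *
 *     static const uint8_t s_abNops[4] = { 0x90, 0x90, 0x90, 0x90 };
 *     int rc = PGMR3DbgWriteGCPhys(pVM, GCPhysPatch, s_abNops, sizeof(s_abNops), 0, NULL);
 *     if (RT_FAILURE(rc))
 *         Log(("Failed to write %u bytes at %RGp: %Rrc\n", (unsigned)sizeof(s_abNops), GCPhysPatch, rc));
 */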
226
227
228/**
229 * Read virtual memory API for the debugger, similar to PGMPhysSimpleReadGCPtr.
230 *
231 * @returns VBox status code.
232 *
233 * @param pVM The VM handle.
234 * @param pvDst Where to store what's read.
235 * @param GCPtrSrc Where to start reading from.
236 * @param cb The number of bytes to attempt reading.
237 * @param fFlags Flags, MBZ.
238 * @param pcbRead Where to store the actual number of bytes read, pass NULL if
239 * partial reads are unwanted.
240 * @todo Unused?
241 */
242VMMR3DECL(int) PGMR3DbgReadGCPtr(PVM pVM, void *pvDst, RTGCPTR GCPtrSrc, size_t cb, uint32_t fFlags, size_t *pcbRead)
243{
244 /* validate */
245 AssertReturn(!fFlags, VERR_INVALID_PARAMETER);
246 AssertReturn(pVM, VERR_INVALID_PARAMETER);
247
248 /* @todo SMP support! */
249 PVMCPU pVCpu = &pVM->aCpus[0];
250
251/** @todo deal with HMA */
252 /* try simple first. */
253 int rc = PGMPhysSimpleReadGCPtr(pVCpu, pvDst, GCPtrSrc, cb);
254 if (RT_SUCCESS(rc) || !pcbRead)
255 return rc;
256
257 /* partial read that failed, chop it up in pages. */
258 *pcbRead = 0;
259 rc = VINF_SUCCESS;
260 while (cb > 0)
261 {
262 size_t cbChunk = PAGE_SIZE;
263 cbChunk -= GCPtrSrc & PAGE_OFFSET_MASK;
264 if (cbChunk > cb)
265 cbChunk = cb;
266
267 rc = PGMPhysSimpleReadGCPtr(pVCpu, pvDst, GCPtrSrc, cbChunk);
268
269 /* advance */
270 if (RT_FAILURE(rc))
271 break;
272 *pcbRead += cbChunk;
273 cb -= cbChunk;
274 GCPtrSrc += cbChunk;
275 pvDst = (uint8_t *)pvDst + cbChunk;
276 }
277
278 return *pcbRead && RT_FAILURE(rc) ? -rc : rc;
279
280}
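
/*
 * A minimal usage sketch, assuming a valid pVM; GCPtrNode and the MYNODE
 * structure (with a GCPtrNext member) are hypothetical, e.g. a digger walking
 * a guest OS linked list. Note that the address is currently resolved in the
 * context of VCPU 0 only (see the SMP todo above):
 *
 *     MYNODE Node;
 *     int rc = PGMR3DbgReadGCPtr(pVM, &Node, GCPtrNode, sizeof(Node), 0, NULL);
 *     if (RT_SUCCESS(rc))
 *         Log(("Node at %RGv: next=%RGv\n", GCPtrNode, Node.GCPtrNext));
 */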
281
282
283/**
284 * Write virtual memory API for the debugger, similar to
285 * PGMPhysSimpleWriteGCPtr.
286 *
287 * @returns VBox status code.
288 *
289 * @param pVM The VM handle.
290 * @param GCPtrDst Where to start writing.
291 * @param pvSrc What to write.
292 * @param cb The number of bytes to attempt writing.
293 * @param fFlags Flags, MBZ.
294 * @param pcbWritten Where to store the actual number of bytes written, pass NULL
295 * if partial writes are unwanted.
296 * @todo Unused?
297 */
298VMMR3DECL(int) PGMR3DbgWriteGCPtr(PVM pVM, RTGCPTR GCPtrDst, void const *pvSrc, size_t cb, uint32_t fFlags, size_t *pcbWritten)
299{
300 /* validate */
301 AssertReturn(!fFlags, VERR_INVALID_PARAMETER);
302 AssertReturn(pVM, VERR_INVALID_PARAMETER);
303
304 /* @todo SMP support! */
305 PVMCPU pVCpu = &pVM->aCpus[0];
306
307/** @todo deal with HMA */
308 /* try simple first. */
309 int rc = PGMPhysSimpleWriteGCPtr(pVCpu, GCPtrDst, pvSrc, cb);
310 if (RT_SUCCESS(rc) || !pcbWritten)
311 return rc;
312
313 /* partial write that failed, chop it up in pages. */
314 *pcbWritten = 0;
315 rc = VINF_SUCCESS;
316 while (cb > 0)
317 {
318 size_t cbChunk = PAGE_SIZE;
319 cbChunk -= GCPtrDst & PAGE_OFFSET_MASK;
320 if (cbChunk > cb)
321 cbChunk = cb;
322
323 rc = PGMPhysSimpleWriteGCPtr(pVCpu, GCPtrDst, pvSrc, cbChunk);
324
325 /* advance */
326 if (RT_FAILURE(rc))
327 break;
328 *pcbWritten += cbChunk;
329 cb -= cbChunk;
330 GCPtrDst += cbChunk;
331 pvSrc = (uint8_t const *)pvSrc + cbChunk;
332 }
333
334 return *pcbWritten && RT_FAILURE(rc) ? -rc : rc;
335
336}
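
/*
 * A minimal usage sketch, assuming a valid pVM; GCPtrBp is a hypothetical
 * guest virtual address at which a software breakpoint byte is planted (a
 * real debugger would save and later restore the original byte):
 *
 *     static const uint8_t s_bInt3 = 0xcc;
 *     int rc = PGMR3DbgWriteGCPtr(pVM, GCPtrBp, &s_bInt3, sizeof(s_bInt3), 0, NULL);
 *     if (RT_FAILURE(rc))
 *         Log(("Failed to plant breakpoint byte at %RGv: %Rrc\n", GCPtrBp, rc));
 */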
337
338
339/**
340 * memchr() with alignment considerations.
341 *
342 * @returns Pointer to matching byte, NULL if none found.
343 * @param pb Where to search. Aligned.
344 * @param b What to search for.
345 * @param cb How much to search.
346 * @param uAlign The alignment restriction of the result.
347 */
348static const uint8_t *pgmR3DbgAlignedMemChr(const uint8_t *pb, uint8_t b, size_t cb, uint32_t uAlign)
349{
350 const uint8_t *pbRet;
351 if (uAlign <= 32)
352 {
353 pbRet = (const uint8_t *)memchr(pb, b, cb);
354 if ((uintptr_t)pbRet & (uAlign - 1))
355 {
356 do
357 {
358 pbRet++;
359 size_t cbLeft = cb - (pbRet - pb);
360 if (!cbLeft)
361 {
362 pbRet = NULL;
363 break;
364 }
365 pbRet = (const uint8_t *)memchr(pbRet, b, cbLeft);
366 } while ((uintptr_t)pbRet & (uAlign - 1));
367 }
368 }
369 else
370 {
371 pbRet = NULL;
372 if (cb)
373 {
374 for (;;)
375 {
376 if (*pb == b)
377 {
378 pbRet = pb;
379 break;
380 }
381 if (cb <= uAlign)
382 break;
383 cb -= uAlign;
384 pb += uAlign;
385 }
386 }
387 }
388 return pbRet;
389}
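
/*
 * A small behavioral sketch of the helper above, using a hypothetical buffer
 * that (as the function requires) starts on a suitably aligned address. With
 * an alignment of 4 the 0xcc at offset 3 is skipped and the one at offset 8
 * is the first hit:
 *
 *     static const uint8_t s_ab[10] = { 0, 0, 0, 0xcc, 0, 0, 0, 0, 0xcc, 0 };
 *     const uint8_t *pbHit = pgmR3DbgAlignedMemChr(&s_ab[0], 0xcc, sizeof(s_ab), 4);
 *     Assert(pbHit == &s_ab[8]);
 */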
390
391
392/**
393 * Scans a page for a byte string, keeping track of potential
394 * cross page matches.
395 *
396 * @returns true and *poff on match.
397 * false on mismatch.
398 * @param pbPage Pointer to the current page.
399 * @param poff Input: The offset into the page (aligned).
400 * Output: The page offset of the match on success.
401 * @param cb The number of bytes to search, starting at *poff.
402 * @param uAlign The needle alignment. This is of course less than a page.
403 * @param pabNeedle The byte string to search for.
404 * @param cbNeedle The length of the byte string.
405 * @param pabPrev The buffer that keeps track of a partial match that we
406 * bring over from the previous page. This buffer must be
407 * at least cbNeedle - 1 big.
408 * @param pcbPrev Input: The number of partial matching bytes from the previous page.
409 * Output: The number of partial matching bytes from this page.
410 * Initialize to 0 before the first call to this function.
411 */
412static bool pgmR3DbgScanPage(const uint8_t *pbPage, int32_t *poff, uint32_t cb, uint32_t uAlign,
413 const uint8_t *pabNeedle, size_t cbNeedle,
414 uint8_t *pabPrev, size_t *pcbPrev)
415{
416 /*
417 * Try to complete any partial match from the previous page.
418 */
419 if (*pcbPrev > 0)
420 {
421 size_t cbPrev = *pcbPrev;
422 Assert(!*poff);
423 Assert(cbPrev < cbNeedle);
424 if (!memcmp(pbPage, pabNeedle + cbPrev, cbNeedle - cbPrev))
425 {
426 if (cbNeedle - cbPrev > cb)
427 return false;
428 *poff = -(int32_t)cbPrev;
429 return true;
430 }
431
432 /* check out the remainder of the previous page. */
433 const uint8_t *pb = pabPrev;
434 for (;;)
435 {
436 if (cbPrev <= uAlign)
437 break;
438 cbPrev -= uAlign;
439 pb = pgmR3DbgAlignedMemChr(pb + uAlign, *pabNeedle, cbPrev, uAlign);
440 if (!pb)
441 break;
442 cbPrev = *pcbPrev - (pb - pabPrev);
443 if ( !memcmp(pb + 1, &pabNeedle[1], cbPrev - 1)
444 && !memcmp(pbPage, pabNeedle + cbPrev, cbNeedle - cbPrev))
445 {
446 if (cbNeedle - cbPrev > cb)
447 return false;
448 *poff = -(int32_t)cbPrev;
449 return true;
450 }
451 }
452
453 *pcbPrev = 0;
454 }
455
456 /*
457 * Match the body of the page.
458 */
459 const uint8_t *pb = pbPage + *poff;
460 const uint8_t *pbEnd = pb + cb;
461 for (;;)
462 {
463 pb = pgmR3DbgAlignedMemChr(pb, *pabNeedle, cb, uAlign);
464 if (!pb)
465 break;
466 cb = pbEnd - pb;
467 if (cb >= cbNeedle)
468 {
469 /* match? */
470 if (!memcmp(pb + 1, &pabNeedle[1], cbNeedle - 1))
471 {
472 *poff = pb - pbPage;
473 return true;
474 }
475 }
476 else
477 {
478 /* partial match at the end of the page? */
479 if (!memcmp(pb + 1, &pabNeedle[1], cb - 1))
480 {
481 /* We're copying one byte more than we really need here, but wtf. */
482 memcpy(pabPrev, pb, cb);
483 *pcbPrev = cb;
484 return false;
485 }
486 }
487
488 /* no match, skip ahead. */
489 if (cb <= uAlign)
490 break;
491 pb += uAlign;
492 cb -= uAlign;
493 }
494
495 return false;
496}
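
/*
 * A sketch of how the scanner above is meant to be driven across a page
 * boundary, here with no alignment restriction (uAlign = 1); pbPage0/pbPage1,
 * abNeedle and cbNeedle are hypothetical. The abPrev/cbPrev pair is kept
 * alive between calls so a needle that starts near the end of one page and
 * ends in the next is reported with a negative offset into the second page:
 *
 *     uint8_t abPrev[MAX_NEEDLE_SIZE];
 *     size_t  cbPrev = 0;
 *     int32_t off    = 0;
 *     if (pgmR3DbgScanPage(pbPage0, &off, PAGE_SIZE, 1, abNeedle, cbNeedle, abPrev, &cbPrev))
 *         Log(("hit in first page at offset %d\n", off));
 *     off = 0;
 *     if (pgmR3DbgScanPage(pbPage1, &off, PAGE_SIZE, 1, abNeedle, cbNeedle, abPrev, &cbPrev))
 *         Log(("hit at second page offset %d (negative: the match started in the first page)\n", off));
 */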
497
498
499/**
500 * Scans guest physical memory for a byte string.
501 *
502 * @returns VBox status codes:
503 * @retval VINF_SUCCESS and *pGCPtrHit on success.
504 * @retval VERR_DBGF_MEM_NOT_FOUND if not found.
505 * @retval VERR_INVALID_POINTER if any of the pointer arguments are invalid.
506 * @retval VERR_INVALID_PARAMETER if any other arguments are invalid.
507 *
508 * @param pVM Pointer to the shared VM structure.
509 * @param GCPhys Where to start searching.
510 * @param cbRange The number of bytes to search.
511 * @param GCPhysAlign The alignment of the needle. Must be a power of two
512 * and less than or equal to 4GB.
513 * @param pabNeedle The byte string to search for.
514 * @param cbNeedle The length of the byte string. Max 256 bytes.
515 * @param pGCPhysHit Where to store the address of the first occurrence on success.
516 */
517VMMR3DECL(int) PGMR3DbgScanPhysical(PVM pVM, RTGCPHYS GCPhys, RTGCPHYS cbRange, RTGCPHYS GCPhysAlign,
518 const uint8_t *pabNeedle, size_t cbNeedle, PRTGCPHYS pGCPhysHit)
519{
520 /*
521 * Validate and adjust the input a bit.
522 */
523 if (!VALID_PTR(pGCPhysHit))
524 return VERR_INVALID_POINTER;
525 *pGCPhysHit = NIL_RTGCPHYS;
526
527 if ( !VALID_PTR(pabNeedle)
528 || GCPhys == NIL_RTGCPHYS)
529 return VERR_INVALID_POINTER;
530 if (!cbNeedle)
531 return VERR_INVALID_PARAMETER;
532 if (cbNeedle > MAX_NEEDLE_SIZE)
533 return VERR_INVALID_PARAMETER;
534
535 if (!cbRange)
536 return VERR_DBGF_MEM_NOT_FOUND;
537 if (GCPhys + cbNeedle - 1 < GCPhys)
538 return VERR_DBGF_MEM_NOT_FOUND;
539
540 if (!GCPhysAlign)
541 return VERR_INVALID_PARAMETER;
542 if (GCPhysAlign > UINT32_MAX)
543 return VERR_NOT_POWER_OF_TWO;
544 if (GCPhysAlign & (GCPhysAlign - 1))
545 return VERR_INVALID_PARAMETER;
546
547 if (GCPhys & (GCPhysAlign - 1))
548 {
549 RTGCPHYS Adj = GCPhysAlign - (GCPhys & (GCPhysAlign - 1));
550 if ( cbRange <= Adj
551 || GCPhys + Adj < GCPhys)
552 return VERR_DBGF_MEM_NOT_FOUND;
553 GCPhys += Adj;
554 cbRange -= Adj;
555 }
556
557 const bool fAllZero = ASMMemIsAll8(pabNeedle, cbNeedle, 0) == NULL;
558 const uint32_t cIncPages = GCPhysAlign <= PAGE_SIZE
559 ? 1
560 : GCPhysAlign >> PAGE_SHIFT;
561 const RTGCPHYS GCPhysLast = GCPhys + cbRange - 1 >= GCPhys
562 ? GCPhys + cbRange - 1
563 : ~(RTGCPHYS)0;
564
565 /*
566 * Search the memory - ignore MMIO and zero pages, also don't
567 * bother to match across ranges.
568 */
569 pgmLock(pVM);
570 for (PPGMRAMRANGE pRam = pVM->pgm.s.CTX_SUFF(pRamRanges);
571 pRam;
572 pRam = pRam->CTX_SUFF(pNext))
573 {
574 /*
575 * If the search range starts prior to the current ram range record,
576 * adjust the search range and possibly conclude the search.
577 */
578 RTGCPHYS off;
579 if (GCPhys < pRam->GCPhys)
580 {
581 if (GCPhysLast < pRam->GCPhys)
582 break;
583 GCPhys = pRam->GCPhys;
584 off = 0;
585 }
586 else
587 off = GCPhys - pRam->GCPhys;
588 if (off < pRam->cb)
589 {
590 /*
591 * Iterate the relevant pages.
592 */
593 uint8_t abPrev[MAX_NEEDLE_SIZE];
594 size_t cbPrev = 0;
595 const uint32_t cPages = pRam->cb >> PAGE_SHIFT;
596 uint32_t iPage = off >> PAGE_SHIFT;
597 uint32_t offPage = GCPhys & PAGE_OFFSET_MASK;
598 GCPhys &= ~(RTGCPHYS)PAGE_OFFSET_MASK;
599 for (;; offPage = 0)
600 {
601 PPGMPAGE pPage = &pRam->aPages[iPage];
602 if ( ( !PGM_PAGE_IS_ZERO(pPage)
603 || fAllZero)
604 && !PGM_PAGE_IS_MMIO(pPage))
605 {
606 void const *pvPage;
607 PGMPAGEMAPLOCK Lock;
608 int rc = PGMPhysGCPhys2CCPtrReadOnly(pVM, GCPhys, &pvPage, &Lock);
609 if (RT_SUCCESS(rc))
610 {
611 int32_t offHit = offPage;
612 bool fRc;
613 if (GCPhysAlign < PAGE_SIZE)
614 {
615 uint32_t cbSearch = (GCPhys ^ GCPhysLast) & ~(RTGCPHYS)PAGE_OFFSET_MASK
616 ? PAGE_SIZE - (uint32_t)offPage
617 : (GCPhysLast & PAGE_OFFSET_MASK) + 1 - (uint32_t)offPage;
618 fRc = pgmR3DbgScanPage((uint8_t const *)pvPage, &offHit, cbSearch, (uint32_t)GCPhysAlign,
619 pabNeedle, cbNeedle, &abPrev[0], &cbPrev);
620 }
621 else
622 fRc = memcmp(pvPage, pabNeedle, cbNeedle) == 0
623 && (GCPhysLast - GCPhys) >= cbNeedle;
624 PGMPhysReleasePageMappingLock(pVM, &Lock);
625 if (fRc)
626 {
627 *pGCPhysHit = GCPhys + offHit;
628 pgmUnlock(pVM);
629 return VINF_SUCCESS;
630 }
631 }
632 else
633 cbPrev = 0; /* ignore error. */
634 }
635 else
636 cbPrev = 0;
637
638 /* advance to the next page. */
639 GCPhys += (RTGCPHYS)cIncPages << PAGE_SHIFT;
640 if (GCPhys >= GCPhysLast) /* (may not always hit, but then we've run out of ranges.) */
641 {
642 pgmUnlock(pVM);
643 return VERR_DBGF_MEM_NOT_FOUND;
644 }
645 iPage += cIncPages;
646 if ( iPage < cIncPages
647 || iPage >= cPages)
648 break;
649 }
650 }
651 }
652 pgmUnlock(pVM);
653 return VERR_DBGF_MEM_NOT_FOUND;
654}
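
/*
 * A minimal usage sketch, assuming a valid pVM; the marker bytes are
 * hypothetical. It scans guest physical memory from 0 to just under 4GB with
 * no alignment restriction, roughly the kind of search DBGFR3MemScan (see
 * the change log above) performs on physical addresses:
 *
 *     static const uint8_t s_abMarker[] = { 'V', 'B', 'O', 'X' };
 *     RTGCPHYS GCPhysHit;
 *     int rc = PGMR3DbgScanPhysical(pVM, 0, _4G - 1, 1, s_abMarker, sizeof(s_abMarker), &GCPhysHit);
 *     if (rc == VINF_SUCCESS)
 *         Log(("Marker found at %RGp\n", GCPhysHit));
 *     else if (rc == VERR_DBGF_MEM_NOT_FOUND)
 *         Log(("Marker not found below 4GB\n"));
 */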
655
656
657/**
658 * Scans (guest) virtual memory for a byte string.
659 *
660 * @returns VBox status codes:
661 * @retval VINF_SUCCESS and *pGCPtrHit on success.
662 * @retval VERR_DBGF_MEM_NOT_FOUND if not found.
663 * @retval VERR_INVALID_POINTER if any of the pointer arguments are invalid.
664 * @retval VERR_INVALID_PARAMETER if any other arguments are invalid.
665 *
666 * @param pVM Pointer to the shared VM structure.
667 * @param pVCpu The CPU context to search in.
668 * @param GCPtr Where to start searching.
669 * @param cbRange The number of bytes to search.
670 * @param GCPtrAlign The alignment of the needle. Must be a power of two
671 * and less than or equal to 4GB.
672 * @param pabNeedle The byte string to search for.
673 * @param cbNeedle The length of the byte string. Max 256 bytes.
674 * @param pGCPtrHit Where to store the address of the first occurrence on success.
675 */
676VMMR3DECL(int) PGMR3DbgScanVirtual(PVM pVM, PVMCPU pVCpu, RTGCPTR GCPtr, RTGCPTR cbRange, RTGCPTR GCPtrAlign,
677 const uint8_t *pabNeedle, size_t cbNeedle, PRTGCUINTPTR pGCPtrHit)
678{
679 VMCPU_ASSERT_EMT(pVCpu);
680
681 /*
682 * Validate and adjust the input a bit.
683 */
684 if (!VALID_PTR(pGCPtrHit))
685 return VERR_INVALID_POINTER;
686 *pGCPtrHit = 0;
687
688 if (!VALID_PTR(pabNeedle))
689 return VERR_INVALID_POINTER;
690 if (!cbNeedle)
691 return VERR_INVALID_PARAMETER;
692 if (cbNeedle > MAX_NEEDLE_SIZE)
693 return VERR_INVALID_PARAMETER;
694
695 if (!cbRange)
696 return VERR_DBGF_MEM_NOT_FOUND;
697 if (GCPtr + cbNeedle - 1 < GCPtr)
698 return VERR_DBGF_MEM_NOT_FOUND;
699
700 if (!GCPtrAlign)
701 return VERR_INVALID_PARAMETER;
702 if (GCPtrAlign > UINT32_MAX)
703 return VERR_NOT_POWER_OF_TWO;
704 if (GCPtrAlign & (GCPtrAlign - 1))
705 return VERR_INVALID_PARAMETER;
706
707 if (GCPtr & (GCPtrAlign - 1))
708 {
709 RTGCPTR Adj = GCPtrAlign - (GCPtr & (GCPtrAlign - 1));
710 if ( cbRange <= Adj
711 || GCPtr + Adj < GCPtr)
712 return VERR_DBGF_MEM_NOT_FOUND;
713 GCPtr += Adj;
714 cbRange -= Adj;
715 }
716
717 /*
718 * Search the memory - ignore MMIO, zero and not-present pages.
719 */
720 const bool fAllZero = ASMMemIsAll8(pabNeedle, cbNeedle, 0) == NULL;
721 PGMMODE enmMode = PGMGetGuestMode(pVCpu);
722 RTGCPTR GCPtrMask = PGMMODE_IS_LONG_MODE(enmMode) ? UINT64_MAX : UINT32_MAX;
723 uint8_t abPrev[MAX_NEEDLE_SIZE];
724 size_t cbPrev = 0;
725 const uint32_t cIncPages = GCPtrAlign <= PAGE_SIZE
726 ? 1
727 : GCPtrAlign >> PAGE_SHIFT;
728 const RTGCPTR GCPtrLast = GCPtr + cbRange - 1 >= GCPtr
729 ? (GCPtr + cbRange - 1) & GCPtrMask
730 : GCPtrMask;
731 RTGCPTR cPages = (((GCPtrLast - GCPtr) + (GCPtr & PAGE_OFFSET_MASK)) >> PAGE_SHIFT) + 1;
732 uint32_t offPage = GCPtr & PAGE_OFFSET_MASK;
733 GCPtr &= ~(RTGCPTR)PAGE_OFFSET_MASK;
734 for (;; offPage = 0)
735 {
736 RTGCPHYS GCPhys;
737 int rc = PGMPhysGCPtr2GCPhys(pVCpu, GCPtr, &GCPhys);
738 if (RT_SUCCESS(rc))
739 {
740 PPGMPAGE pPage = pgmPhysGetPage(&pVM->pgm.s, GCPhys);
741 if ( pPage
742 && ( !PGM_PAGE_IS_ZERO(pPage)
743 || fAllZero)
744 && !PGM_PAGE_IS_MMIO(pPage))
745 {
746 void const *pvPage;
747 PGMPAGEMAPLOCK Lock;
748 rc = PGMPhysGCPhys2CCPtrReadOnly(pVM, GCPhys, &pvPage, &Lock);
749 if (RT_SUCCESS(rc))
750 {
751 int32_t offHit = offPage;
752 bool fRc;
753 if (GCPtrAlign < PAGE_SIZE)
754 {
755 uint32_t cbSearch = cPages > 0
756 ? PAGE_SIZE - (uint32_t)offPage
757 : (GCPtrLast & PAGE_OFFSET_MASK) + 1 - (uint32_t)offPage;
758 fRc = pgmR3DbgScanPage((uint8_t const *)pvPage, &offHit, cbSearch, (uint32_t)GCPtrAlign,
759 pabNeedle, cbNeedle, &abPrev[0], &cbPrev);
760 }
761 else
762 fRc = memcmp(pvPage, pabNeedle, cbNeedle) == 0
763 && (GCPtrLast - GCPtr) >= cbNeedle;
764 PGMPhysReleasePageMappingLock(pVM, &Lock);
765 if (fRc)
766 {
767 *pGCPtrHit = GCPtr + offHit;
768 return VINF_SUCCESS;
769 }
770 }
771 else
772 cbPrev = 0; /* ignore error. */
773 }
774 else
775 cbPrev = 0;
776 }
777 else
778 cbPrev = 0; /* ignore error. */
779
780 /* advance to the next page. */
781 if (cPages <= cIncPages)
782 break;
783 cPages -= cIncPages;
784 GCPtr += (RTGCPTR)cIncPages << PAGE_SHIFT;
785 }
786 return VERR_DBGF_MEM_NOT_FOUND;
787}
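
/*
 * A minimal usage sketch; the start address, range and pattern are
 * hypothetical (the kind of probing a kernel digger might do), and the
 * needle alignment is 8 bytes. The call must be made on the EMT of the
 * given VCPU, as the assertion at the top of the function requires:
 *
 *     uint64_t const uPattern = UINT64_C(0x001a2b3c4d5e6f00);
 *     RTGCUINTPTR GCPtrHit;
 *     int rc = PGMR3DbgScanVirtual(pVM, &pVM->aCpus[0], UINT64_C(0xfffff80000000000), _2M, 8,
 *                                  (uint8_t const *)&uPattern, sizeof(uPattern), &GCPtrHit);
 *     if (rc == VINF_SUCCESS)
 *         Log(("Pattern found at %RGv\n", (RTGCPTR)GCPtrHit));
 */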
788