#if _DBG_MEMCPY_INLINE_ && !defined(MIDL_PASS) && !defined(_MEMCPY_INLINE_) && !defined(_CRTBLD)
#define _MEMCPY_INLINE_
FORCEINLINE
PVOID
__cdecl
memcpy_inline (
    __out_bcount_full(size) void *dst,
    __in_bcount(size) const void *src,
    __in size_t size
    )
{
    //
    // Make sure the source and destination do not overlap such that the
    // copy would corrupt the destination.
    //

    if (((char *)dst > (char *)src) &&
        ((char *)dst < ((char *)src + size))) {
        __debugbreak();
    }

    return memcpy(dst, src, size);
}
#define memcpy memcpy_inline
#endif
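//
// Usage sketch (hypothetical buffer, not part of the original header): when
// _DBG_MEMCPY_INLINE_ is defined, a forward copy whose destination lies
// inside the source range trips the __debugbreak() above; overlapping
// regions should be copied with RtlMoveMemory/memmove instead.
//
// UCHAR Buffer[16];
//
// memcpy(&Buffer[4], &Buffer[0], 8);        // dst overlaps [src, src+size): breaks into the debugger
// RtlMoveMemory(&Buffer[4], &Buffer[0], 8); // memmove handles the overlap correctly
//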
#if (NTDDI_VERSION >= NTDDI_WIN2K)
__checkReturn
NTSYSAPI
SIZE_T
NTAPI
RtlCompareMemory (
    __in const VOID *Source1,
    __in const VOID *Source2,
    __in SIZE_T Length
    );
#endif
#define RtlEqualMemory(Destination,Source,Length) (!memcmp((Destination),(Source),(Length)))
#define RtlMoveMemory(Destination,Source,Length) memmove((Destination),(Source),(Length))
#define RtlCopyMemory(Destination,Source,Length) memcpy((Destination),(Source),(Length))
#define RtlFillMemory(Destination,Length,Fill) memset((Destination),(Fill),(Length))
#define RtlZeroMemory(Destination,Length) memset((Destination),0,(Length))
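//
// Usage sketch (assumed buffer contents, for illustration only):
// RtlCompareMemory returns the number of leading bytes that match, while
// the RtlEqualMemory macro only reports whether the two blocks are identical.
//
// UCHAR A[4] = { 1, 2, 3, 4 };
// UCHAR B[4] = { 1, 2, 9, 4 };
//
// SIZE_T Matching = RtlCompareMemory(A, B, sizeof(A));  // Matching == 2
// BOOLEAN Identical = RtlEqualMemory(A, B, sizeof(A));  // FALSE, memcmp() != 0
//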
#if !defined(MIDL_PASS)
FORCEINLINE
PVOID
RtlSecureZeroMemory(
    __out_bcount_full(cnt) PVOID ptr,
    __in SIZE_T cnt
    )
{
    volatile char *vptr = (volatile char *)ptr;

#if defined(_M_AMD64)

    __stosb((PUCHAR)((ULONG64)vptr), 0, cnt);

#else

    while (cnt) {
        *vptr = 0;
        vptr++;
        cnt--;
    }

#endif

    return ptr;
}
#endif
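//
// Usage sketch (hypothetical buffer): a plain RtlZeroMemory of a buffer that
// is never read again can be eliminated by the optimizer as a dead store;
// the volatile/__stosb path above guarantees that sensitive data such as key
// material is actually overwritten.
//
// UCHAR KeyMaterial[32];
//
// /* ... use KeyMaterial ... */
//
// RtlSecureZeroMemory(KeyMaterial, sizeof(KeyMaterial));  // zeroing cannot be optimized away
//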
#define RtlCopyBytes RtlCopyMemory
#define RtlZeroBytes RtlZeroMemory
#define RtlFillBytes RtlFillMemory
#if defined(_M_AMD64)
NTSYSAPI
VOID
NTAPI
RtlCopyMemoryNonTemporal (
    __out_bcount_full(Length) VOID UNALIGNED *Destination,
    __in_bcount(Length) CONST VOID UNALIGNED *Source,
    __in SIZE_T Length
    );
#else
#define RtlCopyMemoryNonTemporal RtlCopyMemory
#endif
#if (NTDDI_VERSION >= NTDDI_WIN2KSP3)
NTSYSAPI
VOID
FASTCALL
RtlPrefetchMemoryNonTemporal(
    __in PVOID Source,
    __in SIZE_T Length
    );
#endif
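//
// Usage sketch (hypothetical names and size): for a large, write-once copy
// the non-temporal variant avoids displacing useful lines from the processor
// caches, and the prefetch routine can hint that a source range will be read
// once and not reused.
//
// #define FRAME_SIZE (4 * 1024 * 1024)
//
// RtlPrefetchMemoryNonTemporal(SourceFrame, FRAME_SIZE);
// RtlCopyMemoryNonTemporal(DestinationFrame, SourceFrame, FRAME_SIZE);
//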
//
// Define the user-mode buffer probe routines (ProbeForWrite and
// ProbeForRead).
//
VOID
ProbeForWrite (
    __inout_bcount(Length) PVOID Address,
    __in SIZE_T Length,
    __in ULONG Alignment
    )
{
    ULONG_PTR EndAddress;
    ULONG_PTR StartAddress;

#define PageSize PAGE_SIZE

    if (Length != 0) {
        ASSERT((Alignment == 1) || (Alignment == 2) ||
               (Alignment == 4) || (Alignment == 8) ||
               (Alignment == 16));                         // the alignment argument must be a supported power of two

        StartAddress = (ULONG_PTR)Address;
        if ((StartAddress & (Alignment - 1)) == 0) {
            EndAddress = StartAddress + Length - 1;        // address of the last byte in the range
            if ((StartAddress <= EndAddress) &&
                (EndAddress < MM_USER_PROBE_ADDRESS)) {
                EndAddress = (EndAddress & ~(PageSize - 1)) + PageSize;

                //
                // Probe the first byte of every page in the range.
                // (Does a process running under WOW64 use 4 KB pages,
                // which could be smaller than PageSize?)
                //

                do {
                    *(volatile CHAR *)StartAddress = *(volatile CHAR *)StartAddress;
                    StartAddress = (StartAddress & ~(PageSize - 1)) + PageSize;
                } while (StartAddress != EndAddress);

                return;

            } else {
                ExRaiseAccessViolation();
            }

        } else {
            ExRaiseDatatypeMisalignment();                 // the address is not aligned as requested: raise an exception
        }
    }

    return;
}
VOID
ProbeForRead (
    __in_bcount(Length) VOID *Address,
    __in SIZE_T Length,
    __in ULONG Alignment
    )
{
    PAGED_CODE();

    ASSERT((Alignment == 1) || (Alignment == 2) ||
           (Alignment == 4) || (Alignment == 8) ||
           (Alignment == 16));

    if (Length != 0) {
        if (((ULONG_PTR)Address & (Alignment - 1)) != 0) {
            ExRaiseDatatypeMisalignment();                                                 // the address is not aligned as requested: raise an exception

        } else if ((((ULONG_PTR)Address + Length) > (ULONG_PTR)MM_USER_PROBE_ADDRESS) ||  // the range ends above the user-space limit
                   (((ULONG_PTR)Address + Length) < (ULONG_PTR)Address)) {                 // or the end address wraps around the address space

            *(volatile UCHAR * const)MM_USER_PROBE_ADDRESS = 0;                            // touch the probe address to force an access violation
        }
    }
}
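//
// Usage sketch (hypothetical routine, not part of the original source): the
// probe routines only validate alignment and that the range lies below
// MM_USER_PROBE_ADDRESS; the pages can still be freed or protected at any
// time, so both the probe and every access to the user buffer must be
// wrapped in __try/__except.
//
NTSTATUS
CopyFromUserBuffer (
    __in PVOID UserBuffer,
    __out_bcount(Length) PVOID KernelBuffer,
    __in SIZE_T Length
    )
{
    NTSTATUS Status = STATUS_SUCCESS;

    __try {
        ProbeForRead(UserBuffer, Length, sizeof(UCHAR));  // raises if the range is misaligned or not in user space
        RtlCopyMemory(KernelBuffer, UserBuffer, Length);  // may still fault; the handler below catches it
    } __except (EXCEPTION_EXECUTE_HANDLER) {
        Status = GetExceptionCode();
    }

    return Status;
}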