Starting with Snow Leopard (Mac OS X 10.6), Apple provides a direct screen-capture API, which greatly simplifies the old approach of producing a screenshot by reading and analyzing the screen contents yourself.
#include "OpenGL/gl.h" + (NSImage *)capture { CGImageRef imageRef = CGDisplayCreateImage(kCGDirectMainDisplay); NSImage *image = [[NSImage alloc]initWithCGImage:imageRef size:NSZeroSize]; return image; }
On 10.5 and earlier, by contrast, code along the following lines was needed:
+ (NSImage *)captureOld {
    // Find the screen that contains the rect we want to capture.
    NSEnumerator *enumerator = [[NSScreen screens] objectEnumerator];
    id obj = nil;
    NSRect screenRect;
    NSScreen *useThisScreen = [NSScreen mainScreen];
    // NSRect rect = NSMakeRect(0, 0, 256, 256);   // capture a sub-rect instead of the full screen
    NSRect rect = [useThisScreen frame];
    while ((obj = [enumerator nextObject])) {
        screenRect = [obj frame];
        if ((rect.origin.x >= screenRect.origin.x) &&
            (rect.origin.x <= screenRect.origin.x + screenRect.size.width) &&
            (rect.origin.y >= screenRect.origin.y) &&
            (rect.origin.y <= screenRect.origin.y + screenRect.size.height)) {
            useThisScreen = obj;
        }
    }
    if (nil == useThisScreen) {
        NSLog(@"unable to find a valid screen");
        return nil;
    }

    // Map the NSScreen to its CGDirectDisplayID.
    NSDictionary *descr = [useThisScreen deviceDescription];
    CGDirectDisplayID display =
        (CGDirectDisplayID)[[descr valueForKey:@"NSScreenNumber"] unsignedIntValue];

    // Convert the capture rect into the screen's local coordinates.
    CGRect srcRect;
    srcRect.origin.x = rect.origin.x - [useThisScreen frame].origin.x;
    srcRect.origin.y = rect.origin.y - [useThisScreen frame].origin.y;
    srcRect.size.height = rect.size.height;
    srcRect.size.width = rect.size.width;

    CGContextRef bitmap;
    CGColorSpaceRef cSpace = CGColorSpaceCreateDeviceRGB();
    CGLContextObj glContextObj;
    CGLPixelFormatObj pixelFormatObj;
    GLint numPixelFormats;
    CGLPixelFormatAttribute attribs[] = {
        kCGLPFAFullScreen,
        kCGLPFADisplayMask,
        0,   // Display mask bit goes here
        0
    };

    if (display == kCGNullDirectDisplay)
        display = CGMainDisplayID();
    attribs[2] = CGDisplayIDToOpenGLDisplayMask(display);

    // Build a full-screen GL context on that display.
    CGLChoosePixelFormat(attribs, &pixelFormatObj, &numPixelFormats);
    CGLCreateContext(pixelFormatObj, NULL, &glContextObj);
    CGLDestroyPixelFormat(pixelFormatObj);
    CGLSetCurrentContext(glContextObj);
    CGLSetFullScreen(glContextObj);

    glReadBuffer(GL_FRONT);

    void *data;
    long bytewidth;
    GLint width, height;

    width = srcRect.size.width;
    height = srcRect.size.height;
    bytewidth = width * 4;                 // Assume 4 bytes/pixel for now
    bytewidth = (bytewidth + 3) & ~3;      // Align each row to 4 bytes

    // Build the bitmap context that will hold the pixels.
    data = malloc(height * bytewidth);
    bitmap = CGBitmapContextCreate(data, width, height, 8, bytewidth, cSpace,
                                   kCGImageAlphaNoneSkipFirst);
    CFRelease(cSpace);

    // Read the framebuffer into our bitmap.
    glFinish();                              // Finish all OpenGL commands
    glPixelStorei(GL_PACK_ALIGNMENT, 4);     // Force 4-byte alignment
    glPixelStorei(GL_PACK_ROW_LENGTH, 0);
    glPixelStorei(GL_PACK_SKIP_ROWS, 0);
    glPixelStorei(GL_PACK_SKIP_PIXELS, 0);
    glReadPixels((GLint)srcRect.origin.x, (GLint)srcRect.origin.y, width, height,
                 GL_BGRA, GL_UNSIGNED_BYTE, data);

    // glReadPixels returns rows bottom-up, so flip the image vertically in place.
    int i, j;
    unsigned char *tmpdata = (unsigned char *)data;
    char temp[4] = {0};
    for (i = 0; i < height / 2; i++) {
        for (j = 0; j < width; j++) {
            memcpy(temp, tmpdata + bytewidth * i + j * 4, 4);
            memcpy(tmpdata + bytewidth * i + j * 4,
                   tmpdata + bytewidth * (height - i - 1) + j * 4, 4);
            memcpy(tmpdata + bytewidth * (height - i - 1) + j * 4, temp, 4);
        }
    }

    CGImageRef newImage = CGBitmapContextCreateImage(bitmap);
    NSImage *img = [[NSImage alloc] initWithCGImage:newImage size:NSZeroSize];

    // Clean up everything created along the way.
    CGImageRelease(newImage);
    CGContextRelease(bitmap);
    free(data);
    CGLSetCurrentContext(NULL);
    CGLClearDrawable(glContextObj);
    CGLDestroyContext(glContextObj);

    return img;
}
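Either version returns an NSImage that can be written to disk in the usual Cocoa way. A minimal sketch, assuming the class methods above live on a hypothetical ScreenCapture class and using an illustrative output path:

NSImage *shot = [ScreenCapture capture];   // or captureOld on 10.5
NSData *tiffData = [shot TIFFRepresentation];
NSBitmapImageRep *rep = [NSBitmapImageRep imageRepWithData:tiffData];
NSData *pngData = [rep representationUsingType:NSPNGFileType
                                    properties:[NSDictionary dictionary]];
[pngData writeToFile:@"/tmp/screenshot.png" atomically:YES];   // path is illustrative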