iOS Development: Several Ways to Take a Screenshot

1. Render the view's layer into an image context with UIGraphicsBeginImageContextWithOptions:
        UIGraphicsBeginImageContextWithOptions(pageView.page.bounds.size, YES, zoomScale);
        [pageView.page.layer renderInContext:UIGraphicsGetCurrentContext()];
        UIImage *uiImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
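
The snippet above relies on a pageView and a zoomScale defined elsewhere in the surrounding class. A self-contained sketch of the same technique, written as a hypothetical helper that snapshots an arbitrary UIView, could look like this:

    #import <QuartzCore/QuartzCore.h>

    // Hypothetical helper: renders any view's layer into a UIImage.
    // A scale of 0.0 uses the device's main screen scale (Retina-aware).
    - (UIImage *)imageFromView:(UIView *)view {
        UIGraphicsBeginImageContextWithOptions(view.bounds.size, NO, 0.0);
        [view.layer renderInContext:UIGraphicsGetCurrentContext()];
        UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return image;
    }
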
2. Read the OpenGL ES framebuffer back with glReadPixels and build a UIImage:
static void freeScreenshotData(void *info, const void *data, size_t size) {
    // Release callback: frees the pixel buffer once the data provider is done with it.
    free((void *)data);
}

- (UIImage *)glToUIImage {
    DWScrollView *pageView = [self getActivePageView];
    pageView.page.backgroundColor = [UIColor clearColor];
    // self.backgroundColor = [UIColor clearColor];
    NSInteger myDataLength = 320 * 308 * 4;

    // Allocate a buffer and read the pixels back from the OpenGL ES framebuffer.
    GLubyte *buffer = (GLubyte *)malloc(myDataLength);
    glReadPixels(0, 0, 320, 308, GL_RGBA, GL_UNSIGNED_BYTE, buffer);

    // GL renders "upside down", so swap top to bottom into a new buffer.
    // There's gotta be a better way, but this works.
    GLubyte *buffer2 = (GLubyte *)malloc(myDataLength);
    for (int y = 0; y < 308; y++) {
        for (int x = 0; x < 320 * 4; x++) {
            if (buffer[y * 4 * 320 + x] == 0)
                buffer2[(307 - y) * 320 * 4 + x] = 1;   // bump zero bytes to 1 (kept from the original snippet)
            else
                buffer2[(307 - y) * 320 * 4 + x] = buffer[y * 4 * 320 + x];
        }
    }
    free(buffer);

    // Make a data provider with the flipped data; the callback frees buffer2 when it is no longer needed.
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer2, myDataLength, freeScreenshotData);

    // Prep the ingredients.
    int bitsPerComponent = 8;
    int bitsPerPixel = 32;
    int bytesPerRow = 4 * 320;
    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
    CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;

    // Make the CGImage.
    CGImageRef imageRef = CGImageCreate(320, 308, bitsPerComponent, bitsPerPixel, bytesPerRow,
                                        colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);

    // Then make the UIImage from that and release the Core Graphics objects we own.
    UIImage *myImage = [UIImage imageWithCGImage:imageRef];
    UIImageWriteToSavedPhotosAlbum(myImage, nil, nil, nil);
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpaceRef);
    return myImage;
}
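
glReadPixels only returns what is currently in the bound framebuffer, so a call like glToUIImage is typically made right after the frame's draw calls and before the renderbuffer is presented. A rough usage sketch, assuming an OpenGL ES 1.x render loop where context is the view's EAGLContext (the method and variable names here are illustrative):

    - (void)drawFrame {
        // ... issue the OpenGL ES draw calls for this frame ...
        UIImage *screenshot = [self glToUIImage];      // read pixels before presenting
        NSLog(@"captured image: %@", NSStringFromCGSize(screenshot.size));
        [context presentRenderbuffer:GL_RENDERBUFFER_OES];
    }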

3. Grab the OpenGL ES screen with glReadPixels and flip it through a Core Graphics bitmap context:
// get screen
- (void)grabScreen {
    // 320 * 480 * 4 bytes (~600 KB) on the stack; read the framebuffer into it.
    unsigned char buffer[320 * 480 * 4];
    glReadPixels(0, 0, 320, 480, GL_RGBA, GL_UNSIGNED_BYTE, buffer);

    CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, buffer, 320 * 480 * 4, NULL);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGImageRef iref = CGImageCreate(320, 480, 8, 32, 320 * 4, colorSpace, kCGBitmapByteOrderDefault,
                                    ref, NULL, true, kCGRenderingIntentDefault);
    size_t width = CGImageGetWidth(iref);
    size_t height = CGImageGetHeight(iref);
    size_t length = width * height * 4;
    uint32_t *pixels = (uint32_t *)malloc(length);
    // Bitmap contexts require premultiplied alpha, so kCGImageAlphaPremultipliedLast is used here.
    CGContextRef context = CGBitmapContextCreate(pixels, width, height, 8, 320 * 4,
                                                 CGImageGetColorSpace(iref),
                                                 kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    // Flip vertically while redrawing, since OpenGL's origin is at the bottom left.
    CGContextTranslateCTM(context, 0.0, height);
    CGContextScaleCTM(context, 1.0, -1.0);
    CGContextDrawImage(context, CGRectMake(0.0, 0.0, width, height), iref);
    CGImageRef outputRef = CGBitmapContextCreateImage(context);
    UIImage *outputImage = [UIImage imageWithCGImage:outputRef];

    UIImageWriteToSavedPhotosAlbum(outputImage, nil, nil, nil);

    CGImageRelease(outputRef);
    CGContextRelease(context);
    CGImageRelease(iref);
    CGDataProviderRelease(ref);
    CGColorSpaceRelease(colorSpace);
    free(pixels);
}

4. Capture the whole screen with the private UIGetScreenImage(), falling back to rendering each window's layer:
// UIGetScreenImage() is a private UIKit function; declare it here so it can be called.
CGImageRef UIGetScreenImage();

void SaveScreenImage(NSString *path)
{
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    CGImageRef cgImage = UIGetScreenImage();
    void *imageBytes = NULL;
    if (cgImage == NULL) {
        // Fallback: render every window's layer into a 320x480 bitmap context.
        CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
        imageBytes = malloc(320 * 480 * 4);
        CGContextRef context = CGBitmapContextCreate(imageBytes, 320, 480, 8, 320 * 4, colorspace,
                                                     kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Big);
        CGColorSpaceRelease(colorspace);
        for (UIWindow *window in [[UIApplication sharedApplication] windows]) {
            CGRect bounds = [window bounds];
            CALayer *layer = [window layer];
            CGContextSaveGState(context);
            if ([layer contentsAreFlipped]) {
                CGContextTranslateCTM(context, 0.0f, bounds.size.height);
                CGContextScaleCTM(context, 1.0f, -1.0f);
            }
            [layer renderInContext:context];
            CGContextRestoreGState(context);
        }
        cgImage = CGBitmapContextCreateImage(context);
        CGContextRelease(context);
    }
    NSData *pngData = UIImagePNGRepresentation([UIImage imageWithCGImage:cgImage]);
    CGImageRelease(cgImage);
    if (imageBytes)
        free(imageBytes);
    [pngData writeToFile:path atomically:YES];
    [pool release];
}

5. A small wrapper that returns the screen contents as a UIImage:
+ (UIImage *)imageWithScreenContents
{
    CGImageRef cgScreen = UIGetScreenImage();
    if (cgScreen) {
        UIImage *result = [UIImage imageWithCGImage:cgScreen];
        CGImageRelease(cgScreen);
        return result;
    }
    return nil;
}
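
Assuming the method above is declared in a UIImage category, taking a screenshot becomes a one-liner:

    UIImage *screenshot = [UIImage imageWithScreenContents];
    if (screenshot) {
        UIImageWriteToSavedPhotosAlbum(screenshot, nil, nil, nil);
    }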

Combining two images into a single image in code:
- (UIImage *)addImage:(UIImage *)image1 toImage:(UIImage *)image2 {
    // The canvas takes image1's size; UIGraphicsBeginImageContextWithOptions(size, NO, 0.0)
    // would keep the screen's scale on Retina devices.
    UIGraphicsBeginImageContext(image1.size);

    // Draw image1 first, then image2 on top of it.
    [image1 drawInRect:CGRectMake(0, 0, image1.size.width, image1.size.height)];
    [image2 drawInRect:CGRectMake(0, 0, image2.size.width, image2.size.height)];

    UIImage *resultingImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return resultingImage;
}
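
As a quick illustration, a screenshot taken with one of the methods above can be composited with a watermark; the helper and resource names below are hypothetical:

    UIImage *screenshot = [self imageFromView:self.view];        // e.g. the layer-rendering helper from method 1
    UIImage *watermark  = [UIImage imageNamed:@"watermark.png"]; // hypothetical bundle resource
    UIImage *combined   = [self addImage:screenshot toImage:watermark];
    UIImageWriteToSavedPhotosAlbum(combined, nil, nil, nil);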