I'm currently attempting to write a macOS plugin for Unity. I'm taking a screenshot of the desktop with CGWindowListCreateImage and trying to figure out how to return the byte[] data to C# so I can create a Texture2D from it. Any help would be greatly appreciated, thank you.

It won't let me return an NSArray*. The .h file is at the bottom.

NSArray* getScreenshot()
{
    CGImageRef screenShot = CGWindowListCreateImage( CGRectInfinite, kCGWindowListOptionOnScreenOnly, kCGNullWindowID, kCGWindowImageDefault);

    NSArray *pixels = getRGBAsFromImage(screenShot);
    CGImageRelease(screenShot); // CGWindowListCreateImage follows the Create rule, so release it here
    return pixels;
}

NSArray* getRGBAsFromImage(CGImageRef imageRef)
{
    // First get the image into your data buffer
    NSUInteger width = CGImageGetWidth(imageRef);
    NSUInteger height = CGImageGetHeight(imageRef);
    NSUInteger bytesPerPixel = 4;
    NSUInteger pixelCount = width * height;

    NSMutableArray *result = [NSMutableArray arrayWithCapacity:pixelCount];

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    unsigned char *rawData = (unsigned char *)malloc(pixelCount * bytesPerPixel);

    NSUInteger bytesPerRow = bytesPerPixel * width;
    NSUInteger bitsPerComponent = 8;
    CGContextRef context = CGBitmapContextCreate(rawData, width, height,
                    bitsPerComponent, bytesPerRow, colorSpace,
                    kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);

    CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
    CGContextRelease(context);

    // Now your rawData contains the image data in the RGBA8888 pixel format.
    // Now rawData contains the image data in the RGBA8888 pixel format.
    NSUInteger byteIndex = 0;

    for (NSUInteger i = 0; i < pixelCount; ++i)
    {
        CGFloat alpha = ((CGFloat) rawData[byteIndex + 3]) / 255.0f;
        // Un-premultiply the colour channels; guard against fully transparent pixels.
        CGFloat divisor = (alpha > 0.0f) ? (255.0f * alpha) : 255.0f;
        CGFloat red   = ((CGFloat) rawData[byteIndex]    ) / divisor;
        CGFloat green = ((CGFloat) rawData[byteIndex + 1]) / divisor;
        CGFloat blue  = ((CGFloat) rawData[byteIndex + 2]) / divisor;
        byteIndex += bytesPerPixel;

        NSColor *acolor = [NSColor colorWithRed:red green:green blue:blue alpha:alpha];
        [result addObject:acolor];
    }

  free(rawData);

  return result;
}
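
Since the goal is ultimately a byte[] in C#, one alternative is to skip the NSArray/NSColor conversion entirely and hand back the raw RGBA8888 buffer plus its dimensions. The sketch below is untested, and the names getScreenshotBytes/freeScreenshotBytes are made up for illustration; they would need to be declared in the header's extern "C" block alongside getScreenshot:

// Hypothetical exports: return a malloc'd RGBA8888 buffer and its size.
unsigned char* getScreenshotBytes(int *outWidth, int *outHeight)
{
    CGImageRef screenShot = CGWindowListCreateImage(CGRectInfinite, kCGWindowListOptionOnScreenOnly, kCGNullWindowID, kCGWindowImageDefault);
    if (screenShot == NULL)
        return NULL;

    size_t width = CGImageGetWidth(screenShot);
    size_t height = CGImageGetHeight(screenShot);
    size_t bytesPerRow = width * 4;

    // Draw the CGImage into a plain malloc'd RGBA8888 buffer.
    unsigned char *rawData = (unsigned char *)malloc(height * bytesPerRow);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(rawData, width, height, 8, bytesPerRow,
                    colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), screenShot);
    CGContextRelease(context);
    CGImageRelease(screenShot);

    *outWidth = (int)width;
    *outHeight = (int)height;
    return rawData; // caller must pass this back to freeScreenshotBytes()
}

void freeScreenshotBytes(unsigned char *buffer)
{
    free(buffer);
}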


#ifndef TestMethods_hpp
#define TestMethods_hpp
#import <Foundation/Foundation.h>
#include <Carbon/Carbon.h>
#include <stdio.h>
#include <AppKit/AppKit.h>
typedef void (*Unity_Callback1)(char * message);

extern "C" {
    NSArray* getScreenshot();
}
#endif /* TestMethods_hpp */
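
On the Unity side, a rough sketch of how those hypothetical exports could be consumed ("TestMethods" is an assumed bundle name, and the result may come out vertically flipped because CGContext rows run top-down while Unity texture data runs bottom-up):

using System;
using System.Runtime.InteropServices;
using UnityEngine;

public static class ScreenshotPlugin
{
    [DllImport("TestMethods")]
    private static extern IntPtr getScreenshotBytes(out int width, out int height);

    [DllImport("TestMethods")]
    private static extern void freeScreenshotBytes(IntPtr buffer);

    public static Texture2D GetScreenshotTexture()
    {
        IntPtr ptr = getScreenshotBytes(out int width, out int height);
        if (ptr == IntPtr.Zero)
            return null;

        // Copy the native RGBA buffer into a managed byte[] and release the native memory.
        byte[] pixels = new byte[width * height * 4];
        Marshal.Copy(ptr, pixels, 0, pixels.Length);
        freeScreenshotBytes(ptr);

        var tex = new Texture2D(width, height, TextureFormat.RGBA32, false);
        tex.LoadRawTextureData(pixels);
        tex.Apply();
        return tex;
    }
}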
  • What is your graphics api? – Hamid Yusifli Feb 17 '20 at 17:01
  • Whatever MacOS and Unity3d uses. That might be OpenGL I'm not sure. – Evan Feb 17 '20 at 17:13
  • Ultimately I just want to extract the bytes I need from screenShot into a byte[]. I think if I can get that done, then sending it to Unity will be straightforward. – Evan Feb 17 '20 at 17:15
  • For me to answer your question you should provide your [Graphics API](https://docs.unity3d.com/Manual/GraphicsAPIs.html) first. – Hamid Yusifli Feb 17 '20 at 19:17
  • Color Space :: Gamma; Auto Graphics API for Windows :: Checked; Auto Graphics API for Mac :: Checked; Auto Graphics API for Linux :: Checked; Color Gamut for Mac :: sRGB; – Evan Feb 17 '20 at 21:54
  • It looks like my options for Graphics API for mac when auto is not checked is Metal and OpenGLCore. – Evan Feb 17 '20 at 21:58
