Reply To: Pushing frames to embedded hardware via Syphon client?

Home Forums Syphon Syphon Development – Developer Pushing frames to embedded hardware via Syphon client? Reply To: Pushing frames to embedded hardware via Syphon client?

#22036
douglas
Participant

Here’s the relevant bit of code:

//
// OfflineRenderer.m
// Simple Client
//
// Created by Douglas Heriot on 17/05/13.
//
//
#import "OfflineRenderer.h"
// Based on:
// http://lists.apple.com/archives/mac-opengl/2010/Jun/msg00080.html
#import <OpenGL/OpenGL.h>
#import <OpenGL/glu.h> // for gluCheckExtension
#import <AppKit/AppKit.h> // for NSOpenGL…
// Simple error reporting macros to help keep the sample code clean.
// Wrapped in do { … } while (0) so each macro behaves as a single statement
// (safe after an unbraced `if`), and macro arguments are parenthesized to
// avoid precedence surprises. GLenum is unsigned, so it is printed with %u.
#define REPORTGLERROR(task) do { GLenum tGLErr = glGetError(); if (tGLErr != GL_NO_ERROR) { fprintf(stderr, "OpenGL error %u while %s\n", tGLErr, (task)); } } while (0)
#define REPORT_ERROR_AND_EXIT(desc) do { fprintf(stderr, "%s\n", (desc)); exit(1); } while (0)
#define NULL_ERROR_EXIT(test, desc) do { if (!(test)) REPORT_ERROR_AND_EXIT(desc); } while (0)
// Class extension: private state for the offline (window-less) renderer.
@interface OfflineRenderer()
{
// Pixel buffer backing the offscreen OpenGL context.
NSOpenGLPixelBuffer* pixBuf;
// Context used for all rendering and readback in this class.
NSOpenGLContext* openGLContext;
// Pixel format used to create the context. Per the comment in -init, it may
// not actually matter — the FBO's renderbuffer defines its own format.
NSOpenGLPixelFormat* pixFormat;
// Dimensions (in pixels) of the rendered output, copied from outputSize.
int outputWidth, outputHeight;
// Color renderbuffer attached to the FBO that frames are drawn into.
GLuint renderBuffer;
}
// Target output size in pixels; set once in -init.
@property NSSize outputSize;
// RGBA8 readback buffer, outputWidth * outputHeight * 4 bytes,
// allocated in -init and freed in -dealloc.
@property uint8_t *pixelData;
@end
@implementation OfflineRenderer
/// Designated initializer. Creates an offscreen OpenGL context backed by a
/// pixel buffer, attaches an RGBA8 renderbuffer to a new FBO sized to
/// outputSize, and allocates the CPU-side readback buffer.
/// Exits the process via the error macros on any unrecoverable failure
/// (sample-code style, kept from the original).
- (instancetype)init
{
    if (self = [super init])
    {
        self.outputSize = NSMakeSize(10, 15);

        // Set up pixel format. Not actually used? (Framebuffer renderbuffer
        // gets its own format again.)
        NSOpenGLPixelFormatAttribute attributes[] = {
            NSOpenGLPFAPixelBuffer,
            NSOpenGLPFANoRecovery,
            NSOpenGLPFAAccelerated,
            NSOpenGLPFADepthSize, 24,
            (NSOpenGLPixelFormatAttribute) 0
        };
        pixFormat = [[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];

        // Create an OpenGL pixel buffer. The 32x32 size is arbitrary — actual
        // rendering happens into the FBO's renderbuffer below.
        pixBuf = [[NSOpenGLPixelBuffer alloc] initWithTextureTarget:GL_TEXTURE_RECTANGLE_EXT textureInternalFormat:GL_RGBA textureMaxMipMapLevel:0 pixelsWide:32 pixelsHigh:32];
        NULL_ERROR_EXIT(pixBuf, "Unable to create NSOpenGLPixelBuffer");

        // Create the OpenGL context to render with (with color and depth buffers).
        openGLContext = [[NSOpenGLContext alloc] initWithFormat:pixFormat shareContext:nil];
        NULL_ERROR_EXIT(openGLContext, "Unable to create NSOpenGLContext");
        [openGLContext setPixelBuffer:pixBuf cubeMapFace:0 mipMapLevel:0 currentVirtualScreen:[openGLContext currentVirtualScreen]];
        [openGLContext makeCurrentContext];

        /*
         * Test if framebuffer objects are supported.
         */
        const GLubyte* strExt = glGetString(GL_EXTENSIONS);
        GLboolean fboSupported = gluCheckExtension((const GLubyte*)"GL_EXT_framebuffer_object", strExt);
        if (!fboSupported)
            REPORT_ERROR_AND_EXIT("Your system does not support framebuffer extension – unable to render scene");

        /*
         * Create an FBO (frame buffer, and colour render buffer) at the
         * requested output size.
         */
        outputWidth = _outputSize.width;
        outputHeight = _outputSize.height;

        // Render buffer to use for imaging.
        glGenRenderbuffersEXT(1, &renderBuffer);
        glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, renderBuffer);
        glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_RGBA8, outputWidth, outputHeight);
        REPORTGLERROR("creating color render buffer");

        GLuint fbo = 0;
        glGenFramebuffersEXT(1, &fbo);
        glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fbo);
        REPORTGLERROR("binding framebuffer");

        glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_RENDERBUFFER_EXT, renderBuffer);
        REPORTGLERROR("specifying color render buffer");

        if (glCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT) != GL_FRAMEBUFFER_COMPLETE_EXT)
            REPORT_ERROR_AND_EXIT("Problem with OpenGL framebuffer after specifying color render buffer.");

        // CPU-side buffer that -renderFrame: reads pixels back into (RGBA,
        // one byte per channel). Guard against allocation failure.
        self.pixelData = (uint8_t *)calloc(outputWidth * outputHeight, 4);
        NULL_ERROR_EXIT(self.pixelData, "Unable to allocate pixel readback buffer");
    }
    return self;
}
/// Pulls the latest frame from the Syphon client, draws it (aspect-fit,
/// centered) into the offscreen FBO, and reads the pixels back into
/// _pixelData as RGBA bytes. Does nothing if no new frame is available.
/// @param client The Syphon client to fetch the current frame image from.
- (void)renderFrame:(SyphonClient *)client
{
    [openGLContext makeCurrentContext];

    // newFrameImageForContext: returns nil when there is no frame to show.
    SyphonImage *image = [client newFrameImageForContext:(CGLContextObj)[openGLContext CGLContextObj]];
    if (image)
    {
        NSSize imageSize = image.textureSize;
        GLuint imageWidth = imageSize.width, imageHeight = imageSize.height;

        glClearColor(0.0, 0.0, 0.0, 1.0);
        glClear(GL_COLOR_BUFFER_BIT);

        // Setup OpenGL states.
        // NOTE(review): the viewport is set to the *image* size while the
        // renderbuffer is outputWidth x outputHeight — this looks suspicious
        // but is kept as-is; confirm against expected output.
        glViewport(0, 0, imageWidth, imageHeight);
        glMatrixMode(GL_PROJECTION);
        glPushMatrix();
        glLoadIdentity();
        glOrtho(0.0, imageWidth, imageHeight, 0.0, -1, 1);
        glMatrixMode(GL_MODELVIEW);
        glPushMatrix();
        glLoadIdentity();
        {
            // Syphon images are rectangle textures (non-normalized texcoords).
            glEnable(GL_TEXTURE_RECTANGLE_ARB);
            glBindTexture(GL_TEXTURE_RECTANGLE_ARB, [image textureName]);

            // Use linear filtering since the image is scaled to the output.
            glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_RECTANGLE_ARB, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
            glColor4f(1.0, 1.0, 1.0, 1.0);

            // why do we need it ?
            glDisable(GL_BLEND);

            // Aspect-fit: scale the image down by the larger of the two
            // width/height ratios so it fits entirely inside the output.
            NSSize scaled;
            float wr = imageSize.width / outputWidth;
            float hr = imageSize.height / outputHeight;
            float ratio;
            ratio = (hr < wr ? wr : hr);
            scaled = NSMakeSize((imageSize.width / ratio), (imageSize.height / ratio));

            // Full-image texture coordinates (rectangle textures use pixels).
            GLfloat tex_coords[] =
            {
                0.0, 0.0,
                imageSize.width, 0.0,
                imageSize.width, imageSize.height,
                0.0, imageSize.height
            };

            // Quad centered on the origin; translated to the output center below.
            float halfw = scaled.width * 0.5;
            float halfh = scaled.height * 0.5;
            GLfloat verts[] =
            {
                -halfw, -halfh,
                halfw, -halfh,
                halfw, halfh,
                -halfw, halfh
            };

            glTranslated(outputWidth * 0.5, outputHeight * 0.5, 0.0);

            glEnableClientState( GL_TEXTURE_COORD_ARRAY );
            glTexCoordPointer(2, GL_FLOAT, 0, tex_coords );
            glEnableClientState(GL_VERTEX_ARRAY);
            glVertexPointer(2, GL_FLOAT, 0, verts );
            glDrawArrays( GL_TRIANGLE_FAN, 0, 4 );
            glDisableClientState( GL_TEXTURE_COORD_ARRAY );
            glDisableClientState(GL_VERTEX_ARRAY);

            // Unbind the texture.
            glBindTexture(GL_TEXTURE_RECTANGLE_ARB, 0);
        }

        // Restore OpenGL states.
        glMatrixMode(GL_MODELVIEW);
        glPopMatrix();
        glMatrixMode(GL_PROJECTION);
        glPopMatrix();

        // Read the rendered frame back to the CPU buffer allocated in -init.
        glReadPixels(0, 0, outputWidth, outputHeight, GL_RGBA, GL_UNSIGNED_BYTE, _pixelData);
        // NSLog(@"%u x %u – A pixel: %u, %u, %u", imageWidth, imageHeight, _pixelData[10], _pixelData[11], _pixelData[12]);
    }
}
/// Frees the CPU-side readback buffer allocated in -init.
/// (ARC: no [super dealloc]; OpenGL objects are cleaned up with the context.)
- (void)dealloc
{
    free(_pixelData);
}
@end

view raw
gistfile1.m
hosted with ❤ by GitHub

I’m really just getting started with OpenGL, so I don’t know if I’ve done everything the best way – I just know I finally got it to work!