I'm trying to get shaders to work in an MFC app with an OpenGL 4 core profile.
我想让shaders在一个OpenGL 4 core profile的MFC应用中工作。
I did this in a Win32 app to make sure it works (it does, drawing a triangle in the lower part of the window):
我在Win32应用程序中做了这个,以确保它能正常工作(它在窗口的下方画一个三角形):
BOOL InitInstance(HINSTANCE hInstance, int nCmdShow)
{
HWND hWnd;
hInst = hInstance; // Store instance handle in our global variable
hWnd = CreateWindow(szWindowClass, szTitle, WS_OVERLAPPEDWINDOW,
CW_USEDEFAULT, 0, CW_USEDEFAULT, 0, NULL, NULL, hInstance, NULL);
if (!hWnd)
{
return FALSE;
}
m_hDC = ::GetDC(hWnd);
PIXELFORMATDESCRIPTOR pfd;
memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DOUBLEBUFFER | PFD_SUPPORT_OPENGL | PFD_DRAW_TO_WINDOW;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = 32;
pfd.cDepthBits = 32;
pfd.iLayerType = PFD_MAIN_PLANE;
int nPixelFormat = ChoosePixelFormat(m_hDC, &pfd);
if (nPixelFormat == 0) return false;
BOOL bResult = SetPixelFormat(m_hDC, nPixelFormat, &pfd);
if (!bResult) return false;
HGLRC tempContext = wglCreateContext(m_hDC);
wglMakeCurrent(m_hDC, tempContext);
GLenum err = glewInit();
if (GLEW_OK != err)
{
MessageBox(hWnd, (LPCWSTR)L"Glew not initialized", (LPCWSTR)L"Error", MB_ICONEXCLAMATION);
}
//Get a GL 4,2 context
int attribs[] =
{
WGL_CONTEXT_MAJOR_VERSION_ARB, 4,
WGL_CONTEXT_MINOR_VERSION_ARB, 2,
WGL_CONTEXT_FLAGS_ARB, 0,
0
};
if (wglewIsSupported("WGL_ARB_create_context") == 1)
{
m_hRC = wglCreateContextAttribsARB(m_hDC, 0, attribs);
wglMakeCurrent(NULL, NULL);
wglDeleteContext(tempContext);
wglMakeCurrent(m_hDC, m_hRC);
}
else
{ //It's not possible to make a GL 4.x context. Use the old style context (GL 2.1 and before)
m_hRC = tempContext;
}
if (!m_hRC) return false;
static const char * vs_source[] =
{
"#version 420 core \n"
" \n"
"void main(void) \n"
"{ \n"
" const vec4 vertices[] = vec4[](vec4( 2.25, -2.25, 0.5, 1.0), \n"
" vec4(-2.25, -2.25, 0.5, 1.0), \n"
" vec4( 2.25, 2.25, 0.5, 1.0)); \n"
" \n"
" gl_Position = vertices[gl_VertexID]; \n"
"} \n"
};
static const char * fs_source[] =
{
"#version 420 core \n"
" \n"
"out vec4 color; \n"
" \n"
"void main(void) \n"
"{ \n"
" color = vec4(1.0, 0.8, 1.0, 1.0); \n"
"} \n"
};
program = glCreateProgram();
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, fs_source, NULL);
glCompileShader(fs);
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, vs_source, NULL);
glCompileShader(vs);
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
ShowWindow(hWnd, nCmdShow);
UpdateWindow(hWnd);
return TRUE;
}
const GLfloat green[] = { 0.0f, 0.25f, 0.0f, 1.0f };
//
// FUNCTION: WndProc(HWND, UINT, WPARAM, LPARAM)
//
// PURPOSE: Processes messages for the main window.
//
// WM_COMMAND - process the application menu
// WM_PAINT - Paint the main window
// WM_DESTROY - post a quit message and return
//
//
LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
{
int wmId, wmEvent;
PAINTSTRUCT ps;
HDC hdc;
switch (message)
{
case WM_COMMAND:
wmId = LOWORD(wParam);
wmEvent = HIWORD(wParam);
// Parse the menu selections:
switch (wmId)
{
case IDM_ABOUT:
DialogBox(hInst, MAKEINTRESOURCE(IDD_ABOUTBOX), hWnd, About);
break;
case IDM_EXIT:
DestroyWindow(hWnd);
break;
default:
return DefWindowProc(hWnd, message, wParam, lParam);
}
break;
case WM_PAINT:
//hdc = BeginPaint(hWnd, &ps);
// TODO: Add any drawing code here...
glClearBufferfv(GL_COLOR, 0, green);
glUseProgram(program);
glDrawArrays(GL_TRIANGLES, 0, 3);
SwapBuffers(m_hDC);
//EndPaint(hWnd, &ps);
break;
case WM_DESTROY:
PostQuitMessage(0);
break;
default:
return DefWindowProc(hWnd, message, wParam, lParam);
}
return 0;
}
Then I did the same in an MFC MDI program and although the background gets cleared to the same color as the Win32 app, the shaders don't draw anything.
然后我在MFC MDI程序中做了同样的事情,尽管背景被允许与Win32应用程序相同的颜色,但是着色器不会绘制任何东西。
For completion, here's the relevant code in my MFC MDI view class (it's different from the code in the Win32 app because I've been trying to figure out what's wrong, but it was once the same as the Win32 app code and it didn't work):
为了完成,这里是MFC MDI视图类中的相关代码(它与Win32应用程序中的代码不同,因为我一直在尝试找出什么是错误的,但是它与Win32应用程序代码是一样的,它没有工作):
int CMFCApplication2View::OnCreate(LPCREATESTRUCT lpCreateStruct)
{
if (CView::OnCreate(lpCreateStruct) == -1)
return -1;
// TODO: Add your specialized creation code here
m_hDC = ::GetDC(m_hWnd);
PIXELFORMATDESCRIPTOR pfd;
memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
pfd.nSize = sizeof(PIXELFORMATDESCRIPTOR);
pfd.nVersion = 1;
pfd.dwFlags = PFD_DOUBLEBUFFER | PFD_SUPPORT_OPENGL | PFD_DRAW_TO_WINDOW;
pfd.iPixelType = PFD_TYPE_RGBA;
pfd.cColorBits = 32;
pfd.cDepthBits = 24;
pfd.iLayerType = PFD_MAIN_PLANE;
int nPixelFormat = ChoosePixelFormat(m_hDC, &pfd);
if (nPixelFormat == 0) return false;
BOOL bResult = SetPixelFormat(m_hDC, nPixelFormat, &pfd);
if (!bResult) return false;
HGLRC tempContext = wglCreateContext(m_hDC);
wglMakeCurrent(m_hDC, tempContext);
glewExperimental = GL_TRUE;
GLenum err = glewInit();
if (GLEW_OK != err)
{
AfxMessageBox(_T("GLEW is not initialized!"));
}
//This is a modern pixel format attribute list.
//It has an extensible structure. Just add in more argument pairs
//befroe the null to request more features.
const int attribList[] =
{
WGL_DRAW_TO_WINDOW_ARB, GL_TRUE,
WGL_SUPPORT_OPENGL_ARB, GL_TRUE,
WGL_ACCELERATION_ARB, WGL_FULL_ACCELERATION_ARB,
WGL_DOUBLE_BUFFER_ARB, GL_TRUE,
WGL_PIXEL_TYPE_ARB, WGL_TYPE_RGBA_ARB,
WGL_COLOR_BITS_ARB, 32,
WGL_DEPTH_BITS_ARB, 24,
WGL_STENCIL_BITS_ARB, 8,
0, 0 //End
};
unsigned int numFormats;
int pixelFormat;
memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
//Select a pixel format number
wglChoosePixelFormatARB(m_hDC, attribList, NULL, 1, &pixelFormat, &numFormats);
//Optional: Get the pixel format's description. We must provide a
//description to SetPixelFormat(), but its contents mean little.
//According to MSDN:
// The system's metafile component uses this structure to record the logical
// pixel format specification. The structure has no other effect upon the
// behavior of the SetPixelFormat function.
//DescribePixelFormat(m_pDC->GetSafeHdc(), pixelFormat, sizeof(PIXELFORMATDESCRIPTOR), &pfd);
//Set it as the current
if (FALSE == SetPixelFormat(m_hDC, pixelFormat, &pfd))
{
}
//Get a GL 4,4 context
int attribs[] =
{
WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
WGL_CONTEXT_MINOR_VERSION_ARB, 2,
WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
//WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB,
0, 0 //End
};
if (wglewIsSupported("WGL_ARB_create_context") == 1)
{
m_hRC = wglCreateContextAttribsARB(m_hDC, 0, attribs);
wglMakeCurrent(NULL, NULL);
wglDeleteContext(tempContext);
wglMakeCurrent(m_hDC, m_hRC);
}
else
{ //It's not possible to make a GL 4.x context. Use the old style context (GL 2.1 and before)
m_hRC = tempContext;
}
if (!m_hRC) return false;
static const char * vs_source[] =
{
"#version 150 core \n"
" \n"
"void main(void) \n"
"{ \n"
" const vec4 vertices[] = vec4[](vec4( 2.25, -2.25, 0.5, 1.0), \n"
" vec4(-2.25, -2.25, 0.5, 1.0), \n"
" vec4( 2.25, 2.25, 0.5, 1.0)); \n"
" \n"
" gl_Position = vertices[gl_VertexID]; \n"
"} \n"
};
static const char * fs_source[] =
{
"#version 150 core \n"
" \n"
"out vec4 color; \n"
" \n"
"void main(void) \n"
"{ \n"
" color = vec4(1.0, 0.8, 1.0, 1.0); \n"
"} \n"
};
program = glCreateProgram();
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, fs_source, NULL);
glCompileShader(fs);
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, vs_source, NULL);
glCompileShader(vs);
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
wglMakeCurrent(NULL, NULL);
return 0;
}
void CMFCApplication2View::OnDraw(CDC*/* pDC*/)
{
CMFCApplication2Doc* pDoc = GetDocument();
ASSERT_VALID(pDoc);
if (!pDoc)
return;
// TODO: add draw code for native data here
// Make the rendering context current
wglMakeCurrent(m_hDC, m_hRC);
// TODO: add draw code for native data here
static const GLfloat green[] = { 1.0f, 0.0f, 0.0f, 1.0f };
glClearBufferfv(GL_COLOR, 0, green);
glUseProgram(program);
glDrawArrays(GL_TRIANGLES, 0, 3);
SwapBuffers(m_hDC);
// Allow other rendering contexts to coexist
wglMakeCurrent(NULL, NULL);
}
I ran both executables through AMD's GPU Perfstudio. The Win32 frame debug show everything is ok. The MFC frame debug shows the frame buffer with a size of 0 (the frame buffer size of the win32 app is the size of the window). Both apps API traces are the same.
我通过AMD的GPU Perfstudio运行了两个可执行程序。Win32帧调试显示一切正常。MFC框架调试显示了一个大小为0的帧缓冲区(win32应用程序的帧缓冲区大小是窗口的大小)。两个应用程序的API跟踪都是相同的。
Any idea on what might be going on?
有什么想法吗?
1 个解决方案
#1
5
Apparently OpenGL in MFC, when using a core profile, needs to have a view port set, via glViewport.
显然OpenGL在MFC中,当使用一个核心配置文件时,需要通过glViewport设置一个视图端口集。
Handling WM_SIZE
and doing glViewport(0, 0, (GLsizei)cx, (GLsizei)cy)
solved the issue.
处理WM_SIZE并执行glViewport(0,0, (GLsizei)cx, (GLsizei)cy)解决了这个问题。
As a note, Andon was correct in saying that I shouldn't specify the pixel format twice. It didn't give any error, which is strange, but when I removed it everything kept on working.
作为一个说明,Andon是正确的,说我不应该指定像素格式两次。它没有给出任何错误,这很奇怪,但是当我删除它时,一切都在继续工作。
#1
5
Apparently OpenGL in MFC, when using a core profile, needs to have a view port set, via glViewport.
显然OpenGL在MFC中,当使用一个核心配置文件时,需要通过glViewport设置一个视图端口集。
Handling WM_SIZE
and doing glViewport(0, 0, (GLsizei)cx, (GLsizei)cy)
solved the issue.
处理WM_SIZE并执行glViewport(0,0, (GLsizei)cx, (GLsizei)cy)解决了这个问题。
As a note, Andon was correct in saying that I shouldn't specify the pixel format twice. It didn't give any error, which is strange, but when I removed it everything kept on working.
作为一个说明,Andon是正确的,说我不应该指定像素格式两次。它没有给出任何错误,这很奇怪,但是当我删除它时,一切都在继续工作。