QSurfaceFormat fmt;
#ifdef CONFIG_DARWIN
+ /* On macOS, pre-GL3 and modern core GL are two separate GL implementations
+ * that cannot share resources with each other. Therefore we ignore the input
+ * version to ensure we always use the modern implementation.
+ * Additionally, we want to reuse contexts on macOS, see comment in
+ * qt5_gl_create_context_internal. This only really makes sense if they all
+ * have the same version. */
fmt.setVersion(4, 1);
fmt.setProfile(QSurfaceFormat::CoreProfile);
#else
*/
#include <memory>
+#include <vector>
#include <QApplication>
#include <QOpenGLContext>
Q_INIT_RESOURCE(resource);
+#ifdef CONFIG_DARWIN
if (display_type == MARU_DISPLAY_TYPE_ONSCREEN || display_type == MARU_DISPLAY_TYPE_GL) {
+#else
+ if (display_type == MARU_DISPLAY_TYPE_ONSCREEN) {
+#endif
QCoreApplication::setAttribute(Qt::AA_UseDesktopOpenGL);
QCoreApplication::setAttribute(Qt::AA_ShareOpenGLContexts);
/* Fixed capacity of the display-item pool below. */
#define DPY_ITEM_NO 5
/* Statically allocated display items; per-item generation counters are kept
 * alongside (presumably to detect stale/reused slots — TODO confirm against
 * the code that reads dpy_item_gen). */
static dpy_item dpy_item_pool[DPY_ITEM_NO];
static uint64_t dpy_item_gen[DPY_ITEM_NO];
+/* macOS only: retired QOpenGLContexts parked for reuse instead of deletion;
+ * filled by qt5_gl_destroy_context_internal, drained by
+ * qt5_gl_create_context_internal. */
+static std::vector<QOpenGLContext *> context_pool;
void qt5_gl_init_items()
{
void *qt5_gl_create_context_internal(int major, int minor)
{
+#ifdef CONFIG_DARWIN
+ /* It seems that there is a limit of GL contexts that can be created by a single application.
+ * After exhausting it, the application segfaults even if the old contexts are deleted.
+ * Therefore, we reuse contexts instead of destroying them. Note that all of the contexts
+ * have the same GL version, see DisplayGLWidget::createSharedContext. */
+ if (!context_pool.empty()) {
+ auto ctx = context_pool.back();
+ context_pool.pop_back();
+ return ctx;
+ }
+#endif
if (mainwindow) {
return ((DisplayGLWidget *)mainwindow->getDisplay())->createSharedContext(major, minor);
} else {
/* Dispose of a GL context previously returned by qt5_gl_create_context_internal.
 * On macOS the context is recycled into context_pool instead of being deleted:
 * creating too many contexts in one process eventually segfaults even after
 * old ones are destroyed (see comment in qt5_gl_create_context_internal), and
 * reuse is sound there because all contexts share the same GL version. */
void qt5_gl_destroy_context_internal(void *_ctx)
{
auto ctx = (QOpenGLContext *)_ctx;
+#ifdef CONFIG_DARWIN
+/* Park the context for later reuse rather than freeing it. */
+context_pool.push_back(ctx);
+#else
delete ctx;
+#endif
}
int qt5_gl_make_context_current_internal(void *_ctx)