How to render to a file using GLUT/OpenGL?

I have a program which simulates a physical system that changes over time. I want to, at predetermined intervals (say every 10 seconds) output a visualization of the state of the simulation to a file. I want to do it in such a way that it is easy to "turn the visualization off" and not output the visualization at all.

I am looking at OpenGL and GLUT as graphics tools to do the visualization. However the problem seems to be that first of all, it looks like it only outputs to a window and can't output to a file. Second, in order to generate the visualization you have to call GLUTMainLoop and that stops the execution of the main function - the only functions that get called from then on are calls from the GUI. However I do not want this to be a GUI based application - I want it to just be an application that you run from the command line, and it generates a series of images. Is there a way to do this in GLUT/OpenGL? Or is OpenGL the wrong tool for this completely and I should use something else?
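
Schematically, the structure I am after is something like this (a sketch only; every name in it is made up):

/* Sketch of the desired structure; all names are hypothetical. */
#include <stdio.h>

#define DT 0.01
#define T_END 100.0
#define DUMP_INTERVAL 10.0

static void step_simulation(double t) { (void)t; /* physics step here */ }
static void dump_image(int n) { printf("would render and save frame %d\n", n); }

int main(int argc, char **argv) {
    /* "Turning the visualization off" should be as easy as e.g. ./sim 0. */
    int visualize = !(argc > 1 && argv[1][0] == '0');
    double t = 0.0, t_next = 0.0;
    int n = 0;
    while (t < T_END) {
        step_simulation(t);
        t += DT;
        if (visualize && t >= t_next) {
            dump_image(n++);
            t_next += DUMP_INTERVAL;
        }
    }
    return 0;
}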

天涯沦落人 2024-09-16 02:26:24

glReadPixels runnable FBO example

The example below generates either:

  • one ppm per frame at 200 FPS with no extra dependencies,
  • one png per frame at 600 FPS with libpng,
  • one mpg for all frames at 1200 FPS with FFmpeg

on a ramfs. The better the compression, the larger the FPS, so we must be memory IO bound.

FPS is larger than 200 on my 60 FPS screen, and all images are different, so I'm sure that it's not limited to the screen's FPS.

The GIF in this answer was generated from the video as explained at: https://askubuntu.com/questions/648603/how-to-create-an-animated-gif-from-mp4-video-via-command-line/837574#837574

glReadPixels is the key OpenGL function that reads pixels back from the screen. Also have a look at the setup under init().

glReadPixels returns the bottom row of pixels first, unlike most image formats, so flipping the rows is usually needed.

offscreen.c

#ifndef PPM
#define PPM 1
#endif
#ifndef LIBPNG
#define LIBPNG 1
#endif
#ifndef FFMPEG
#define FFMPEG 1
#endif

#include <assert.h>
#include <inttypes.h> /* for strtoumax() used in main() */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define GL_GLEXT_PROTOTYPES 1
#include <GL/gl.h>
#include <GL/glu.h>
#include <GL/glut.h>
#include <GL/glext.h>

#if LIBPNG
#include <png.h>
#endif

#if FFMPEG
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libswscale/swscale.h>
#endif

enum Constants { SCREENSHOT_MAX_FILENAME = 256 };
static GLubyte *pixels = NULL;
static GLuint fbo;
static GLuint rbo_color;
static GLuint rbo_depth;
static int offscreen = 1;
static unsigned int max_nframes = 128;
static unsigned int nframes = 0;
static unsigned int time0;
static unsigned int height = 128;
static unsigned int width = 128;
#define PPM_BIT (1 << 0)
#define LIBPNG_BIT (1 << 1)
#define FFMPEG_BIT (1 << 2)
static unsigned int output_formats = PPM_BIT | LIBPNG_BIT | FFMPEG_BIT;

/* Model. */
static double angle;
static double delta_angle;

#if PPM
/* Take screenshot with glReadPixels and save to a file in PPM format.
 *
 * -   filename: file path to save to, without extension
 * -   width: screen width in pixels
 * -   height: screen height in pixels
 * -   pixels: intermediate buffer to avoid repeated mallocs across multiple calls.
 *     Contents of this buffer do not matter. May be NULL, in which case it is initialized.
 *     You must `free` it when you won't be calling this function anymore.
 */
static void screenshot_ppm(const char *filename, unsigned int width,
        unsigned int height, GLubyte **pixels) {
    size_t i, j, cur;
    const size_t format_nchannels = 3;
    FILE *f = fopen(filename, "w");
    fprintf(f, "P3\n%d %d\n%d\n", width, height, 255);
    *pixels = realloc(*pixels, format_nchannels * sizeof(GLubyte) * width * height);
    glReadPixels(0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, *pixels);
    for (i = 0; i < height; i++) {
        for (j = 0; j < width; j++) {
            cur = format_nchannels * ((height - i - 1) * width + j);
            fprintf(f, "%3d %3d %3d ", (*pixels)[cur], (*pixels)[cur + 1], (*pixels)[cur + 2]);
        }
        fprintf(f, "\n");
    }
    fclose(f);
}
#endif

#if LIBPNG
/* Adapted from https://github.com/cirosantilli/cpp-cheat/blob/19044698f91fefa9cb75328c44f7a487d336b541/png/open_manipulate_write.c */
static png_byte *png_bytes = NULL;
static png_byte **png_rows = NULL;
static void screenshot_png(const char *filename, unsigned int width, unsigned int height,
        GLubyte **pixels, png_byte **png_bytes, png_byte ***png_rows) {
    size_t i, nvals;
    const size_t format_nchannels = 4;
    FILE *f = fopen(filename, "wb");
    nvals = format_nchannels * width * height;
    *pixels = realloc(*pixels, nvals * sizeof(GLubyte));
    *png_bytes = realloc(*png_bytes, nvals * sizeof(png_byte));
    *png_rows = realloc(*png_rows, height * sizeof(png_byte*));
    glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, *pixels);
    for (i = 0; i < nvals; i++)
        (*png_bytes)[i] = (*pixels)[i];
    for (i = 0; i < height; i++)
        (*png_rows)[height - i - 1] = &(*png_bytes)[i * width * format_nchannels];
    png_structp png = png_create_write_struct(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL);
    if (!png) abort();
    png_infop info = png_create_info_struct(png);
    if (!info) abort();
    if (setjmp(png_jmpbuf(png))) abort();
    png_init_io(png, f);
    png_set_IHDR(
        png,
        info,
        width,
        height,
        8,
        PNG_COLOR_TYPE_RGBA,
        PNG_INTERLACE_NONE,
        PNG_COMPRESSION_TYPE_DEFAULT,
        PNG_FILTER_TYPE_DEFAULT
    );
    png_write_info(png, info);
    png_write_image(png, *png_rows);
    png_write_end(png, NULL);
    png_destroy_write_struct(&png, &info);
    fclose(f);
}
#endif

#if FFMPEG
/* Adapted from: https://github.com/cirosantilli/cpp-cheat/blob/19044698f91fefa9cb75328c44f7a487d336b541/ffmpeg/encode.c */

static AVCodecContext *c = NULL;
static AVFrame *frame;
static AVPacket pkt;
static FILE *file;
static struct SwsContext *sws_context = NULL;
static uint8_t *rgb = NULL;

static void ffmpeg_encoder_set_frame_yuv_from_rgb(uint8_t *rgb) {
    const int in_linesize[1] = { 4 * c->width };
    sws_context = sws_getCachedContext(sws_context,
            c->width, c->height, AV_PIX_FMT_RGB32,
            c->width, c->height, AV_PIX_FMT_YUV420P,
            0, NULL, NULL, NULL);
    sws_scale(sws_context, (const uint8_t * const *)&rgb, in_linesize, 0,
            c->height, frame->data, frame->linesize);
}

void ffmpeg_encoder_start(const char *filename, int codec_id, int fps, int width, int height) {
    AVCodec *codec;
    int ret;
    avcodec_register_all();
    codec = avcodec_find_encoder(codec_id);
    if (!codec) {
        fprintf(stderr, "Codec not found\n");
        exit(1);
    }
    c = avcodec_alloc_context3(codec);
    if (!c) {
        fprintf(stderr, "Could not allocate video codec context\n");
        exit(1);
    }
    c->bit_rate = 400000;
    c->width = width;
    c->height = height;
    c->time_base.num = 1;
    c->time_base.den = fps;
    c->gop_size = 10;
    c->max_b_frames = 1;
    c->pix_fmt = AV_PIX_FMT_YUV420P;
    if (codec_id == AV_CODEC_ID_H264)
        av_opt_set(c->priv_data, "preset", "slow", 0);
    if (avcodec_open2(c, codec, NULL) < 0) {
        fprintf(stderr, "Could not open codec\n");
        exit(1);
    }
    file = fopen(filename, "wb");
    if (!file) {
        fprintf(stderr, "Could not open %s\n", filename);
        exit(1);
    }
    frame = av_frame_alloc();
    if (!frame) {
        fprintf(stderr, "Could not allocate video frame\n");
        exit(1);
    }
    frame->format = c->pix_fmt;
    frame->width  = c->width;
    frame->height = c->height;
    ret = av_image_alloc(frame->data, frame->linesize, c->width, c->height, c->pix_fmt, 32);
    if (ret < 0) {
        fprintf(stderr, "Could not allocate raw picture buffer\n");
        exit(1);
    }
}

void ffmpeg_encoder_finish(void) {
    uint8_t endcode[] = { 0, 0, 1, 0xb7 };
    int got_output, ret;
    do {
        fflush(stdout);
        ret = avcodec_encode_video2(c, &pkt, NULL, &got_output);
        if (ret < 0) {
            fprintf(stderr, "Error encoding frame\n");
            exit(1);
        }
        if (got_output) {
            fwrite(pkt.data, 1, pkt.size, file);
            av_packet_unref(&pkt);
        }
    } while (got_output);
    fwrite(endcode, 1, sizeof(endcode), file);
    fclose(file);
    avcodec_close(c);
    av_free(c);
    av_freep(&frame->data[0]);
    av_frame_free(&frame);
}

void ffmpeg_encoder_encode_frame(uint8_t *rgb) {
    int ret, got_output;
    ffmpeg_encoder_set_frame_yuv_from_rgb(rgb);
    av_init_packet(&pkt);
    pkt.data = NULL;
    pkt.size = 0;
    ret = avcodec_encode_video2(c, &pkt, frame, &got_output);
    if (ret < 0) {
        fprintf(stderr, "Error encoding frame\n");
        exit(1);
    }
    if (got_output) {
        fwrite(pkt.data, 1, pkt.size, file);
        av_packet_unref(&pkt);
    }
}

void ffmpeg_encoder_glread_rgb(uint8_t **rgb, GLubyte **pixels, unsigned int width, unsigned int height) {
    size_t i, j, k, cur_gl, cur_rgb, nvals;
    const size_t format_nchannels = 4;
    nvals = format_nchannels * width * height;
    *pixels = realloc(*pixels, nvals * sizeof(GLubyte));
    *rgb = realloc(*rgb, nvals * sizeof(uint8_t));
    /* Get RGBA to align to 32 bits instead of just 24 for RGB. May be faster for FFmpeg. */
    glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, *pixels);
    for (i = 0; i < height; i++) {
        for (j = 0; j < width; j++) {
            cur_gl  = format_nchannels * (width * (height - i - 1) + j);
            cur_rgb = format_nchannels * (width * i + j);
            for (k = 0; k < format_nchannels; k++)
                (*rgb)[cur_rgb + k] = (*pixels)[cur_gl + k];
        }
    }
}
#endif

static void model_init(void) {
    angle = 0;
    delta_angle = 1;
}

static int model_update(void) {
    angle += delta_angle;
    return 0;
}

static int model_finished(void) {
    return nframes >= max_nframes;
}

static void init(void)  {
    int glget;

    if (offscreen) {
        /*  Framebuffer */
        glGenFramebuffers(1, &fbo);
        glBindFramebuffer(GL_FRAMEBUFFER, fbo);

        /* Color renderbuffer. */
        glGenRenderbuffers(1, &rbo_color);
        glBindRenderbuffer(GL_RENDERBUFFER, rbo_color);
        /* Storage must be one of: */
        /* GL_RGBA4, GL_RGB565, GL_RGB5_A1, GL_DEPTH_COMPONENT16, GL_STENCIL_INDEX8. */
        glRenderbufferStorage(GL_RENDERBUFFER, GL_RGB565, width, height);
        glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, rbo_color);

        /* Depth renderbuffer. */
        glGenRenderbuffers(1, &rbo_depth);
        glBindRenderbuffer(GL_RENDERBUFFER, rbo_depth);
        glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, width, height);
        glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, rbo_depth);

        glReadBuffer(GL_COLOR_ATTACHMENT0);

        /* Sanity check. */
        assert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);
        glGetIntegerv(GL_MAX_RENDERBUFFER_SIZE, &glget);
        assert(width < (unsigned int)glget);
        assert(height < (unsigned int)glget);
    } else {
        glReadBuffer(GL_BACK);
    }

    glClearColor(0.0, 0.0, 0.0, 0.0);
    glEnable(GL_DEPTH_TEST);
    glPixelStorei(GL_PACK_ALIGNMENT, 1);
    glViewport(0, 0, width, height);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glMatrixMode(GL_MODELVIEW);

    time0 = glutGet(GLUT_ELAPSED_TIME);
    model_init();
#if FFMPEG
    ffmpeg_encoder_start("tmp.mpg", AV_CODEC_ID_MPEG1VIDEO, 25, width, height);
#endif
}

static void deinit(void)  {
    printf("FPS = %f\n", 1000.0 * nframes / (double)(glutGet(GLUT_ELAPSED_TIME) - time0));
    free(pixels);
#if LIBPNG
    if (output_formats & LIBPNG_BIT) {
        free(png_bytes);
        free(png_rows);
    }
#endif
#if FFMPEG
    if (output_formats & FFMPEG_BIT) {
        ffmpeg_encoder_finish();
        free(rgb);
    }
#endif
    if (offscreen) {
        glDeleteFramebuffers(1, &fbo);
        glDeleteRenderbuffers(1, &rbo_color);
        glDeleteRenderbuffers(1, &rbo_depth);
    }
}

static void draw_scene(void) {
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glLoadIdentity();
    glRotatef(angle, 0.0f, 0.0f, -1.0f);
    glBegin(GL_TRIANGLES);
    glColor3f(1.0f, 0.0f, 0.0f);
    glVertex3f( 0.0f,  0.5f, 0.0f);
    glColor3f(0.0f, 1.0f, 0.0f);
    glVertex3f(-0.5f, -0.5f, 0.0f);
    glColor3f(0.0f, 0.0f, 1.0f);
    glVertex3f( 0.5f, -0.5f, 0.0f);
    glEnd();
}

static void display(void) {
    char filename[SCREENSHOT_MAX_FILENAME];
    draw_scene();
    if (offscreen) {
        glFlush();
    } else {
        glutSwapBuffers();
    }
#if PPM
    if (output_formats & PPM_BIT) {
        snprintf(filename, SCREENSHOT_MAX_FILENAME, "tmp.%u.ppm", nframes);
        screenshot_ppm(filename, width, height, &pixels);
    }
#endif
#if LIBPNG
    if (output_formats & LIBPNG_BIT) {
        snprintf(filename, SCREENSHOT_MAX_FILENAME, "tmp.%u.png", nframes);
        screenshot_png(filename, width, height, &pixels, &png_bytes, &png_rows);
    }
#endif
# if FFMPEG
    if (output_formats & FFMPEG_BIT) {
        frame->pts = nframes;
        ffmpeg_encoder_glread_rgb(&rgb, &pixels, width, height);
        ffmpeg_encoder_encode_frame(rgb);
    }
#endif
    nframes++;
    if (model_finished())
        exit(EXIT_SUCCESS);
}

static void idle(void) {
    while (model_update());
    glutPostRedisplay();
}

int main(int argc, char **argv) {
    int arg;
    GLint glut_display;

    /* CLI args. */
    glutInit(&argc, argv);
    arg = 1;
    if (argc > arg) {
        offscreen = (argv[arg][0] == '1');
    } else {
        offscreen = 1;
    }
    arg++;
    if (argc > arg) {
        max_nframes = strtoumax(argv[arg], NULL, 10);
    }
    arg++;
    if (argc > arg) {
        width = strtoumax(argv[arg], NULL, 10);
    }
    arg++;
    if (argc > arg) {
        height = strtoumax(argv[arg], NULL, 10);
    }
    arg++;
    if (argc > arg) {
        output_formats = strtoumax(argv[arg], NULL, 10);
    }

    /* Work. */
    if (offscreen) {
        /* TODO: if we use anything smaller than the window, it only renders a smaller version of things. */
        /*glutInitWindowSize(50, 50);*/
        glutInitWindowSize(width, height);
        glut_display = GLUT_SINGLE;
    } else {
        glutInitWindowSize(width, height);
        glutInitWindowPosition(100, 100);
        glut_display = GLUT_DOUBLE;
    }
    glutInitDisplayMode(glut_display | GLUT_RGBA | GLUT_DEPTH);
    glutCreateWindow(argv[0]);
    if (offscreen) {
        /* TODO: if we hide the window the program blocks. */
        /*glutHideWindow();*/
    }
    init();
    glutDisplayFunc(display);
    glutIdleFunc(idle);
    atexit(deinit);
    glutMainLoop();
    return EXIT_SUCCESS;
}

On GitHub.

Compile with:

sudo apt-get install libpng-dev libavcodec-dev libavutil-dev libswscale-dev
gcc -DPPM=1 -DLIBPNG=1 -DFFMPEG=1 -ggdb3 -std=c99 -O0 -Wall -Wextra \
  -o offscreen offscreen.c -lGL -lGLU -lglut -lpng -lavcodec -lswscale -lavutil

Run 10 frames "offscreen" (mostly TODO, works but has no advantage), with size 200 x 100 and all output formats:

./offscreen 1 10 200 100 7

CLI format is:

./offscreen [offscreen [nframes [width [height [output_formats]]]]]

and output_formats is a bitmask:

bit 0 = ppm, bit 1 = png, bit 2 = mpeg (e.g. 7 enables all three, 5 enables ppm and mpeg)

Run on-screen (does not limit my FPS either):

./offscreen 0

Benchmarked on Ubuntu 15.10, OpenGL 4.4.0 NVIDIA 352.63, Lenovo Thinkpad T430.

Also tested on Ubuntu 18.04, OpenGL 4.6.0 NVIDIA 390.77, Lenovo Thinkpad P51.

TODO: find a way to do it on a machine without a GUI (e.g. without X11). It seems that OpenGL is just not made for offscreen rendering, and that reading pixels back from the GPU is implemented in the interface with the windowing system (e.g. GLX). See: OpenGL without X.org in Linux

TODO: use a 1x1 window, make it un-resizable, and hide it to make things more robust. If I do either of those, the rendering fails; see the code comments. Preventing resize seems impossible in GLUT, but GLFW supports it, as sketched below. In any case, those don't matter much, as my FPS is not limited by the screen refresh frequency even when offscreen is off.
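
For reference, a sketch of the GLFW 3 hints involved (assuming glfwInit() has already been called; width and height as elsewhere):

/* Hedged GLFW 3 sketch: a window that cannot be resized and is never shown. */
glfwWindowHint(GLFW_RESIZABLE, GLFW_FALSE);
glfwWindowHint(GLFW_VISIBLE, GLFW_FALSE);
GLFWwindow *window = glfwCreateWindow(width, height, "offscreen", NULL, NULL);
glfwMakeContextCurrent(window);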

Other options besides the FBO

  • render to the backbuffer (the default render target)
  • render to a texture
  • render to a Pixelbuffer object (PBO)

Framebuffers and pixelbuffers are better than the backbuffer and textures since they are made for data to be read back to the CPU, while the backbuffer and textures are made to stay on the GPU and be shown on screen.

PBO is for asynchronous transfers, so I think we don't need it here, see: What are the differences between a Frame Buffer Object and a Pixel Buffer Object in OpenGL?
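
For reference, a minimal hedged sketch of what that asynchronous PBO readback would look like (width and height are assumed from context; all calls are core OpenGL 2.1):

/* Hedged sketch: asynchronous readback through a pixel pack buffer. */
GLuint pbo;
glGenBuffers(1, &pbo);
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo);
glBufferData(GL_PIXEL_PACK_BUFFER, width * height * 4, NULL, GL_STREAM_READ);
/* With a pack buffer bound, the last argument is a byte offset into that
 * buffer rather than a client pointer, so the call can return before the
 * GPU has finished the transfer. */
glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, 0);
/* ... overlap CPU work here, then map the buffer to access the data ... */
GLubyte *ptr = glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY);
if (ptr) {
    /* use ptr[0 .. width * height * 4) */
    glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
}
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);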

Maybe off-screen Mesa is worth looking into: http://www.mesa3d.org/osmesa.html
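
If you do look into it: a hedged, untested sketch of the OSMesa API, which renders into a plain client-side buffer with no window system at all (link with -lOSMesa):

/* Sketch: offscreen rendering with OSMesa, no X11 or EGL required. */
#include <GL/osmesa.h>
#include <stdlib.h>

int main(void) {
    const int width = 128, height = 128;
    /* RGBA color buffer, 16-bit depth, no stencil or accumulation buffer. */
    OSMesaContext ctx = OSMesaCreateContextExt(OSMESA_RGBA, 16, 0, 0, NULL);
    unsigned char *buffer = malloc(4 * width * height);
    /* Everything now renders directly into `buffer`. */
    if (!ctx || !buffer || !OSMesaMakeCurrent(ctx, buffer, GL_UNSIGNED_BYTE, width, height))
        return 1;
    glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glFinish();
    /* `buffer` holds the image, bottom row first, as with glReadPixels. */
    free(buffer);
    OSMesaDestroyContext(ctx);
    return 0;
}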

Vulkan

It seems that Vulkan is designed to support offscreen rendering better than OpenGL.

This is mentioned in this NVIDIA overview: https://developer.nvidia.com/transitioning-opengl-vulkan

This is a runnable example that I just managed to run locally: https://github.com/SaschaWillems/Vulkan/tree/b9f0ac91d2adccc3055a904d3a8f6553b10ff6cd/examples/renderheadless/renderheadless.cpp

After installing the drivers and ensuring that the GPU is working I can do:

git clone https://github.com/SaschaWillems/Vulkan
cd Vulkan
git checkout b9f0ac91d2adccc3055a904d3a8f6553b10ff6cd
python download_assets.py
mkdir build
cd build
cmake ..
make -j`nproc`
cd bin
./renderheadless

and this immediately generates an image headless.ppm without opening any windows.

I have also managed to run this program on an Ubuntu Ctrl + Alt + F3 non-graphical TTY, which further indicates that it really does not need a screen.

Related: Is it possible to do offscreen rendering without Surface in Vulkan?

Tested on Ubuntu 20.04, NVIDIA driver 435.21, NVIDIA Quadro M1200 GPU.

apitrace

https://github.com/apitrace/apitrace

Just works, and does not require you to modify your code at all:

git clone https://github.com/apitrace/apitrace
cd apitrace
git checkout 7.0
mkdir build
cd build
cmake ..
make
# Creates opengl_executable.out.trace
./apitrace trace /path/to/opengl_executable.out
./apitrace dump-images opengl_executable.out.trace

Also available on Ubuntu 18.10 with:

 sudo apt-get install apitrace

You now have a bunch of screenshots named:

animation.out.<n>.png

TODO: working principle.

The docs also suggest this for video:

apitrace dump-images -o - application.trace \
  | ffmpeg -r 30 -f image2pipe -vcodec ppm -i pipe: -vcodec mpeg4 -y output.mp4

See also:

WebGL + canvas image saving

This is mostly a toy due to performance, but it kind of works for really basic use cases.

我不是你的备胎 2024-09-16 02:26:24

You almost certainly don't want GLUT, regardless. Your requirements don't fit what it's intended to do (and even when your requirements do fit its intended purpose, you usually don't want it anyway).

You can use OpenGL. To generate output in a file, you basically set up OpenGL to render to a texture, and then read the resulting texture into main memory and save it to a file. At least on some systems (e.g., Windows), I'm pretty sure you'll still have to create a window and associate the rendering context with the window, though it will probably be fine if the window is always hidden.
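
A hedged sketch of that render-to-texture setup (assuming a context already exists; width, height and out_buffer are placeholders):

/* Sketch: draw into a texture through an FBO, then read the result back. */
GLuint tex, fbo;
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0,
             GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glGenFramebuffers(1, &fbo);
glBindFramebuffer(GL_FRAMEBUFFER, fbo);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                       GL_TEXTURE_2D, tex, 0);
/* ... draw the scene here ... */
glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, out_buffer);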

染火枫林 2024-09-16 02:26:24

Not to take away from the other excellent answers, but if you want an existing example: we have been doing offscreen GL rendering for a few years now in OpenSCAD, as part of the test framework that renders to .png files from the command line. The relevant files are under https://github.com/openscad/openscad/tree/master/src, in the Offscreen*.cc files.

It runs on OSX (CGL), Linux X11 (GLX), BSD (GLX), and Windows (WGL), with some quirks due to driver differences. The basic trick is to forget to open a window (as Douglas Adams says, the trick to flying is to forget to hit the ground). It even runs on 'headless' Linux/BSD if you have a virtual X11 server running, like Xvfb or Xvnc, as shown below. There is also the possibility of using software rendering on Linux/BSD by setting the environment variable LIBGL_ALWAYS_SOFTWARE=1 before running your program, which can help in some situations.
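
For instance, a headless run under Xvfb might look like this (an untested sketch; display number and resolution are arbitrary):

sudo apt-get install xvfb
Xvfb :5 -screen 0 800x600x24 &
DISPLAY=:5 LIBGL_ALWAYS_SOFTWARE=1 ./your_program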

This is not the only system to do this, I believe the VTK imaging system does something similar.

This code is a bit old in its methods (I ripped the GLX code out of Brian Paul's glxgears), especially as new systems come along: OSMesa, Mir, Wayland, EGL, Android, Vulkan, etc. But notice the OffscreenXXX.cc filenames, where XXX is the GL context subsystem; in theory it can be ported to different context generators.

日裸衫吸 2024-09-16 02:26:24

Not sure OpenGL is the best solution.
But you can always render to an off-screen buffer.

The typical way to write OpenGL output to a file is to use glReadPixels to copy the resulting scene, pixel by pixel, into an image file.

司马昭之心 2024-09-16 02:26:24

1. Rendering offscreen using EGL with a PbufferSurface instead of a WindowSurface

/* Assumed for this sketch: EGL 1.5 headers; WIDTH, HEIGHT and debug are
 * placeholders that the surrounding program is expected to define. */
#include <EGL/egl.h>

#define WIDTH 640
#define HEIGHT 480
const bool debug = false;

EGLDisplay display;
EGLSurface surface;
EGLContext context;

//bind desktop OpenGL
eglBindAPI(EGL_OPENGL_API);
display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
eglInitialize(display, nullptr, nullptr);

//example constraints
const EGLint egl_config_constraints[] = {
    EGL_STENCIL_SIZE, static_cast<EGLint>(8),
    EGL_DEPTH_SIZE, static_cast<EGLint>(16),
    EGL_BUFFER_SIZE, static_cast<EGLint>(32),
    EGL_RED_SIZE, static_cast<EGLint>(8),
    EGL_GREEN_SIZE, static_cast<EGLint>(8),
    EGL_BLUE_SIZE, static_cast<EGLint>(8),
    EGL_ALPHA_SIZE, static_cast<EGLint>(8),
    EGL_SAMPLE_BUFFERS, EGL_FALSE,
    EGL_SAMPLES, 0,
    EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
    EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
    EGL_CONFORMANT, EGL_OPENGL_BIT,
    EGL_CONFIG_CAVEAT, EGL_NONE,
    EGL_NONE };

EGLint configCount;
EGLConfig configs[1];

//find a fitting config
eglChooseConfig(display, egl_config_constraints, configs, 1, &configCount);

//set up the PbufferSurface
EGLint pbuffer_attrib_list[] = {
    EGL_WIDTH, WIDTH,
    EGL_HEIGHT, HEIGHT,
    EGL_NONE };
    
surface = eglCreatePbufferSurface(display, configs[0], pbuffer_attrib_list);

//setup the EGLContext
const EGLint contextVersion[] = {
    EGL_CONTEXT_MAJOR_VERSION, 4,
    EGL_CONTEXT_MINOR_VERSION, 6,
    EGL_CONTEXT_OPENGL_PROFILE_MASK, EGL_CONTEXT_OPENGL_COMPATIBILITY_PROFILE_BIT,
    EGL_CONTEXT_OPENGL_DEBUG, debug ? EGL_TRUE : EGL_FALSE,
    EGL_NONE };
context = eglCreateContext(display, configs[0], EGL_NO_CONTEXT, contextVersion);
eglMakeCurrent(display, surface, surface, context);
eglSwapInterval(display, 1);

2. Saving the images

There are several ways to achieve this. Actually the above code is part of a demo (https://github.com/kallaballa/GCV/blob/main/src/tetra/tetra-demo.cpp) I wrote that uses OpenCL/OpenGL/VAAPI interop in conjunction with OpenCV to write a video of what is rendered in OpenGL.
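
The simplest of those ways, once eglMakeCurrent has succeeded, is plain glReadPixels; a minimal sketch, reusing WIDTH and HEIGHT from above:

/* Minimal readback sketch, assuming the EGL context above is current. */
glFinish();
unsigned char *buf = (unsigned char *)malloc(WIDTH * HEIGHT * 4);
glReadPixels(0, 0, WIDTH, HEIGHT, GL_RGBA, GL_UNSIGNED_BYTE, buf);
/* buf now holds the frame, bottom row first; write it out, e.g. with
 * something like screenshot_ppm from the first answer, then free(buf). */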

Please mind the README if you want to build the demo.
