Native application to render ShaderToy (www.shadertoy.com) shaders
/**
* Native application to render ShaderToy (www.shadertoy.com) shaders
*
* Requirements:
* GLFW - https://www.glfw.org/
* libplum - https://github.com/aaaaaa123456789/libplum
* libavcodec, libavformat, libavutil, libswscale (optional, for video capture) - https://ffmpeg.org/
* portaudio (optional, for sound support)
*/
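/*
 * Example build and run (a sketch, not the author's instructions: the compiler
 * flags, how libplum is linked, and the shader file names below are assumptions
 * that will vary per system):
 *
 *   gcc -O2 shadertoy.c libplum.c -o shadertoy \
 *       -DFEATURE_SOUND=1 -DFEATURE_VIDEO_CAPTURE=1 \
 *       -lglfw -lGL -lportaudio -lavcodec -lavformat -lavutil -lswscale -lm
 *
 *   ./shadertoy -r 1280x720 image.frag,bufferA bufferA:blur.frag,rgba-noise-medium
 */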
#ifdef _WIN32
// Windows' opengl32.dll only exposes OpenGL 1.1 entry points, so GL 2.0+ functions must be loaded at runtime; GLEW handles that here.
#define GLEW_STATIC
#include <GL/glew.h>
#else
#define GL_GLEXT_PROTOTYPES
#endif
#include <GLFW/glfw3.h>
#include <assert.h>
#include <ctype.h>
#include <errno.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <time.h>
#if FEATURE_SOUND
#include <portaudio.h>
#endif
#if FEATURE_VIDEO_CAPTURE
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#endif
#include "libplum.h"
#define MIN_SCREEN_WIDTH 32
#define MIN_SCREEN_HEIGHT 32
static GLFWwindow *window;
static int screenWidth = 640;
static int screenHeight = 480;
static int soundSampleRate = 44100;
static bool vsync = true;
// input to iMouse (x,y,z,w)
//
// x,y are the x,y coordinates of the last mouse position where the button was down.
//
// z,w are the x,y coordinates where the mouse button went down.
// On the frame when the button went down, these are both positive.
// On the following frames (button still held), w is negative, while z remains positive.
// When the mouse button is not held, both w and z are negative.
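// Shaders typically read this as e.g. `vec2 m = iMouse.xy / iResolution.xy;` and test `iMouse.z > 0.0` for a held button.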
static float mouseVec[4];
struct Channel
{
const char *def;
GLuint texture;
};
struct ShaderProgram
{
const char *filename;
const char *target;
struct Channel channels[4];
GLuint fragmentShader;
GLuint program;
bool inUse;
bool isBuffer;
GLuint bufferTexture; // for buffers, the texture to render to
GLuint frameBuffer; // for buffers, the framebuffer to render to
int vpWidth, vpHeight; // dimensions of the rendered shader
// attribute locations
GLint positionLocation;
// uniform locations
GLint iResolutionLocation;
GLint iTimeLocation;
GLint iTimeDeltaLocation;
GLint iFrameLocation;
GLint iChannelTimeLocation;
GLint iMouseLocation;
GLint iDateLocation;
GLint iSampleRateLocation;
GLint iChannelResolutionLocation;
GLint iChannelLocations[4];
};
static struct ShaderProgram imageShader;
static struct ShaderProgram soundShader;
static struct ShaderProgram bufferShaders[4]; // bufferA, bufferB, bufferC, bufferD
static char *commonShaderFileName = NULL;
// Vertex shader
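// Pass-through shader: a_position already holds clip-space coordinates of the full-screen quad (see verts[] in main).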
static const char vertexShaderSource[] = "attribute vec4 a_position;void main(){gl_Position=a_position;}";
static GLuint vertexShader;
static bool capturing = false; // set to true if we are capturing video to a file
static int captureFPS = 60;
static const char *captureFileName = NULL;
#if FEATURE_SOUND
static void play_sound(void);
static void pause_sound(void);
static void reset_sound(void);
#endif
#if FEATURE_VIDEO_CAPTURE
static bool start_video_capture(const char *filename);
static bool capture_frame(void);
static void stop_video_capture(void);
#endif
static bool init_shader_channels(struct ShaderProgram *shader);
//------------------------------------------------------------------------------
// Timer
//------------------------------------------------------------------------------
static bool paused = false;
static int frame = 0; // current frame number
static double animTime = 0.0; // current time in the animation
static double prevAnimTime = 0.0; // time of the previous animation frame
static double deltaTime = 0.0;
/**
* Updates the timer after a frame
*/
static void update_timer(void)
{
assert(!paused);
prevAnimTime = animTime;
if (capturing)
animTime += 1.0 / captureFPS; // ensure a consistent FPS in the output video
else
animTime = glfwGetTime();
deltaTime = animTime - prevAnimTime;
frame++;
}
static void pause_timer(void)
{
assert(!paused);
paused = true;
}
static void unpause_timer(void)
{
assert(paused);
glfwSetTime(animTime);
paused = false;
}
static void reset_timer(void)
{
animTime = prevAnimTime = deltaTime = 0.0;
glfwSetTime(0.0);
frame = 0;
}
//------------------------------------------------------------------------------
// Text functions
//------------------------------------------------------------------------------
/**
* Extracts the base name of the file (without directory), and truncates it with
* ellipsis if it is too long.
*/
static void get_short_file_name(const char *filename, char *buffer, int length)
{
const char *slash = strrchr(filename, '/');
#ifdef _WIN32
const char *backslash = strrchr(filename, '\\');
slash = (slash > backslash) ? slash : backslash;
#endif
if (slash != NULL)
filename = slash + 1;
strncpy(buffer, filename, length);
buffer[length - 1] = 0;
// Add ellipsis
if (strlen(filename) > length - 1)
buffer[length - 2] = buffer[length - 3] = buffer[length - 4] = '.';
}
/**
* Reads the contents of a file into an allocated, null terminated string
*/
static char *read_text_file(const char *filename)
{
char *contents = NULL;
FILE *f;
long int length;
if ((f = fopen(filename, "r")) != NULL) // open file
{
if (fseek(f, 0, SEEK_END) == 0 && (length = ftell(f)) != -1L && fseek(f, 0, SEEK_SET) == 0) // get length
{
if ((contents = malloc(length + 1)) != NULL) // allocate buffer
{
if (fread(contents, length, 1, f) == 1) // read file
contents[length] = 0; // null terminate the buffer
else
{
fprintf(stderr, "Error reading file '%s': %s\n", filename, strerror(errno));
free(contents);
contents = NULL;
}
}
else
fprintf(stderr, "Cannot allocate memory: file '%s' is too large.\n", filename);
}
else
fprintf(stderr, "Cannot determine length of file '%s': %s\n", filename, strerror(errno));
fclose(f);
}
else
fprintf(stderr, "Cannot open file '%s': %s\n", filename, strerror(errno));
return contents;
}
/**
* Splits `string` on character `c` and returns what comes after `c`
*/
static char *split_on(char *string, char c)
{
char *split = strchr(string, c);
if (split == NULL)
return string + strlen(string); // advance to the end
*split = 0; // terminate what came before
return split + 1;
}
/**
* Extracts the file name extension (after the dot). If there is no dot, the
* empty string ("") is returned.
*/
static const char *get_file_extension(const char *filename)
{
const char *dot = strrchr(filename, '.');
return (dot == NULL) ? "" : dot + 1;
}
//------------------------------------------------------------------------------
static void update_title_bar_info(void)
{
static double lastUpdate;
static double fps;
static const double fpsFilter = 0.25;
// smooth the FPS estimate with an exponential moving average (weight fpsFilter on the newest sample)
if (deltaTime > 0.0)
fps = fps * (1.0 - fpsFilter) + (1.0 / deltaTime) * fpsFilter;
// Calling glfwSetWindowTitle is very expensive, so reduce CPU usage by
// updating the title bar no more than 5 times per second
if (animTime - lastUpdate > 0.2 || paused || frame == 0)
{
char title[100];
char filename[30];
lastUpdate = animTime;
get_short_file_name(imageShader.filename, filename, sizeof(filename));
sprintf(title, "%s | Time: %.2fs | FPS: %.0f", filename, animTime, fps);
glfwSetWindowTitle(window, title);
}
}
static void check_gl_errors(void)
{
#ifndef NDEBUG
GLenum error = glGetError();
switch (error)
{
case GL_NO_ERROR:
return;
#define HANDLE_ERROR(err) case err: fputs(#err "\n", stderr); break;
HANDLE_ERROR(GL_INVALID_ENUM)
HANDLE_ERROR(GL_INVALID_VALUE)
HANDLE_ERROR(GL_INVALID_OPERATION)
HANDLE_ERROR(GL_INVALID_FRAMEBUFFER_OPERATION)
HANDLE_ERROR(GL_OUT_OF_MEMORY)
HANDLE_ERROR(GL_STACK_UNDERFLOW)
HANDLE_ERROR(GL_STACK_OVERFLOW)
#undef HANDLE_ERROR
default:
fprintf(stderr, "Other GL error: %i\n", error);
break;
}
assert(0);
#endif
}
//------------------------------------------------------------------------------
/**
* Compiles a new OpenGL shader
* @param type the type of shader (GL_VERTEX_SHADER or GL_FRAGMENT_SHADER)
* @param sources array of strings containing the shader source code
* @param count length of the `sources` array
* @return identifier of the shader, or 0 on failure
*/
static GLuint compile_shader(GLenum type, int count, const char **sources)
{
GLint lengths[count];
GLuint shader;
GLint compiled;
for (int i = 0; i < count; i++)
lengths[i] = strlen(sources[i]);
shader = glCreateShader(type);
glShaderSource(shader, count, sources, lengths);
glCompileShader(shader);
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled)
{
GLint logLength = 0;
char *log = NULL;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &logLength);
if (logLength)
log = malloc(logLength);
glGetShaderInfoLog(shader, logLength, &logLength, log);
fprintf(stderr, "Shader compilation failed:\n%s\n", log);
if (log)
free(log);
return 0;
}
return shader;
}
/**
* Creates a new OpenGL program object with the specified vertex and fragment shaders
* @return identifier of the program, or 0 on failure
*/
static GLuint link_program(GLuint vertexShader, GLuint fragmentShader)
{
GLint linked;
GLuint program = glCreateProgram();
glAttachShader(program, vertexShader);
glAttachShader(program, fragmentShader);
glLinkProgram(program);
glGetProgramiv(program, GL_LINK_STATUS, &linked);
if (!linked)
{
GLint logLength = 0;
char *log = NULL;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLength);
if (logLength)
log = malloc(logLength);
glGetProgramInfoLog(program, logLength, &logLength, log);
fprintf(stderr, "Failed to link program:\n%s\n", log);
if (log)
free(log);
return 0;
}
return program;
}
/**
* Compiles and links the shader, and determines its input locations
*/
static bool setup_shader_program(struct ShaderProgram *shader)
{
static const char fragHeader[] =
"#version 130\n"
"uniform vec3 iResolution;\n"
"uniform float iTime;\n"
"uniform float iTimeDelta;\n"
"uniform float iFrame;\n"
"uniform float iChannelTime[4];\n"
"uniform vec4 iMouse;\n"
"uniform vec4 iDate;\n"
"uniform float iSampleRate;\n"
"uniform vec3 iChannelResolution[4];\n"
"uniform sampler2D iChannel0;\n"
"uniform sampler2D iChannel1;\n"
"uniform sampler2D iChannel2;\n"
"uniform sampler2D iChannel3;\n"
"#line 0\n";
static const char imageFragFooter[] =
"\nvoid main(){mainImage(gl_FragColor,gl_FragCoord.xy);}";
static const char soundFragFooter[] =
"\nuniform float iBlockOffset;void main(){\n"
// compute time based on the pixel we're about to write
"float t = iBlockOffset + ((gl_FragCoord.x-0.5) + (gl_FragCoord.y-0.5)*512.0)/iSampleRate;\n"
"vec2 y = mainSound(0,t);\n"
"vec2 v = floor((0.5+0.5*y)*65536.0);\n"
"vec2 vl = mod(v,256.0)/255.0;\n"
"vec2 vh = floor(v/256.0)/255.0;\n"
"gl_FragColor = vec4(vl.x,vh.x,vl.y,vh.y);\n"
// "gl_FragColor = vec4(0,1,0,1);\n"
"}";
char *source;
char *commonSource = NULL;
const GLchar *sourceList[5];
int sourceCount = 0;
printf("Compiling shader '%s'\n", shader->filename);
// Read shader source
source = read_text_file(shader->filename);
if (source == NULL)
return false;
if (commonShaderFileName != NULL)
{
commonSource = read_text_file(commonShaderFileName);
if (commonSource == NULL)
{
free(source);
return false;
}
}
// Destroy the previous program and fragment shader
if (shader->program != 0)
{
if (shader->fragmentShader != 0)
{
glDetachShader(shader->program, shader->fragmentShader);
glDeleteShader(shader->fragmentShader);
shader->fragmentShader = 0;
}
glDeleteProgram(shader->program);
shader->program = 0;
}
sourceList[sourceCount++] = fragHeader;
if (commonSource != NULL)
{
sourceList[sourceCount++] = commonSource;
sourceList[sourceCount++] = "#line 0\n";
}
sourceList[sourceCount++] = source;
sourceList[sourceCount++] = shader == &soundShader ? soundFragFooter : imageFragFooter;
// Compile and link the new program
shader->fragmentShader = compile_shader(GL_FRAGMENT_SHADER, sourceCount, sourceList);
free(source);
if (commonSource != NULL)
free(commonSource);
if (shader->fragmentShader == 0)
return false;
shader->program = link_program(vertexShader, shader->fragmentShader);
if (shader->program == 0)
return false;
// Get attribute locations
shader->positionLocation = glGetAttribLocation(shader->program, "a_position");
if (shader->positionLocation == -1)
{
fputs("Attribute a_position not found\n", stderr);
return false;
}
// Get uniform locations
shader->iResolutionLocation = glGetUniformLocation(shader->program, "iResolution");
shader->iTimeLocation = glGetUniformLocation(shader->program, "iTime");
shader->iTimeDeltaLocation = glGetUniformLocation(shader->program, "iTimeDelta");
shader->iFrameLocation = glGetUniformLocation(shader->program, "iFrame");
shader->iChannelTimeLocation = glGetUniformLocation(shader->program, "iChannelTime");
shader->iMouseLocation = glGetUniformLocation(shader->program, "iMouse");
shader->iDateLocation = glGetUniformLocation(shader->program, "iDate");
shader->iSampleRateLocation = glGetUniformLocation(shader->program, "iSampleRate");
shader->iChannelResolutionLocation = glGetUniformLocation(shader->program, "iChannelResolution");
shader->iChannelLocations[0] = glGetUniformLocation(shader->program, "iChannel0");
shader->iChannelLocations[1] = glGetUniformLocation(shader->program, "iChannel1");
shader->iChannelLocations[2] = glGetUniformLocation(shader->program, "iChannel2");
shader->iChannelLocations[3] = glGetUniformLocation(shader->program, "iChannel3");
if (shader->iDateLocation != -1)
fputs("Warning: iDate not supported yet\n", stderr);
//if (shader->iSampleRateLocation != -1)
// fputs("Warning: iSampleRate not supported yet\n", stderr);
if (shader->iChannelResolutionLocation != -1)
fputs("Warning: iChannelResolution not supported yet\n", stderr);
return true;
}
//------------------------------------------------------------------------------
// Rendering
//------------------------------------------------------------------------------
static int viewportWidth, viewportHeight;
/**
* Renders the shader program on a viewport-sized quad.
* If the shader is the image shader, the output goes to the screen.
* If the shader is a buffer, the output goes to its texture through an FBO.
*/
static void do_shader(const struct ShaderProgram *shader)
{
glUseProgram(shader->program);
// Bind the position attribute
glEnableVertexAttribArray(shader->positionLocation);
glVertexAttribPointer(shader->positionLocation, 2, GL_BYTE, GL_FALSE, 0, 0);
// Set uniform shader inputs
if (-1 != shader->iResolutionLocation) glUniform3f(shader->iResolutionLocation, screenWidth, screenHeight, 0.0f);
if (-1 != shader->iTimeLocation) glUniform1f(shader->iTimeLocation, animTime);
if (-1 != shader->iTimeDeltaLocation) glUniform1f(shader->iTimeDeltaLocation, deltaTime);
if (-1 != shader->iFrameLocation) glUniform1f(shader->iFrameLocation, frame);
// For now, set all channel times to animTime
if (-1 != shader->iChannelTimeLocation) glUniform1fv(shader->iChannelTimeLocation, 4, (GLfloat[]) { animTime, animTime, animTime, animTime} );
if (-1 != shader->iMouseLocation) glUniform4fv(shader->iMouseLocation, 1, mouseVec);
if (-1 != shader->iSampleRateLocation) glUniform1f(shader->iSampleRateLocation, (float)soundSampleRate);
// TODO: iDate, iChannelResolution
// iChannel
for (int i = 0; i < 4; i++)
{
if (shader->iChannelLocations[i] != -1)
{
// Bind the channel's texture to a texture unit (the texture unit number is the same as the channel number)
glActiveTexture(GL_TEXTURE0 + i);
glBindTexture(GL_TEXTURE_2D, shader->channels[i].texture);
glUniform1i(shader->iChannelLocations[i], i);
}
}
if (shader->isBuffer)
{
// Render to the FBO instead
glBindFramebuffer(GL_FRAMEBUFFER, shader->frameBuffer);
glDrawBuffers(1, (GLenum[]){GL_COLOR_ATTACHMENT0});
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
{
fputs("Can't render to the texture\n", stderr);
return;
}
}
if (viewportWidth != shader->vpWidth || viewportHeight != shader->vpHeight)
{
viewportWidth = shader->vpWidth;
viewportHeight = shader->vpHeight;
glViewport(0, 0, viewportWidth, viewportHeight);
}
// Draw it
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
if (shader->isBuffer)
glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
/**
* Renders all shaders
*/
static void render(void)
{
if (imageShader.program == 0)
return;
//glClear(GL_COLOR_BUFFER_BIT);
// render buffers
for (int i = 0; i < 4; i++)
if (bufferShaders[i].inUse)
do_shader(&bufferShaders[i]);
// render the image
do_shader(&imageShader);
check_gl_errors();
}
//------------------------------------------------------------------------------
// GLFW event callbacks
//------------------------------------------------------------------------------
static void on_resize(GLFWwindow* window, int width, int height)
{
screenWidth = width;
screenHeight = height;
//glViewport(0, 0, width, height);
imageShader.vpWidth = width;
imageShader.vpHeight = height;
// Resize buffer textures. They should always be viewport size, according to Shadertoy
for (int i = 0; i < 4; i++)
{
if (bufferShaders[i].inUse && bufferShaders[i].bufferTexture != 0)
{
bufferShaders[i].vpWidth = width;
bufferShaders[i].vpHeight = height;
glBindTexture(GL_TEXTURE_2D, bufferShaders[i].bufferTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
}
}
render();
glfwSwapBuffers(window);
}
static void on_mouse_move(GLFWwindow* window, double xpos, double ypos)
{
if (mouseVec[2] > 0.0) // z is positive when the button is pressed
{
mouseVec[0] = xpos;
mouseVec[1] = screenHeight - 1 - ypos;
}
}
static void on_mouse_button(GLFWwindow* window, int button, int action, int mods)
{
if (button == GLFW_MOUSE_BUTTON_LEFT)
{
double xpos, ypos;
glfwGetCursorPos(window, &xpos, &ypos);
if (action == GLFW_PRESS)
{
// coordinates of the last position when the mouse was down
mouseVec[0] = xpos;
mouseVec[1] = screenHeight - 1 - ypos;
// coordinates of the mouse position when it went down
mouseVec[2] = mouseVec[0];
mouseVec[3] = mouseVec[1];
}
else
{
// Make z negative when releasing the mouse
if (mouseVec[2] > 0.0)
mouseVec[2] = -mouseVec[2];
}
}
}
static void dump_shader_info(struct ShaderProgram *shader)
{
printf("Shader %s\n", shader->target);
printf(" Uniform Locations:\n"
" iResolution: %3i\n"
" iTime: %3i\n"
" iTimeDelta: %3i\n"
" iFrame %3i\n"
" iChannelTime: %3i\n"
" iMouse: %3i\n"
" iDate: %3i\n"
" iSampleRate: %3i\n"
" iChannelResolution: %3i\n"
" iChannel0 %3i ('%s', texture ID %i)\n"
" iChannel1 %3i ('%s', texture ID %i)\n"
" iChannel2 %3i ('%s', texture ID %i)\n"
" iChannel3 %3i ('%s', texture ID %i)\n"
" FBO: %i (texture ID %i)\n",
shader->iResolutionLocation,
shader->iTimeLocation,
shader->iTimeDeltaLocation,
shader->iFrameLocation,
shader->iChannelTimeLocation,
shader->iMouseLocation,
shader->iDateLocation,
shader->iSampleRateLocation,
shader->iChannelResolutionLocation,
shader->iChannelLocations[0], shader->channels[0].def, shader->channels[0].texture,
shader->iChannelLocations[1], shader->channels[1].def, shader->channels[1].texture,
shader->iChannelLocations[2], shader->channels[2].def, shader->channels[2].texture,
shader->iChannelLocations[3], shader->channels[3].def, shader->channels[3].texture,
shader->frameBuffer, shader->bufferTexture);
}
static void on_key(GLFWwindow* window, int key, int scancode, int action, int mods)
{
static int prevX, prevY, prevWidth, prevHeight;
if (action == GLFW_PRESS)
{
switch (key)
{
case GLFW_KEY_SPACE:
if (paused)
{
unpause_timer();
#if FEATURE_SOUND
play_sound();
#endif
}
else
{
pause_timer();
#if FEATURE_SOUND
pause_sound();
#endif
}
update_title_bar_info();
break;
case GLFW_KEY_F11:
// toggle full screen
if (glfwGetWindowMonitor(window) == NULL)
{
GLFWmonitor *monitor = glfwGetPrimaryMonitor();
const GLFWvidmode *mode = glfwGetVideoMode(monitor);
glfwGetWindowPos(window, &prevX, &prevY);
glfwGetWindowSize(window, &prevWidth, &prevHeight);
glfwSetWindowMonitor(window, monitor, 0, 0, mode->width, mode->height, mode->refreshRate);
}
else
glfwSetWindowMonitor(window, NULL, prevX, prevY, prevWidth, prevHeight, 0);
break;
case GLFW_KEY_R:
reset_timer();
#if FEATURE_SOUND
reset_sound();
#endif
if (paused)
{
render();
update_title_bar_info();
glfwSwapBuffers(window);
}
break;
case GLFW_KEY_D:
dump_shader_info(&imageShader);
for (int i = 0; i < 4; i++)
if (bufferShaders[i].inUse)
dump_shader_info(&bufferShaders[i]);
break;
}
}
}
//------------------------------------------------------------------------------
// Sound
//------------------------------------------------------------------------------
#if FEATURE_SOUND
#define MAX_SAMPLES (180 * soundSampleRate)
#define SOUND_TEXTURE_WIDTH 512
#define SOUND_MAP_ROWS ((MAX_SAMPLES + SOUND_TEXTURE_WIDTH - 1) / SOUND_TEXTURE_WIDTH)
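// soundMap holds the fully rendered audio as RGBA pixels, SOUND_TEXTURE_WIDTH samples per row (filled by render_sound)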
static int currSample = 0;
static uint8_t *soundMap;
static PaStream *soundStream;
static int stream_callback(
const void *input, void *output, unsigned long int frameCount,
const PaStreamCallbackTimeInfo *timeInfo,
PaStreamCallbackFlags statusFlags,
void *userData)
{
int16_t *out = output;
int start = currSample;
int end = start + frameCount;
if (start >= MAX_SAMPLES)
return paComplete;
if (end > MAX_SAMPLES)
end = MAX_SAMPLES;
for (int i = start; i < end; i++)
{
uint8_t r = soundMap[i * 4 + 0];
uint8_t g = soundMap[i * 4 + 1];
uint8_t b = soundMap[i * 4 + 2];
uint8_t a = soundMap[i * 4 + 3];
int left = r | (g << 8);
int right = b | (a << 8);
*out++ = left - 32768;
*out++ = right - 32768;
}
currSample += frameCount;
return paContinue;
}
static void render_sound(void)
{
int textureHeight = soundShader.vpHeight;
GLint iBlockOffsetLocation = glGetUniformLocation(soundShader.program, "iBlockOffset");
puts("rendering sound");
glUseProgram(soundShader.program);
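// Render the audio in blocks of SOUND_TEXTURE_WIDTH * textureHeight samples; iBlockOffset is the start time (in seconds) of the current block.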
for (int y = 0; y < SOUND_MAP_ROWS; y += textureHeight)
{
void *dest = (uint32_t *)soundMap + y * SOUND_TEXTURE_WIDTH;
float t = (float)(y * SOUND_TEXTURE_WIDTH) / (float)soundSampleRate;
int height = SOUND_MAP_ROWS - y;
if (height > textureHeight)
height = textureHeight;
glUniform1f(iBlockOffsetLocation, t);
do_shader(&soundShader);
glBindFramebuffer(GL_READ_FRAMEBUFFER, soundShader.frameBuffer);
glReadPixels(0, 0, SOUND_TEXTURE_WIDTH, height, GL_RGBA, GL_UNSIGNED_BYTE, dest);
}
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
}
static bool init_sound(void)
{
if (soundShader.inUse)
{
GLint maxTextureSize;
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);
//printf("max texture size: %i\n", maxTextureSize);
int soundTextureHeight = maxTextureSize;
soundMap = malloc(SOUND_TEXTURE_WIDTH * SOUND_MAP_ROWS * sizeof(uint32_t));
// Set up sound shader
soundShader.isBuffer = true;
soundShader.vpWidth = SOUND_TEXTURE_WIDTH;
soundShader.vpHeight = soundTextureHeight;
if (!init_shader_channels(&soundShader) || !setup_shader_program(&soundShader))
return false;
// set up rendering to texture
glGenTextures(1, &soundShader.bufferTexture);
glBindTexture(GL_TEXTURE_2D, soundShader.bufferTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, SOUND_TEXTURE_WIDTH, soundTextureHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
// Create framebuffer object
glGenFramebuffers(1, &soundShader.frameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, soundShader.frameBuffer);
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, soundShader.bufferTexture, 0); // attach the texture (not the FBO id) to the color attachment
check_gl_errors();
render_sound();
// Initialize portaudio
PaError err = Pa_Initialize();
if (err != paNoError)
{
fprintf(stderr, "Failed to initialize audio: %s\n", Pa_GetErrorText(err));
return false;
}
err = Pa_OpenDefaultStream(&soundStream, 0, 2, paInt16, soundSampleRate, 256, stream_callback, NULL);
if (err != paNoError)
{
fprintf(stderr, "Failed to open audio stream: %s\n", Pa_GetErrorText(err));
return false;
}
}
return true;
}
static void play_sound(void)
{
if (soundStream != NULL)
{
PaError err = Pa_StartStream(soundStream);
if (err != paNoError)
fprintf(stderr, "Failed to start audio stream: %s\n", Pa_GetErrorText(err));
}
}
static void pause_sound(void)
{
if (soundStream != NULL)
{
PaError err = Pa_StopStream(soundStream);
if (err != paNoError)
fprintf(stderr, "Failed to stop audio stream: %s\n", Pa_GetErrorText(err));
}
}
static void reset_sound(void)
{
pause_sound();
currSample = 0;
if (!paused)
play_sound();
}
static void cleanup_sound(void)
{
if (soundStream != NULL)
Pa_CloseStream(soundStream);
if (soundShader.inUse)
Pa_Terminate();
if (soundMap != NULL)
free(soundMap);
}
#endif
//------------------------------------------------------------------------------
// Command-line
//------------------------------------------------------------------------------
static void usage(const char *program)
{
fprintf(stderr,
"usage: %s [OPTIONS...] SHADER_DEF...\n"
"Renders OpenGL fragment shaders\n"
"\n"
"Each SHADER_DEF argument defines a shader and is of the following form:\n"
" [TARGET:]FILENAME[,CHANNELS...]\n"
"\n"
" TARGET specifies the output of the shader. These correspond to the editor tabs\n"
" on www.shadertoy.com.\n"
" Valid target values are:\n"
" image Renders to the screen. This is the default if not specified.\n"
" bufferA Renders to buffer A\n"
" bufferB Renders to buffer B\n"
" bufferC Renders to buffer C\n"
" bufferD Renders to buffer D\n"
#if FEATURE_SOUND
" sound Generates audio.\n"
#endif
"\n"
" CHANNELS is a comma-separated list specifying the sources of the shader's\n"
" iChannel0-iChannel3 inputs,\n"
" Channels may be one of the following:\n"
" bufferA The output from buffer A\n"
" bufferB The output from buffer B\n"
" bufferC The output from buffer C\n"
" bufferD The output from buffer D\n"
" (filename).png Texture in PNG format\n"
" (filename).jpg Texture in JPEG format\n"
" gray-noise-medium A 256x256 grayscale noise texture\n"
" gray-noise-small A 64x64 grayscale noise texture\n"
" rgba-noise-medium A 256x256 RGBA noise texture\n"
" rgba-noise-small A 64x64 RGBA noise texture\n"
"\n"
"OPTIONS:\n"
" -common FILE Adds the code from FILE to the beginning of every other\n"
" shader's code. This is used to share functions between\n"
" shaders.\n"
" -r WIDTHxHEIGHT Sets the initial window resolution\n"
" -h Print this help message\n"
" -novsync Disable VSync (un-caps FPS)\n"
#if FEATURE_VIDEO_CAPTURE
" -capture FILE Captures video output to FILE\n"
#endif
"\n"
"Key bindings:\n"
" Spacebar Pause/Unpause the animation\n"
" F11 Toggles full screen\n"
" R Restarts animation from beginning\n",
program);
}
/**
* Parses a shader definition
* A shader definition is of the form "target:filename,channel0,channel1,channel2,channel3"
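* e.g. "bufferA:blur.frag,rgba-noise-medium" or "image.frag,bufferA" (hypothetical file names)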
* See the usage message for an explanation
* @return true if successful, false if failed
*/
static bool parse_shader_def(char *def)
{
char *target;
char *channels;
struct ShaderProgram *shader;
// Parse target: field
target = "image"; // default if not specified
if (strchr(def, ':'))
{
target = def;
def = split_on(def, ':');
}
// find which shader we're dealing with
if (strcmp(target, "image") == 0) shader = &imageShader;
else if (strcmp(target, "bufferA") == 0) shader = &bufferShaders[0];
else if (strcmp(target, "bufferB") == 0) shader = &bufferShaders[1];
else if (strcmp(target, "bufferC") == 0) shader = &bufferShaders[2];
else if (strcmp(target, "bufferD") == 0) shader = &bufferShaders[3];
#if FEATURE_SOUND
else if (strcmp(target, "sound") == 0) shader = &soundShader;
#endif
else
{
fprintf(stderr, "No such target '%s'\n", target);
return false;
}
// Don't allow duplicates
if (shader->inUse)
{
fprintf(stderr, "Already specified a shader for target '%s'\n", target);
return false;
}
shader->target = target;
shader->inUse = true;
shader->filename = def;
if (shader->filename[0] == 0)
{
fprintf(stderr, "Missing filename for %s\n", target);
return false;
}
// Parse channels
channels = split_on(def, ',');
for (int i = 0; i < 4; i++)
{
shader->channels[i].def = channels;
channels = split_on(channels, ',');
}
if (*channels != 0) // junk at end
return false;
return true;
}
static bool parse_args(int argc, char **argv)
{
if (argc < 2)
goto invalid;
for (int i = 1; i < argc; i++)
{
char *arg = argv[i];
if (arg[0] == '-')
{
if (strcmp(arg, "-common") == 0)
{
if (++i < argc)
{
if (commonShaderFileName == NULL)
{
commonShaderFileName = argv[i];
continue;
}
fputs("Already specified common shader\n", stderr);
}
}
else if (strcmp(arg, "-h") == 0)
{
usage(argv[0]);
exit(EXIT_SUCCESS);
}
else if (strcmp(arg, "-r") == 0)
{
if (++i < argc && sscanf(argv[i], "%dx%d", &screenWidth, &screenHeight) == 2)
continue;
}
else if (strcmp(arg, "-novsync") == 0)
{
vsync = false;
continue;
}
#if FEATURE_VIDEO_CAPTURE
else if (strcmp(arg, "-capture") == 0)
{
if (++i < argc)
{
captureFileName = argv[i];
continue;
}
}
#endif
else
fprintf(stderr, "Unrecognized option '%s'\n", arg);
}
else
{
if (parse_shader_def(arg))
continue;
}
goto invalid;
}
if (!imageShader.inUse)
{
fputs("no image shader specified\n", stderr);
goto invalid;
}
return true;
invalid:
usage(argv[0]);
return false;
}
static GLuint rgba_noise_texture(int width, int height);
static GLuint gray_noise_texture(int width, int height);
static GLuint image_texture(const char *filename);
static bool init_shader_channels(struct ShaderProgram *shader)
{
// init channels
for (int i = 0; i < 4; i++)
{
struct Channel *channel = &shader->channels[i];
if (channel->def[0] != 0)
{
const char *ext = get_file_extension(channel->def);
glActiveTexture(GL_TEXTURE0 + i); // use the same texture unit number as the channel number
// external textures
if (strcmp(ext, "png") == 0 || strcmp(ext, "jpg") == 0) channel->texture = image_texture(channel->def);
// buffers
else if (strcmp(channel->def, "bufferA") == 0) channel->texture = bufferShaders[0].bufferTexture;
else if (strcmp(channel->def, "bufferB") == 0) channel->texture = bufferShaders[1].bufferTexture;
else if (strcmp(channel->def, "bufferC") == 0) channel->texture = bufferShaders[2].bufferTexture;
else if (strcmp(channel->def, "bufferD") == 0) channel->texture = bufferShaders[3].bufferTexture;
// procedurally-generated textures
else if (strcmp(channel->def, "gray-noise-medium") == 0) channel->texture = gray_noise_texture(256, 256);
else if (strcmp(channel->def, "gray-noise-small") == 0) channel->texture = gray_noise_texture(64, 64);
else if (strcmp(channel->def, "rgba-noise-medium") == 0) channel->texture = rgba_noise_texture(256, 256);
else if (strcmp(channel->def, "rgba-noise-small") == 0) channel->texture = rgba_noise_texture(64, 64);
else
{
fprintf(stderr, "Unrecognized channel input '%s'\n", channel->def);
return false;
}
if (channel->texture == 0)
fprintf(stderr, "Warning: %s shader's iChannel%i does not have a texture\n", shader->target, i);
if (channel->texture != 0 && channel->texture == shader->bufferTexture)
fputs("Warning: rendering a buffer to itself is not currently supported!\n", stderr);
}
}
return true;
}
int main(int argc, char **argv)
{
GLuint buffer;
static const GLbyte verts[] = { 1, 1, -1, 1, 1, -1, -1, -1 };
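// full-screen quad as a triangle strip in clip space: (1,1), (-1,1), (1,-1), (-1,-1)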
int retcode = EXIT_FAILURE;
char **myArgv = NULL;
for (int i = 0; i < 4; i++)
bufferShaders[i].isBuffer = true;
if (argc == 2 && strcmp(get_file_extension(argv[1]), "opt") == 0)
{
char *fileOpts;
int myArgc = 0;
// Read options from file instead
printf("reading options from '%s'\n", argv[1]);
fileOpts = read_text_file(argv[1]);
if (fileOpts == NULL)
goto cleanup;
myArgv = malloc(1 * sizeof(*myArgv));
myArgv[myArgc++] = argv[0];
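// Split the file contents on whitespace into argv-style tokens; they point into fileOpts, which must therefore stay allocated.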
for (char *c = fileOpts; *c != 0; c++)
{
while (isspace(*c))
c++;
if (*c == 0)
break;
myArgv = realloc(myArgv, (myArgc + 1) * sizeof(*myArgv));
myArgv[myArgc++] = c;
while (*c != 0 && !isspace(*c))
c++;
if (*c == 0)
break;
*c = 0;
}
if (!parse_args(myArgc, myArgv))
goto cleanup;
}
else
{
if (!parse_args(argc, argv))
goto cleanup;
}
if (screenWidth < MIN_SCREEN_WIDTH || screenHeight < MIN_SCREEN_HEIGHT)
{
screenWidth = MIN_SCREEN_WIDTH;
screenHeight = MIN_SCREEN_HEIGHT;
fprintf(stderr, "Sorry, the minimum resolution is %ix%i\n", MIN_SCREEN_WIDTH, MIN_SCREEN_HEIGHT);
}
if (!glfwInit())
{
fputs("Failed to initialize GLFW\n", stderr);
goto cleanup;
}
glfwWindowHint(GLFW_CLIENT_API, GLFW_OPENGL_API);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_ANY_PROFILE);
if (captureFileName != NULL)
glfwWindowHint(GLFW_RESIZABLE, GLFW_FALSE); // Don't allow resizing the viewport while recording
window = glfwCreateWindow(screenWidth, screenHeight, "Shader Toy", NULL, NULL);
if (window == NULL)
{
fputs("Failed to create window\n", stderr);
goto cleanup;
}
glfwSetWindowSizeLimits(window, MIN_SCREEN_WIDTH, MIN_SCREEN_HEIGHT, GLFW_DONT_CARE, GLFW_DONT_CARE);
glfwSetFramebufferSizeCallback(window, on_resize);
glfwSetCursorPosCallback( window, on_mouse_move);
glfwSetMouseButtonCallback( window, on_mouse_button);
glfwSetKeyCallback( window, on_key);
glfwMakeContextCurrent(window);
#ifdef _WIN32
GLenum err = glewInit();
if (err != GLEW_OK)
{
fprintf(stderr, "Failed to initialize GLEW: %s\n", glewGetErrorString(err));
goto cleanup;
}
#endif
glfwSwapInterval(vsync ? 1 : 0);
on_resize(window, screenWidth, screenHeight); // set up initial viewport
// set up geometry
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(verts), verts, GL_STATIC_DRAW);
// compile simple vertex shader
vertexShader = compile_shader(GL_VERTEX_SHADER, 1, (const char *[]) { vertexShaderSource });
if (vertexShader == 0)
goto cleanup;
imageShader.vpWidth = screenWidth;
imageShader.vpHeight = screenHeight;
// Create target textures and FBOs for buffers
for (int i = 0; i < 4; i++)
{
if (bufferShaders[i].inUse)
{
bufferShaders[i].vpWidth = screenWidth;
bufferShaders[i].vpHeight = screenHeight;
// create texture object
glGenTextures(1, &bufferShaders[i].bufferTexture);
if (bufferShaders[i].bufferTexture == 0)
{
fputs("Failed to create buffer texture\n", stderr);
goto cleanup;
}
glBindTexture(GL_TEXTURE_2D, bufferShaders[i].bufferTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, screenWidth, screenHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// create framebuffer object
glGenFramebuffers(1, &bufferShaders[i].frameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, bufferShaders[i].frameBuffer);
glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, bufferShaders[i].bufferTexture, 0);
}
}
// Set up buffer shaders
for (int i = 0; i < 4; i++)
if (bufferShaders[i].inUse)
if (!init_shader_channels(&bufferShaders[i]) || !setup_shader_program(&bufferShaders[i]))
goto cleanup;
// Set up image shader
if (!init_shader_channels(&imageShader) || !setup_shader_program(&imageShader))
goto cleanup;
//soundShader.inUse = false;
#if FEATURE_VIDEO_CAPTURE
// We must do this before initializing sound, since some formats require a specific sample rate.
if (captureFileName != NULL)
{
capturing = start_video_capture(captureFileName);
if (!capturing)
goto cleanup;
}
#endif
#if FEATURE_SOUND
if (!init_sound())
goto cleanup;
#endif
//puts("starting");
reset_timer();
#if FEATURE_SOUND
play_sound();
#endif
check_gl_errors();
// Main loop
glfwPostEmptyEvent();
while (!glfwWindowShouldClose(window))
{
glfwWaitEvents();
if (!paused)
{
glfwPostEmptyEvent(); // Pump the event queue so that we render again next frame
//printf("animTime = %.3f\tprevAnimTime = %.3f\tdelta = %.3f\tframe = %i\n", animTime, prevAnimTime, deltaTime, frame);
render();
#if FEATURE_VIDEO_CAPTURE
if (capturing)
capture_frame();
#endif
update_title_bar_info();
glfwSwapBuffers(window);
update_timer();
}
// Make w negative to signal that the mouse down event is done
if (mouseVec[3] > 0.0f)
mouseVec[3] = -mouseVec[3];
}
#if FEATURE_VIDEO_CAPTURE
if (capturing)
stop_video_capture();
#endif
retcode = EXIT_SUCCESS;
cleanup:
if (myArgv != NULL)
free(myArgv);
#if FEATURE_SOUND
cleanup_sound();
#endif
if (window != NULL)
glfwDestroyWindow(window);
glfwTerminate();
return retcode;
}
//------------------------------------------------------------------------------
// Textures
//------------------------------------------------------------------------------
static GLuint rgba_noise_texture(int width, int height)
{
GLuint texture;
uint32_t *pixels = malloc(width * height * sizeof(*pixels));
for (int i = 0; i < width * height; i++)
pixels[i] = (rand() & 0xFF) | ((rand() & 0xFF) << 8) | ((rand() & 0xFF) << 16) | ((rand() & 0xFF) << 24);
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glGenerateMipmap(GL_TEXTURE_2D);
free(pixels);
return texture;
}
static GLuint gray_noise_texture(int width, int height)
{
GLuint texture;
uint8_t *pixels = malloc(width * height * sizeof(*pixels));
for (int i = 0; i < width * height; i++)
pixels[i] = rand() & 0xFF;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, width, height, 0, GL_RED, GL_UNSIGNED_BYTE, pixels);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glGenerateMipmap(GL_TEXTURE_2D);
free(pixels);
return texture;
}
static GLuint image_texture(const char *filename)
{
GLuint texture;
unsigned int error;
struct plum_image *image = plum_load_image(filename, PLUM_MODE_FILENAME, PLUM_COLOR_32 | PLUM_ALPHA_INVERT, &error);
struct plum_metadata *colorinfo;
if (image == NULL)
{
fprintf(stderr, "Cannot load image '%s': %s\n", filename, plum_get_error_text(error));
return 0;
}
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
// Figure out which color format to use
colorinfo = plum_find_metadata(image, PLUM_METADATA_COLOR_DEPTH);
if (colorinfo != NULL && ((uint8_t *)colorinfo->data)[4] != 0) // grayscale
{
uint8_t *pixels = malloc(image->width * image->height);
for (int i = 0; i < image->width * image->height; i++)
pixels[i] = image->data32[i] & 0xFF;
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, image->width, image->height, 0, GL_RED, GL_UNSIGNED_BYTE, pixels);
free(pixels);
}
else // not grayscale
{
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image->width, image->height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image->data);
}
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glGenerateMipmap(GL_TEXTURE_2D);
plum_destroy_image(image);
return texture;
}
#if FEATURE_VIDEO_CAPTURE
//------------------------------------------------------------------------------
// Video capture
//------------------------------------------------------------------------------
static AVCodecContext *vidCodecCtx;
static AVCodecContext *audCodecCtx;
static AVFrame *vidFrame;
static AVFrame *audFrame;
static AVPacket *pkt;
static int videoFrameNum;
static struct SwsContext *sws;
static AVFormatContext *formatCtx;
static AVStream *vidStream;
static AVStream *audStream;
static AVCodecContext *setup_codec(AVFormatContext *formatCtx, AVStream *stream, enum AVCodecID codecId)
{
enum AVMediaType type = avcodec_get_type(codecId);
AVCodec *codec;
AVCodecContext *codecCtx;
if ((codec = avcodec_find_encoder(codecId)) == NULL)
{
fprintf(stderr, "Unable to find %s encoder\n", avcodec_get_name(codecId));
return NULL;
}
if ((codecCtx = avcodec_alloc_context3(codec)) == NULL)
{
fprintf(stderr, "Unable to allocate codec context\n");
return NULL;
}
switch (type)
{
case AVMEDIA_TYPE_VIDEO:
printf("video codec %s\n", avcodec_get_name(codecId));
codecCtx->height = screenHeight;
codecCtx->width = screenWidth;
codecCtx->pix_fmt = codec->pix_fmts[0];
codecCtx->time_base = av_make_q(1, captureFPS);
codecCtx->framerate = av_inv_q(codecCtx->time_base);
stream->time_base = codecCtx->time_base;
stream->avg_frame_rate = codecCtx->framerate;
break;
case AVMEDIA_TYPE_AUDIO:
printf("video codec %s\n", avcodec_get_name(codecId));
codecCtx->sample_fmt = codec->sample_fmts != NULL ? codec->sample_fmts[0] : AV_SAMPLE_FMT_S16;
codecCtx->sample_rate = codec->supported_samplerates != NULL ? codec->supported_samplerates[0] : soundSampleRate;
codecCtx->channel_layout = AV_CH_LAYOUT_STEREO;
codecCtx->channels = 2;
if (codec->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE)
{
codecCtx->frame_size = 1024;
puts("variable");
}
soundSampleRate = codecCtx->sample_rate;
printf("sample rate: %i\n", soundSampleRate);
break;
default:
fprintf(stderr, "%s media type not supported\n", av_get_media_type_string(type));
return NULL;
}
if (formatCtx->oformat->flags & AVFMT_GLOBALHEADER)
codecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
if (avcodec_open2(codecCtx, codec, NULL) < 0)
{
fprintf(stderr, "Unable to open %s codec context\n", avcodec_get_name(codecId));
return NULL;
}
if (codec->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE)
codecCtx->frame_size = 1024;
avcodec_parameters_from_context(stream->codecpar, codecCtx);
return codecCtx;
}
static bool start_video_capture(const char *filename)
{
enum AVCodecID vcodecId;
enum AVCodecID acodecId;
int result;
// Set up container format
avformat_alloc_output_context2(&formatCtx, NULL, NULL, filename);
if (formatCtx == NULL)
return false;
// Video encoder
vcodecId = formatCtx->oformat->video_codec;
if (vcodecId != AV_CODEC_ID_NONE)
{
vidStream = avformat_new_stream(formatCtx, NULL);
if ((vidCodecCtx = setup_codec(formatCtx, vidStream, vcodecId)) == NULL)
return false;
}
// Audio encoder
acodecId = formatCtx->oformat->audio_codec;
if (soundShader.inUse && acodecId != AV_CODEC_ID_NONE)
{
audStream = avformat_new_stream(formatCtx, NULL);
if ((audCodecCtx = setup_codec(formatCtx, audStream, acodecId)) == NULL)
return false;
}
if (vidCodecCtx == NULL && audCodecCtx == NULL)
return false;
// Write container
if ((result = avio_open(&formatCtx->pb, filename, AVIO_FLAG_WRITE)) < 0)
{
fprintf(stderr, "Unable to open output file '%s': %s\n", filename, av_err2str(result));
return false;
}
if ((result = avformat_write_header(formatCtx, NULL)) < 0)
{
fprintf(stderr, "Unable to write video header: %s\n", av_err2str(result));
return false;
}
// Allocate frames
if (vidCodecCtx != NULL)
{
if ((vidFrame = av_frame_alloc()) == NULL)
{
fprintf(stderr, "Could not allocate video frame\n");
return false;
}
vidFrame->format = vidCodecCtx->pix_fmt;
vidFrame->width = vidCodecCtx->width;
vidFrame->height = vidCodecCtx->height;
if ((result = av_frame_get_buffer(vidFrame, 0)) < 0)
{
fprintf(stderr, "Could not allocate the video frame buffer: %s\n", av_err2str(result));
return false;
}
}
if (audCodecCtx != NULL)
{
if ((audFrame = av_frame_alloc()) == NULL)
{
fprintf(stderr, "Could not allocate audio frame\n");
return false;
}
audFrame->format = audCodecCtx->sample_fmt;
audFrame->nb_samples = audCodecCtx->frame_size;
audFrame->channel_layout = audCodecCtx->channel_layout;
printf("format: %i, nb_samples: %i, channel_layout: %li\n", audFrame->format, audFrame->nb_samples, audFrame->channel_layout);
if ((result = av_frame_get_buffer(audFrame, 0)) < 0)
{
fprintf(stderr, "Could not allocate the audio frame buffer: %s\n", av_err2str(result));
return false;
}
}
// Allocate packet
pkt = av_packet_alloc();
if (pkt == NULL)
return false;
pkt->stream_index = 0;
// Initialize scaler (needed for image flipping and RGB-YUV conversion)
if (vidFrame != NULL)
{
sws = sws_getContext(
screenWidth, screenHeight, AV_PIX_FMT_RGB24, // source
vidFrame->width, vidFrame->height, vidFrame->format, // dest
0, NULL, NULL, NULL);
}
videoFrameNum = 0;
if (vidStream != NULL)
printf("vid stream time base: %i/%i\n", vidStream->time_base.num, vidStream->time_base.den);
if (audStream != NULL)
printf("aud stream time base: %i/%i\n", audStream->time_base.num, audStream->time_base.den);
if (vidCodecCtx != NULL)
printf("vid codec time base: %i/%i\n", vidCodecCtx->time_base.num, vidCodecCtx->time_base.den);
if (audCodecCtx != NULL)
printf("aud codec time base: %i/%i\n", audCodecCtx->time_base.num, audCodecCtx->time_base.den);
return true;
}
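/**
 * Sends a frame to the encoder and writes any completed packets to the output.
 * Passing frame = NULL flushes the encoder.
 */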
static void write_frame(AVCodecContext *codecCtx, AVStream *stream, AVFrame *frame, long int duration)
{
int response = avcodec_send_frame(codecCtx, frame);
while (response >= 0)
{
response = avcodec_receive_packet(codecCtx, pkt);
if (response == AVERROR(EAGAIN) || response == AVERROR_EOF)
break;
else if (response < 0)
{
fprintf(stderr, "Error receiving packet from encoder: %s", av_err2str(response));
return;
}
pkt->stream_index = stream->index;
pkt->duration = duration;
av_packet_rescale_ts(pkt, codecCtx->time_base, stream->time_base);
response = av_interleaved_write_frame(formatCtx, pkt);
if (response != 0)
{
fprintf(stderr, "Error writing frame: %s\n", av_err2str(response));
return;
}
}
}
/**
* Captures the OpenGL frame buffer and writes it to the video file
*/
static bool capture_frame(void)
{
// Capture video
if (vidCodecCtx != NULL)
{
if (av_frame_make_writable(vidFrame) == 0)
{
// Allocate buffer
int stride = screenWidth * 3; // size of each line (RGB); int so it can be negated below
uint8_t *pixels = malloc(screenWidth * screenHeight * 3);
uint8_t *lastLine = pixels + (screenHeight - 1) * stride;
glReadPixels(0, 0, screenWidth, screenHeight, GL_RGB, GL_UNSIGNED_BYTE, pixels);
// Copy the pixels into the frame, converting to the proper format.
// glReadPixels produces an inverted image, with the first line being the
// bottom one. By copying from the last line and specifying a negative
// stride, the image will be flipped right side up again.
sws_scale(sws,
(const uint8_t *[]){ lastLine }, // srcSlice
(int []){ -stride }, // srcStride
0, // srcSliceY
screenHeight, // srcSliceH
vidFrame->data, // dst
vidFrame->linesize); // dstStride
vidFrame->pts = videoFrameNum;
write_frame(vidCodecCtx, vidStream, vidFrame, 1);
free(pixels);
}
else
{
fputs("Error: video frame not writable\n", stderr);
return false;
}
}
// Capture audio (depends on FEATURE_SOUND for soundMap and MAX_SAMPLES)
#if FEATURE_SOUND
if (audCodecCtx != NULL)
{
static int pos; // position in the frame
static int currSample;
static int frameStart;
int end = soundSampleRate * (videoFrameNum + 1) / captureFPS;
if (end > MAX_SAMPLES)
end = MAX_SAMPLES;
assert(currSample <= end);
// fill one video frame's length of audio
if (av_frame_make_writable(audFrame) == 0)
{
while (currSample < end)
{
if (pos == audCodecCtx->frame_size)
{
// output frame
audFrame->pts = frameStart;
write_frame(audCodecCtx, audStream, audFrame, audCodecCtx->frame_size);
pos = 0;
frameStart = currSample;
}
uint8_t r = soundMap[currSample * 4 + 0];
uint8_t g = soundMap[currSample * 4 + 1];
uint8_t b = soundMap[currSample * 4 + 2];
uint8_t a = soundMap[currSample * 4 + 3];
int left = (r | (g << 8)) - 32768;
int right = (b | (a << 8)) - 32768;
switch (audCodecCtx->sample_fmt)
{
case AV_SAMPLE_FMT_S16:
((int16_t *)audFrame->data[0])[pos * 2 + 0] = left;
((int16_t *)audFrame->data[0])[pos * 2 + 1] = right;
break;
case AV_SAMPLE_FMT_S16P:
((int16_t *)audFrame->data[0])[pos] = left;
((int16_t *)audFrame->data[1])[pos] = right;
break;
case AV_SAMPLE_FMT_S32P:
((int32_t *)audFrame->data[0])[pos] = left << 16;
((int32_t *)audFrame->data[1])[pos] = right << 16;
break;
case AV_SAMPLE_FMT_FLTP:
((float *)audFrame->data[0])[pos] = (float)left / 32768.0f;
((float *)audFrame->data[1])[pos] = (float)right / 32768.0f;
break;
default:
fprintf(stderr, "Unhandled sample format %i\n", audCodecCtx->sample_fmt);
return false;
}
pos++;
currSample++;
}
}
else
{
fputs("Error: audio frame not writable\n", stderr);
return false;
}
}
#endif
videoFrameNum++;
return true;
}
static void stop_video_capture(void)
{
// flush output
if (vidStream != NULL)
write_frame(vidCodecCtx, vidStream, NULL, 0);
if (audStream != NULL)
write_frame(audCodecCtx, audStream, NULL, 0);
if (vidFrame != NULL)
av_frame_free(&vidFrame);
if (audFrame != NULL)
av_frame_free(&audFrame);
av_packet_free(&pkt);
av_write_trailer(formatCtx);
avio_close(formatCtx->pb);
avformat_free_context(formatCtx);
if (vidCodecCtx != NULL)
avcodec_free_context(&vidCodecCtx);
if (audCodecCtx != NULL)
avcodec_free_context(&audCodecCtx);
}
#endif