
@me-no-dev
Last active July 22, 2024 17:17
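// Async web handlers for the ESP32 camera driver: still capture (BMP/JPEG),
// MJPEG streaming, plus sensor status and control, built on ESPAsyncWebServer.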
#include "Arduino.h"
#include "esp_camera.h"
#include "ESPAsyncWebServer.h"
typedef struct {
  camera_fb_t * fb;
  size_t index;
} camera_frame_t;
#define PART_BOUNDARY "123456789000000000000987654321"
static const char* STREAM_CONTENT_TYPE = "multipart/x-mixed-replace;boundary=" PART_BOUNDARY;
static const char* STREAM_BOUNDARY = "\r\n--" PART_BOUNDARY "\r\n";
static const char* STREAM_PART = "Content-Type: %s\r\nContent-Length: %u\r\n\r\n";
static const char * JPG_CONTENT_TYPE = "image/jpeg";
static const char * BMP_CONTENT_TYPE = "image/x-windows-bmp";
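// Response that sends a heap-allocated image buffer once and frees it when fully sent.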
class AsyncBufferResponse: public AsyncAbstractResponse {
  private:
    uint8_t * _buf;
    size_t _len;
    size_t _index;
  public:
    AsyncBufferResponse(uint8_t * buf, size_t len, const char * contentType){
      _buf = buf;
      _len = len;
      _callback = nullptr;
      _code = 200;
      _contentLength = _len;
      _contentType = contentType;
      _index = 0;
    }
    ~AsyncBufferResponse(){
      if(_buf != nullptr){
        free(_buf);
      }
    }
    bool _sourceValid() const { return _buf != nullptr; }
    virtual size_t _fillBuffer(uint8_t *buf, size_t maxLen) override {
      size_t ret = _content(buf, maxLen, _index);
      if(ret != RESPONSE_TRY_AGAIN){
        _index += ret;
      }
      return ret;
    }
    size_t _content(uint8_t *buffer, size_t maxLen, size_t index){
      memcpy(buffer, _buf + index, maxLen);
      if((index + maxLen) == _len){
        free(_buf);
        _buf = nullptr;
      }
      return maxLen;
    }
};
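// Response that sends a camera frame buffer directly and returns it to the driver when fully sent.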
class AsyncFrameResponse: public AsyncAbstractResponse {
  private:
    camera_fb_t * fb;
    size_t _index;
  public:
    AsyncFrameResponse(camera_fb_t * frame, const char * contentType){
      _callback = nullptr;
      _code = 200;
      _contentLength = frame->len;
      _contentType = contentType;
      _index = 0;
      fb = frame;
    }
    ~AsyncFrameResponse(){
      if(fb != nullptr){
        esp_camera_fb_return(fb);
      }
    }
    bool _sourceValid() const { return fb != nullptr; }
    virtual size_t _fillBuffer(uint8_t *buf, size_t maxLen) override {
      size_t ret = _content(buf, maxLen, _index);
      if(ret != RESPONSE_TRY_AGAIN){
        _index += ret;
      }
      return ret;
    }
    size_t _content(uint8_t *buffer, size_t maxLen, size_t index){
      memcpy(buffer, fb->buf + index, maxLen);
      if((index + maxLen) == fb->len){
        esp_camera_fb_return(fb);
        fb = nullptr;
      }
      return maxLen;
    }
};
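// Chunked multipart/x-mixed-replace response that keeps fetching frames, producing an MJPEG stream.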
class AsyncJpegStreamResponse: public AsyncAbstractResponse {
  private:
    camera_frame_t _frame;
    size_t _index;
    size_t _jpg_buf_len;
    uint8_t * _jpg_buf;
    uint64_t lastAsyncRequest;
  public:
    AsyncJpegStreamResponse(){
      _callback = nullptr;
      _code = 200;
      _contentLength = 0;
      _contentType = STREAM_CONTENT_TYPE;
      _sendContentLength = false;
      _chunked = true;
      _index = 0;
      _jpg_buf_len = 0;
      _jpg_buf = NULL;
      lastAsyncRequest = 0;
      memset(&_frame, 0, sizeof(camera_frame_t));
    }
    ~AsyncJpegStreamResponse(){
      if(_frame.fb){
        if(_frame.fb->format != PIXFORMAT_JPEG){
          free(_jpg_buf);
        }
        esp_camera_fb_return(_frame.fb);
      }
    }
    bool _sourceValid() const {
      return true;
    }
    virtual size_t _fillBuffer(uint8_t *buf, size_t maxLen) override {
      size_t ret = _content(buf, maxLen, _index);
      if(ret != RESPONSE_TRY_AGAIN){
        _index += ret;
      }
      return ret;
    }
    size_t _content(uint8_t *buffer, size_t maxLen, size_t index){
      if(!_frame.fb || _frame.index == _jpg_buf_len){
        if(index && _frame.fb){
          uint64_t end = (uint64_t)micros();
          int fp = (end - lastAsyncRequest) / 1000;
          log_printf("Size: %uKB, Time: %ums (%.1ffps)\n", _jpg_buf_len / 1024, fp, 1000.0 / fp);
          lastAsyncRequest = end;
          if(_frame.fb->format != PIXFORMAT_JPEG){
            free(_jpg_buf);
          }
          esp_camera_fb_return(_frame.fb);
          _frame.fb = NULL;
          _jpg_buf_len = 0;
          _jpg_buf = NULL;
        }
        if(maxLen < (strlen(STREAM_BOUNDARY) + strlen(STREAM_PART) + strlen(JPG_CONTENT_TYPE) + 8)){
          //log_w("Not enough space for headers");
          return RESPONSE_TRY_AGAIN;
        }
        //get frame
        _frame.index = 0;
        _frame.fb = esp_camera_fb_get();
        if(_frame.fb == NULL){
          log_e("Camera frame failed");
          return 0;
        }
        if(_frame.fb->format != PIXFORMAT_JPEG){
          unsigned long st = millis();
          bool jpeg_converted = frame2jpg(_frame.fb, 80, &_jpg_buf, &_jpg_buf_len);
          if(!jpeg_converted){
            log_e("JPEG compression failed");
            esp_camera_fb_return(_frame.fb);
            _frame.fb = NULL;
            _jpg_buf_len = 0;
            _jpg_buf = NULL;
            return 0;
          }
          log_i("JPEG: %lums, %uB", millis() - st, _jpg_buf_len);
        } else {
          _jpg_buf_len = _frame.fb->len;
          _jpg_buf = _frame.fb->buf;
        }
        //send boundary
        size_t blen = 0;
        if(index){
          blen = strlen(STREAM_BOUNDARY);
          memcpy(buffer, STREAM_BOUNDARY, blen);
          buffer += blen;
        }
        //send header
        size_t hlen = sprintf((char *)buffer, STREAM_PART, JPG_CONTENT_TYPE, _jpg_buf_len);
        buffer += hlen;
        //send frame
        hlen = maxLen - hlen - blen;
        if(hlen > _jpg_buf_len){
          maxLen -= hlen - _jpg_buf_len;
          hlen = _jpg_buf_len;
        }
        memcpy(buffer, _jpg_buf, hlen);
        _frame.index += hlen;
        return maxLen;
      }
      size_t available = _jpg_buf_len - _frame.index;
      if(maxLen > available){
        maxLen = available;
      }
      memcpy(buffer, _jpg_buf + _frame.index, maxLen);
      _frame.index += maxLen;
      return maxLen;
    }
};
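// GET /bmp: capture a frame, convert it to BMP and send it.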
void sendBMP(AsyncWebServerRequest *request){
  camera_fb_t * fb = esp_camera_fb_get();
  if(fb == NULL){
    log_e("Camera frame failed");
    request->send(501);
    return;
  }
  uint8_t * buf = NULL;
  size_t buf_len = 0;
  unsigned long st = millis();
  bool converted = frame2bmp(fb, &buf, &buf_len);
  log_i("BMP: %lums, %uB", millis() - st, buf_len);
  esp_camera_fb_return(fb);
  if(!converted){
    request->send(501);
    return;
  }
  AsyncBufferResponse * response = new AsyncBufferResponse(buf, buf_len, BMP_CONTENT_TYPE);
  if(response == NULL){
    log_e("Response alloc failed");
    request->send(501);
    return;
  }
  response->addHeader("Access-Control-Allow-Origin", "*");
  request->send(response);
}
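// GET /capture: send a single frame as JPEG, converting first if the sensor is not in JPEG mode.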
void sendJpg(AsyncWebServerRequest *request){
  camera_fb_t * fb = esp_camera_fb_get();
  if(fb == NULL){
    log_e("Camera frame failed");
    request->send(501);
    return;
  }
  if(fb->format == PIXFORMAT_JPEG){
    AsyncFrameResponse * response = new AsyncFrameResponse(fb, JPG_CONTENT_TYPE);
    if(response == NULL){
      log_e("Response alloc failed");
      request->send(501);
      return;
    }
    response->addHeader("Access-Control-Allow-Origin", "*");
    request->send(response);
    return;
  }
  size_t jpg_buf_len = 0;
  uint8_t * jpg_buf = NULL;
  unsigned long st = millis();
  bool jpeg_converted = frame2jpg(fb, 80, &jpg_buf, &jpg_buf_len);
  esp_camera_fb_return(fb);
  if(!jpeg_converted){
    log_e("JPEG compression failed: %lu", millis());
    request->send(501);
    return;
  }
  log_i("JPEG: %lums, %uB", millis() - st, jpg_buf_len);
  AsyncBufferResponse * response = new AsyncBufferResponse(jpg_buf, jpg_buf_len, JPG_CONTENT_TYPE);
  if(response == NULL){
    log_e("Response alloc failed");
    request->send(501);
    return;
  }
  response->addHeader("Access-Control-Allow-Origin", "*");
  request->send(response);
}
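// GET /stream: start an endless MJPEG stream.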
void streamJpg(AsyncWebServerRequest *request){
  AsyncJpegStreamResponse *response = new AsyncJpegStreamResponse();
  if(!response){
    request->send(501);
    return;
  }
  response->addHeader("Access-Control-Allow-Origin", "*");
  request->send(response);
}
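// GET /status: report the current sensor settings as JSON.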
void getCameraStatus(AsyncWebServerRequest *request){
  static char json_response[1024];
  sensor_t * s = esp_camera_sensor_get();
  if(s == NULL){
    request->send(501);
    return;
  }
  char * p = json_response;
  *p++ = '{';
  p += sprintf(p, "\"framesize\":%u,", s->status.framesize);
  p += sprintf(p, "\"quality\":%u,", s->status.quality);
  p += sprintf(p, "\"brightness\":%d,", s->status.brightness);
  p += sprintf(p, "\"contrast\":%d,", s->status.contrast);
  p += sprintf(p, "\"saturation\":%d,", s->status.saturation);
  p += sprintf(p, "\"sharpness\":%d,", s->status.sharpness);
  p += sprintf(p, "\"special_effect\":%u,", s->status.special_effect);
  p += sprintf(p, "\"wb_mode\":%u,", s->status.wb_mode);
  p += sprintf(p, "\"awb\":%u,", s->status.awb);
  p += sprintf(p, "\"awb_gain\":%u,", s->status.awb_gain);
  p += sprintf(p, "\"aec\":%u,", s->status.aec);
  p += sprintf(p, "\"aec2\":%u,", s->status.aec2);
  p += sprintf(p, "\"denoise\":%u,", s->status.denoise);
  p += sprintf(p, "\"ae_level\":%d,", s->status.ae_level);
  p += sprintf(p, "\"aec_value\":%u,", s->status.aec_value);
  p += sprintf(p, "\"agc\":%u,", s->status.agc);
  p += sprintf(p, "\"agc_gain\":%u,", s->status.agc_gain);
  p += sprintf(p, "\"gainceiling\":%u,", s->status.gainceiling);
  p += sprintf(p, "\"bpc\":%u,", s->status.bpc);
  p += sprintf(p, "\"wpc\":%u,", s->status.wpc);
  p += sprintf(p, "\"raw_gma\":%u,", s->status.raw_gma);
  p += sprintf(p, "\"lenc\":%u,", s->status.lenc);
  p += sprintf(p, "\"hmirror\":%u,", s->status.hmirror);
  p += sprintf(p, "\"vflip\":%u,", s->status.vflip);
  p += sprintf(p, "\"dcw\":%u,", s->status.dcw);
  p += sprintf(p, "\"colorbar\":%u", s->status.colorbar);
  *p++ = '}';
  *p++ = 0;
  AsyncWebServerResponse * response = request->beginResponse(200, "application/json", json_response);
  response->addHeader("Access-Control-Allow-Origin", "*");
  request->send(response);
}
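// GET /control?var=<name>&val=<value>: change a single sensor setting.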
void setCameraVar(AsyncWebServerRequest *request){
  if(!request->hasArg("var") || !request->hasArg("val")){
    request->send(404);
    return;
  }
  String var = request->arg("var");
  const char * variable = var.c_str();
  int val = atoi(request->arg("val").c_str());
  sensor_t * s = esp_camera_sensor_get();
  if(s == NULL){
    request->send(501);
    return;
  }
  int res = 0;
  if(!strcmp(variable, "framesize")) res = s->set_framesize(s, (framesize_t)val);
  else if(!strcmp(variable, "quality")) res = s->set_quality(s, val);
  else if(!strcmp(variable, "contrast")) res = s->set_contrast(s, val);
  else if(!strcmp(variable, "brightness")) res = s->set_brightness(s, val);
  else if(!strcmp(variable, "saturation")) res = s->set_saturation(s, val);
  else if(!strcmp(variable, "sharpness")) res = s->set_sharpness(s, val);
  else if(!strcmp(variable, "gainceiling")) res = s->set_gainceiling(s, (gainceiling_t)val);
  else if(!strcmp(variable, "colorbar")) res = s->set_colorbar(s, val);
  else if(!strcmp(variable, "awb")) res = s->set_whitebal(s, val);
  else if(!strcmp(variable, "agc")) res = s->set_gain_ctrl(s, val);
  else if(!strcmp(variable, "aec")) res = s->set_exposure_ctrl(s, val);
  else if(!strcmp(variable, "hmirror")) res = s->set_hmirror(s, val);
  else if(!strcmp(variable, "vflip")) res = s->set_vflip(s, val);
  else if(!strcmp(variable, "awb_gain")) res = s->set_awb_gain(s, val);
  else if(!strcmp(variable, "agc_gain")) res = s->set_agc_gain(s, val);
  else if(!strcmp(variable, "aec_value")) res = s->set_aec_value(s, val);
  else if(!strcmp(variable, "aec2")) res = s->set_aec2(s, val);
  else if(!strcmp(variable, "denoise")) res = s->set_denoise(s, val);
  else if(!strcmp(variable, "dcw")) res = s->set_dcw(s, val);
  else if(!strcmp(variable, "bpc")) res = s->set_bpc(s, val);
  else if(!strcmp(variable, "wpc")) res = s->set_wpc(s, val);
  else if(!strcmp(variable, "raw_gma")) res = s->set_raw_gma(s, val);
  else if(!strcmp(variable, "lenc")) res = s->set_lenc(s, val);
  else if(!strcmp(variable, "special_effect")) res = s->set_special_effect(s, val);
  else if(!strcmp(variable, "wb_mode")) res = s->set_wb_mode(s, val);
  else if(!strcmp(variable, "ae_level")) res = s->set_ae_level(s, val);
  else {
    log_e("unknown setting %s", var.c_str());
    request->send(404);
    return;
  }
  log_d("Got setting %s with value %d. Res: %d", var.c_str(), val, res);
  AsyncWebServerResponse * response = request->beginResponse(200);
  response->addHeader("Access-Control-Allow-Origin", "*");
  request->send(response);
}
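// Async HTTP server listening on port 80; routes are attached in setup().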
AsyncWebServer server(80);
void setup(){
  Serial.begin(115200);
  Serial.setDebugOutput(true);
  server.on("/bmp", HTTP_GET, sendBMP);
  server.on("/capture", HTTP_GET, sendJpg);
  server.on("/stream", HTTP_GET, streamJpg);
  server.on("/control", HTTP_GET, setCameraVar);
  server.on("/status", HTTP_GET, getCameraStatus);
  server.begin();
}

void loop(){
}
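As posted, setup() only registers the HTTP routes; the camera driver still has to be initialized and WiFi connected before server.begin() can serve anything. A minimal sketch of those missing steps, meant to be called at the top of setup(); the setupCameraAndWiFi() helper, the placeholder credentials, and the omitted pin map are assumptions, not part of the original gist:

#include <WiFi.h>  // pulled in by ESPAsyncWebServer on ESP32, shown here for clarity

// Hypothetical placeholders -- substitute your own network credentials.
static const char * WIFI_SSID = "your-ssid";
static const char * WIFI_PASS = "your-password";

void setupCameraAndWiFi(){
  // Pin assignments are board specific (AI-Thinker, M5Stack, etc.); fill in the
  // camera_config_t pin map for your module before calling esp_camera_init().
  camera_config_t config = {};
  config.xclk_freq_hz = 20000000;
  config.pixel_format = PIXFORMAT_JPEG;  // the handlers above are cheapest with a JPEG-capable sensor
  config.frame_size = FRAMESIZE_VGA;
  config.jpeg_quality = 12;
  config.fb_count = 2;
  // config.pin_* = ...;                 // board pin map goes here
  if(esp_camera_init(&config) != ESP_OK){
    log_e("Camera init failed");
    return;
  }

  WiFi.begin(WIFI_SSID, WIFI_PASS);
  while(WiFi.status() != WL_CONNECTED){
    delay(250);
  }
  Serial.println(WiFi.localIP());
}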
@spawn451

spawn451 commented Aug 7, 2023

It seems that setting _chunked = false; makes it work with VLC.
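For reference, that tweak is a single line in the AsyncJpegStreamResponse constructor above; shown here as a sketch, since whether it is needed likely depends on the client and library version:

    AsyncJpegStreamResponse(){
      _callback = nullptr;
      _code = 200;
      _contentLength = 0;
      _contentType = STREAM_CONTENT_TYPE;
      _sendContentLength = false;
      _chunked = false;  // was true; reportedly lets VLC play the stream
      _index = 0;
      _jpg_buf_len = 0;
      _jpg_buf = NULL;
      lastAsyncRequest = 0;
      memset(&_frame, 0, sizeof(camera_frame_t));
    }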

@zekageri

zekageri commented Sep 9, 2023

Can it work on websockets?
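ESPAsyncWebServer also ships an AsyncWebSocket handler, so in principle JPEG frames can be pushed to connected clients as binary messages instead of an HTTP multipart stream. A minimal sketch, assuming the camera is already initialized in JPEG mode; the server/handler names and the pushFrame() helper below are illustrative, not part of this gist:

#include "esp_camera.h"
#include "ESPAsyncWebServer.h"

AsyncWebServer wsServer(80);       // could also reuse the existing server(80)
AsyncWebSocket ws("/ws");          // clients connect to ws://<device-ip>/ws

void setupWs(){
  wsServer.addHandler(&ws);
  wsServer.begin();
}

// Call periodically (e.g. from loop()) to broadcast the latest frame.
void pushFrame(){
  if(ws.count() == 0){
    return;                        // nobody connected, skip the capture
  }
  camera_fb_t * fb = esp_camera_fb_get();
  if(fb == NULL){
    return;
  }
  // Send the JPEG frame to every connected client as one binary message.
  ws.binaryAll(fb->buf, fb->len);
  esp_camera_fb_return(fb);
  ws.cleanupClients();
}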

@javierandrango

I'm learning web development, and as a practice project I built my own version of a web application to stream video. You can check the resulting project on my GitHub. I used an ESP32-CAM with an OV2640 sensor, PlatformIO in VS Code instead of the Arduino IDE, and LittleFS to store the web files (.html, .js, .css, .svg) on the filesystem.

Link: https://github.com/javierandrango/microcontrollers/tree/main/ESP32-CAM/video-streaming

Thanks @me-no-dev, your code became my project's starting point.
