ESP32CAM人工智能教学18
获取数据并显示
如果我们给ESP32Cam外挂一些传感器(比如温湿度传感器、超声波测距传感器、红外人体传感器等),我们怎么把ESP32Cam捕获到的数据,传递到客户端的浏览器,并在网页index.html中显示出来呢?
- 获取数据
官方示例程序CameraWebServer其实是把ESP32Cam开发板打造成一个网页服务器,为用户提供几个功能网页的服务。这样,这个程序就包含前端的网页代码(即发送到用户浏览器中执行的index.html,我们可以通过查看网页源代码获得,在前面的第8课、第11课、第17课中,我们都已经把这个前端网页代码,改成了明码了)。
除了前端的网页代码,还需要在ESP32Cam中运行服务器的后端响应代码。在服务器的后端,startCameraServer就是开启服务器的功能代码,在里面开启了几个URI,也就是开启了几个可以供用户访问的功能网页,每一个网页都对应这一个处理函数的句柄,这个就是服务器的后端。所以在这里可以看到这个程序的前端和后端程序代码了。比如我们在浏览器输入http://192.168.1.22/,其实访问的就是index这个网页,而对应的后端处理程序就是index_handler。
其实,在官方示例程序CameraWebServer中,确实有从ESP32Cam开发板中获取数据的功能,这个功能隐藏在页面下端的折叠菜单中。
//本例中的视频采用的是81端口(代码第290行左右)
//在本地网络中,可以一边查看视频,一边获取数据,两种请求同时进行
#include "esp_camera.h"
#include <WiFi.h>
#include "esp_http_server.h"
const char* ssid = "ChinaNet-xxVP";
const char* password = "123456789";
void startCameraServer();
// 这个是index.html网页的源代码
static const char mainPage[] = u8R"(
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width,initial-scale=1">
<title>ESP32 OV2460</title>
</head>
<body>
<section class="main">
<div id="content">
<div id="sidebar">
<nav id="menu">
<section id="buttons">
<button id="toggle-stream">Start Stream</button>
<button id="stggle-stream">Stop Stream</button>
</section>
<div class="input-group" id="get-reg-group">
<label for="get-reg">Reg, Mask</label>
<div class="text">
<input id="get-reg-addr" type="text" minlength="4" maxlength="6" size="6" value="0x111">
<input id="get-reg-mask" type="text" minlength="4" maxlength="6" size="6" value="0x80">
</div>
<button class="inline-button" id="get-reg">Get</button>
<div class="input-group">
<label for="get-reg-value">Value</label>
<span id="get-reg-value">0x1234</span>
</div>
</div>
</nav>
</div>
<figure>
<div id="stream-container" class="image-container hidden">
<img id="stream" src="" crossorigin>
</div>
</figure>
</div>
</section>
<script>
document.addEventListener('DOMContentLoaded', function (event) {
var baseHost = document.location.origin
var streamUrl = baseHost + ':81'
function fetchUrl(url, cb){
fetch(url)
.then(function (response) {
if (response.status !== 200) {
cb(response.status, response.statusText);
} else {
response.text().then(function(data){
cb(200, data);
}).catch(function(err) {
cb(-1, err);
});
}
})
.catch(function(err) {
cb(-1, err);
});
}
function setWindow(start_x, start_y, end_x, end_y, offset_x, offset_y, total_x, total_y, output_x, output_y, scaling, binning, cb){
fetchUrl(`${baseHost}/resolution?sx=${start_x}&sy=${start_y}&ex=${end_x}&ey=${end_y}&offx=${offset_x}&offy=${offset_y}&tx=${total_x}&ty=${total_y}&ox=${output_x}&oy=${output_y}&scale=${scaling}&binning=${binning}`, cb);
}
document
.querySelectorAll('.close')
.forEach(el => {
el.onclick = () => {
hide(el.parentNode)
}
})
const view = document.getElementById('stream')
const streamButton = document.getElementById('toggle-stream')
const streamButton2 = document.getElementById('stggle-stream')
streamButton.onclick = () => {
view.src = `${streamUrl}/stream`
show(viewContainer)
}
streamButton2.onclick = () => {
window.stop();
}
const getRegButton = document.getElementById('get-reg')
getRegButton.onclick = () => {
let reg = parseInt(document.getElementById('get-reg-addr').value);
let mask = parseInt(document.getElementById('get-reg-mask').value);
let value = document.getElementById('get-reg-value');
fetchUrl(`${baseHost}/greg?reg=${reg}&mask=${mask}`, function(code, txt){
if(code != 200){
value.innerHTML = 'Error['+code+']: '+txt;
} else {
value.innerHTML = '0x'+parseInt(txt).toString(16)+' ('+txt+')';
}
});
}
})
</script>
</body>
</html>
)";
///
//注意了!!! 这里的摄像头选择的不一样,不是AI-think !!!
// 摄像头引脚 CAMERA_MODEL_WROVER_KIT
#define PWDN_GPIO_NUM -1
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 21
#define SIOD_GPIO_NUM 26
#define SIOC_GPIO_NUM 27
#define Y9_GPIO_NUM 35
#define Y8_GPIO_NUM 34
#define Y7_GPIO_NUM 39
#define Y6_GPIO_NUM 36
#define Y5_GPIO_NUM 19
#define Y4_GPIO_NUM 18
#define Y3_GPIO_NUM 5
#define Y2_GPIO_NUM 4
#define VSYNC_GPIO_NUM 25
#define HREF_GPIO_NUM 23
#define PCLK_GPIO_NUM 22
///
// 开启调试信息
#if defined(ARDUINO_ARCH_ESP32) && defined(CONFIG_ARDUHAL_ESP_LOG)
#include "esp32-hal-log.h"
#endif
// 开启模块的存储 PSRAM
#ifdef BOARD_HAS_PSRAM
#define CONFIG_ESP_FACE_DETECT_ENABLED 1
#define CONFIG_ESP_FACE_RECOGNITION_ENABLED 0
#endif
#define PART_BOUNDARY "123456789000000000000987654321"
static const char *_STREAM_CONTENT_TYPE = "multipart/x-mixed-replace;boundary=" PART_BOUNDARY;
static const char *_STREAM_BOUNDARY = "\r\n--" PART_BOUNDARY "\r\n";
static const char *_STREAM_PART = "Content-Type: image/jpeg\r\nContent-Length: %u\r\nX-Timestamp: %d.%06d\r\n\r\n";
httpd_handle_t stream_httpd = NULL;
httpd_handle_t camera_httpd = NULL;
static esp_err_t stream_handler(httpd_req_t *req)
{
camera_fb_t *fb = NULL;
struct timeval _timestamp;
esp_err_t res = ESP_OK;
size_t _jpg_buf_len = 0;
uint8_t *_jpg_buf = NULL;
char *part_buf[128];
res = httpd_resp_set_type(req, _STREAM_CONTENT_TYPE);
if (res != ESP_OK)
{
return res;
}
httpd_resp_set_hdr(req, "Access-Control-Allow-Origin", "*");
httpd_resp_set_hdr(req, "X-Framerate", "60");
while (true)
{
fb = esp_camera_fb_get();
if (!fb)
{
log_e("Camera capture failed");
res = ESP_FAIL;
}
else
{ // 从摄像头获取图片的数据
_jpg_buf_len = fb->len;
_jpg_buf = fb->buf;
}
if (res == ESP_OK)
{
res = httpd_resp_send_chunk(req, _STREAM_BOUNDARY, strlen(_STREAM_BOUNDARY));
}
if (res == ESP_OK)
{
size_t hlen = snprintf((char *)part_buf, 128, _STREAM_PART, _jpg_buf_len, _timestamp.tv_sec, _timestamp.tv_usec);
res = httpd_resp_send_chunk(req, (const char *)part_buf, hlen);
}
if (res == ESP_OK)
{
res = httpd_resp_send_chunk(req, (const char *)_jpg_buf, _jpg_buf_len);
}
// 清除相关的内存
if (fb)
{
esp_camera_fb_return(fb);
fb = NULL;
_jpg_buf = NULL;
}
else if (_jpg_buf)
{
free(_jpg_buf);
_jpg_buf = NULL;
}
if (res != ESP_OK)
{
log_e("Send frame failed");
break;
}
}
return res;
}
static esp_err_t greg_handler(httpd_req_t *req)
{
//我们以发送数字值100为例,可以换成传感器捕获的数字
int res = 100;
char buffer[20];
const char * val = itoa(res, buffer, 10);
httpd_resp_set_hdr(req, "Access-Control-Allow-Origin", "*");
return httpd_resp_send(req, val, strlen(val));
}
static esp_err_t index_handler(httpd_req_t *req)
{
httpd_resp_set_type(req, "text/html");
//httpd_resp_set_hdr(req, "Content-Encoding", "gzip");
httpd_resp_set_hdr(req, "Content-Encoding", "html");
sensor_t *s = esp_camera_sensor_get();
if (s != NULL) {
//return httpd_resp_send(req, (const char *)index_ov2640_html_gz, index_ov2640_html_gz_len);
const char* charHtml = mainPage;
return httpd_resp_send(req, (const char *)charHtml, strlen(charHtml));
} else {
log_e("Camera sensor not found");
return httpd_resp_send_500(req);
}
}
void startCameraServer()
{
httpd_config_t config = HTTPD_DEFAULT_CONFIG();
config.max_uri_handlers = 16;
httpd_uri_t index_uri = {
.uri = "/",
.method = HTTP_GET,
.handler = index_handler,
.user_ctx = NULL
};
httpd_uri_t greg_uri = {
.uri = "/greg",
.method = HTTP_GET,
.handler = greg_handler,
.user_ctx = NULL
};
httpd_uri_t stream_uri = {
.uri = "/stream",
.method = HTTP_GET,
.handler = stream_handler,
.user_ctx = NULL
};
log_i("Starting web server on port: '%d'", config.server_port);
if (httpd_start(&camera_httpd, &config) == ESP_OK)
{
httpd_register_uri_handler(camera_httpd, &index_uri);
httpd_register_uri_handler(camera_httpd, &greg_uri);
}
config.server_port += 1;
config.ctrl_port += 1;
log_i("Starting stream server on port: '%d'", config.server_port);
if (httpd_start(&stream_httpd, &config) == ESP_OK)
{
httpd_register_uri_handler(stream_httpd, &stream_uri);
}
}
///
void setup() {
Serial.begin(115200);
Serial.setDebugOutput(true);
Serial.println();
camera_config_t config;
config.ledc_channel = LEDC_CHANNEL_0;
config.ledc_timer = LEDC_TIMER_0;
config.pin_d0 = Y2_GPIO_NUM;
config.pin_d1 = Y3_GPIO_NUM;
config.pin_d2 = Y4_GPIO_NUM;
config.pin_d3 = Y5_GPIO_NUM;
config.pin_d4 = Y6_GPIO_NUM;
config.pin_d5 = Y7_GPIO_NUM;
config.pin_d6 = Y8_GPIO_NUM;
config.pin_d7 = Y9_GPIO_NUM;
config.pin_xclk = XCLK_GPIO_NUM;
config.pin_pclk = PCLK_GPIO_NUM;
config.pin_vsync = VSYNC_GPIO_NUM;
config.pin_href = HREF_GPIO_NUM;
config.pin_sccb_sda = SIOD_GPIO_NUM;
config.pin_sccb_scl = SIOC_GPIO_NUM;
config.pin_pwdn = PWDN_GPIO_NUM;
config.pin_reset = RESET_GPIO_NUM;
config.xclk_freq_hz = 20000000;
config.frame_size = FRAMESIZE_UXGA;
config.pixel_format = PIXFORMAT_JPEG; // for streaming
//config.pixel_format = PIXFORMAT_RGB565; // for face detection/recognition
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.jpeg_quality = 12;
config.fb_count = 1;
// if PSRAM IC present, init with UXGA resolution and higher JPEG quality
// for larger pre-allocated frame buffer.
if(config.pixel_format == PIXFORMAT_JPEG){
if(psramFound()){
config.jpeg_quality = 10;
config.fb_count = 2;
config.grab_mode = CAMERA_GRAB_LATEST;
} else {
// Limit the frame size when PSRAM is not available
config.frame_size = FRAMESIZE_SVGA;
config.fb_location = CAMERA_FB_IN_DRAM;
}
}
// camera init
esp_err_t err = esp_camera_init(&config);
if (err != ESP_OK) {
Serial.printf("Camera init failed with error 0x%x", err);
return;
}
sensor_t * s = esp_camera_sensor_get();
// drop down frame size for higher initial frame rate
if(config.pixel_format == PIXFORMAT_JPEG){
s->set_framesize(s, FRAMESIZE_QVGA);
}
WiFi.begin(ssid, password);
WiFi.setSleep(false);
while (WiFi.status() != WL_CONNECTED) {
delay(500);
Serial.print(".");
}
Serial.println("");
Serial.println("WiFi connected");
startCameraServer();
Serial.print("Camera Ready! Use 'http://");
Serial.print(WiFi.localIP());
Serial.println("' to connect");
}
void loop() {
// Do nothing. Everything is done in another task by the web server
delay(10000);
}
在这个修改程序中,我们保留了获取数据的Get按钮,当用户按动页面上的Get按钮时,会方位服务器的 /greg的URI(页面),然后会触发后端的greg_handler的处理函数,然后ESP32Cam把一个100的数字,发送给客户端,在客户端的浏览器中,会收到并显示这个数字100。
- 视频与控制的冲突问题分析
在前面的修改程序中,视频页面 /stream使用的是81端口,因此在网页中,视频流和控制是可以同时进行的。这个我们可以通俗地理解为,ESP32Cam开发板开通了两个通道,一个是80端口,用于响应用户的按钮,发送接收用户的数据;另外专门开辟了一个81端口,专门用户把摄像头的图像输出给用户。这样用户在浏览器端,就能一边打开视频,一边发送或接收数据,两种需求都能同时满足。(在本地连接的时候,也就是手机和ESP32Cam连接的是同一个WiFi)
如果我们想把ESP32Cam推到公网上面去的话(第17课内容),因为公网的FRP一条隧道只能连接一个端口,所以只能把视频服务的 /stream和获取数据的 /greg等共用一个80端口。
这样能解决视频推出到公网的问题,但是也造成了一个问题,就是在后端的视频处理函数stream_handle中,程序陷入的循环体while中无法自拔,(ESP32Cam会不停歇地给浏览器发送摄像头的图片)。因此当我们打开视频的时候,这时候是无法实现发送或获取数据的操作的,这些请求都会因为后端陷入循环而被搁置,只有等到关闭了视频,后端跳出了循环,这样才能处理用户的请求的。
//本例中的视频采用的是80端口(代码第290行左右)
//在开放单个端口的公网中,查看视频和获取数据不能同时进行,只能停下视频才能获取数据
#include "esp_camera.h"
#include <WiFi.h>
#include "esp_http_server.h"
const char* ssid = "ChinaNet-xxVP";
const char* password = "123456789";
void startCameraServer();
// 这个是index.html网页的源代码
static const char mainPage[] = u8R"(
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width,initial-scale=1">
<title>ESP32 OV2460</title>
</head>
<body>
<section class="main">
<div id="content">
<div id="sidebar">
<nav id="menu">
<section id="buttons">
<button id="toggle-stream">Start Stream</button>
<button id="stggle-stream">Stop Stream</button>
</section>
<div class="input-group" id="get-reg-group">
<label for="get-reg">Reg, Mask</label>
<div class="text">
<input id="get-reg-addr" type="text" minlength="4" maxlength="6" size="6" value="0x111">
<input id="get-reg-mask" type="text" minlength="4" maxlength="6" size="6" value="0x80">
</div>
<button class="inline-button" id="get-reg">Get</button>
<div class="input-group">
<label for="get-reg-value">Value</label>
<span id="get-reg-value">0x1234</span>
</div>
</div>
</nav>
</div>
<figure>
<div id="stream-container" class="image-container hidden">
<img id="stream" src="" crossorigin>
</div>
</figure>
</div>
</section>
<script>
document.addEventListener('DOMContentLoaded', function (event) {
var baseHost = document.location.origin
function fetchUrl(url, cb){
fetch(url)
.then(function (response) {
if (response.status !== 200) {
cb(response.status, response.statusText);
} else {
response.text().then(function(data){
cb(200, data);
}).catch(function(err) {
cb(-1, err);
});
}
})
.catch(function(err) {
cb(-1, err);
});
}
function setWindow(start_x, start_y, end_x, end_y, offset_x, offset_y, total_x, total_y, output_x, output_y, scaling, binning, cb){
fetchUrl(`${baseHost}/resolution?sx=${start_x}&sy=${start_y}&ex=${end_x}&ey=${end_y}&offx=${offset_x}&offy=${offset_y}&tx=${total_x}&ty=${total_y}&ox=${output_x}&oy=${output_y}&scale=${scaling}&binning=${binning}`, cb);
}
document
.querySelectorAll('.close')
.forEach(el => {
el.onclick = () => {
hide(el.parentNode)
}
})
const view = document.getElementById('stream')
const streamButton = document.getElementById('toggle-stream')
const streamButton2 = document.getElementById('stggle-stream')
streamButton.onclick = () => {
view.src = `${baseHost}/capture`
show(viewContainer)
}
streamButton2.onclick = () => {
window.stop();
}
const getRegButton = document.getElementById('get-reg')
getRegButton.onclick = () => {
let reg = parseInt(document.getElementById('get-reg-addr').value);
let mask = parseInt(document.getElementById('get-reg-mask').value);
let value = document.getElementById('get-reg-value');
fetchUrl(`${baseHost}/greg?reg=${reg}&mask=${mask}`, function(code, txt){
if(code != 200){
value.innerHTML = 'Error['+code+']: '+txt;
} else {
value.innerHTML = '0x'+parseInt(txt).toString(16)+' ('+txt+')';
}
});
}
})
</script>
</body>
</html>
)";
///
//注意了!!! 这里的摄像头选择的不一样,不是AI-think !!!
// 摄像头引脚 CAMERA_MODEL_WROVER_KIT
#define PWDN_GPIO_NUM -1
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 21
#define SIOD_GPIO_NUM 26
#define SIOC_GPIO_NUM 27
#define Y9_GPIO_NUM 35
#define Y8_GPIO_NUM 34
#define Y7_GPIO_NUM 39
#define Y6_GPIO_NUM 36
#define Y5_GPIO_NUM 19
#define Y4_GPIO_NUM 18
#define Y3_GPIO_NUM 5
#define Y2_GPIO_NUM 4
#define VSYNC_GPIO_NUM 25
#define HREF_GPIO_NUM 23
#define PCLK_GPIO_NUM 22
///
// 开启调试信息
#if defined(ARDUINO_ARCH_ESP32) && defined(CONFIG_ARDUHAL_ESP_LOG)
#include "esp32-hal-log.h"
#endif
// 开启模块的存储 PSRAM
#ifdef BOARD_HAS_PSRAM
#define CONFIG_ESP_FACE_DETECT_ENABLED 1
#define CONFIG_ESP_FACE_RECOGNITION_ENABLED 0
#endif
#define PART_BOUNDARY "123456789000000000000987654321"
static const char *_STREAM_CONTENT_TYPE = "multipart/x-mixed-replace;boundary=" PART_BOUNDARY;
static const char *_STREAM_BOUNDARY = "\r\n--" PART_BOUNDARY "\r\n";
static const char *_STREAM_PART = "Content-Type: image/jpeg\r\nContent-Length: %u\r\nX-Timestamp: %d.%06d\r\n\r\n";
httpd_handle_t camera_httpd = NULL;
httpd_handle_t stream_httpd = NULL;
static esp_err_t capture_handler(httpd_req_t *req)
{
camera_fb_t *fb = NULL;
struct timeval _timestamp;
esp_err_t res = ESP_OK;
size_t _jpg_buf_len = 0;
uint8_t *_jpg_buf = NULL;
char *part_buf[128];
res = httpd_resp_set_type(req, _STREAM_CONTENT_TYPE);
if (res != ESP_OK)
{
return res;
}
httpd_resp_set_hdr(req, "Access-Control-Allow-Origin", "*");
httpd_resp_set_hdr(req, "X-Framerate", "60");
while (true)
{
fb = esp_camera_fb_get();
if (!fb)
{
log_e("Camera capture failed");
res = ESP_FAIL;
}
else
{ // 从摄像头获取图片的数据
_jpg_buf_len = fb->len;
_jpg_buf = fb->buf;
}
if (res == ESP_OK)
{
res = httpd_resp_send_chunk(req, _STREAM_BOUNDARY, strlen(_STREAM_BOUNDARY));
}
if (res == ESP_OK)
{
size_t hlen = snprintf((char *)part_buf, 128, _STREAM_PART, _jpg_buf_len, _timestamp.tv_sec, _timestamp.tv_usec);
res = httpd_resp_send_chunk(req, (const char *)part_buf, hlen);
}
if (res == ESP_OK)
{
res = httpd_resp_send_chunk(req, (const char *)_jpg_buf, _jpg_buf_len);
}
// 清除相关的内存
if (fb)
{
esp_camera_fb_return(fb);
fb = NULL;
_jpg_buf = NULL;
}
else if (_jpg_buf)
{
free(_jpg_buf);
_jpg_buf = NULL;
}
if (res != ESP_OK)
{
log_e("Send frame failed");
break;
}
}
return res;
}
static esp_err_t greg_handler(httpd_req_t *req)
{
//我们以发送数字值100为例,可以换成传感器捕获的数字
int res = 100;
char buffer[20];
const char * val = itoa(res, buffer, 10);
httpd_resp_set_hdr(req, "Access-Control-Allow-Origin", "*");
return httpd_resp_send(req, val, strlen(val));
}
static esp_err_t index_handler(httpd_req_t *req)
{
httpd_resp_set_type(req, "text/html");
//httpd_resp_set_hdr(req, "Content-Encoding", "gzip");
httpd_resp_set_hdr(req, "Content-Encoding", "html");
sensor_t *s = esp_camera_sensor_get();
if (s != NULL) {
//return httpd_resp_send(req, (const char *)index_ov2640_html_gz, index_ov2640_html_gz_len);
const char* charHtml = mainPage;
return httpd_resp_send(req, (const char *)charHtml, strlen(charHtml));
} else {
log_e("Camera sensor not found");
return httpd_resp_send_500(req);
}
}
void startCameraServer()
{
httpd_config_t config = HTTPD_DEFAULT_CONFIG();
config.max_uri_handlers = 16;
httpd_uri_t index_uri = {
.uri = "/",
.method = HTTP_GET,
.handler = index_handler,
.user_ctx = NULL
};
httpd_uri_t greg_uri = {
.uri = "/greg",
.method = HTTP_GET,
.handler = greg_handler,
.user_ctx = NULL
};
httpd_uri_t capture_uri = {
.uri = "/capture",
.method = HTTP_GET,
.handler = capture_handler,
.user_ctx = NULL
};
log_i("Starting web server on port: '%d'", config.server_port);
if (httpd_start(&camera_httpd, &config) == ESP_OK)
{
httpd_register_uri_handler(camera_httpd, &index_uri);
httpd_register_uri_handler(camera_httpd, &greg_uri);
httpd_register_uri_handler(camera_httpd, &capture_uri);
}
}
///
void setup() {
Serial.begin(115200);
Serial.setDebugOutput(true);
Serial.println();
camera_config_t config;
config.ledc_channel = LEDC_CHANNEL_0;
config.ledc_timer = LEDC_TIMER_0;
config.pin_d0 = Y2_GPIO_NUM;
config.pin_d1 = Y3_GPIO_NUM;
config.pin_d2 = Y4_GPIO_NUM;
config.pin_d3 = Y5_GPIO_NUM;
config.pin_d4 = Y6_GPIO_NUM;
config.pin_d5 = Y7_GPIO_NUM;
config.pin_d6 = Y8_GPIO_NUM;
config.pin_d7 = Y9_GPIO_NUM;
config.pin_xclk = XCLK_GPIO_NUM;
config.pin_pclk = PCLK_GPIO_NUM;
config.pin_vsync = VSYNC_GPIO_NUM;
config.pin_href = HREF_GPIO_NUM;
config.pin_sccb_sda = SIOD_GPIO_NUM;
config.pin_sccb_scl = SIOC_GPIO_NUM;
config.pin_pwdn = PWDN_GPIO_NUM;
config.pin_reset = RESET_GPIO_NUM;
config.xclk_freq_hz = 20000000;
config.frame_size = FRAMESIZE_UXGA;
config.pixel_format = PIXFORMAT_JPEG; // for streaming
//config.pixel_format = PIXFORMAT_RGB565; // for face detection/recognition
config.grab_mode = CAMERA_GRAB_WHEN_EMPTY;
config.fb_location = CAMERA_FB_IN_PSRAM;
config.jpeg_quality = 12;
config.fb_count = 1;
// if PSRAM IC present, init with UXGA resolution and higher JPEG quality
// for larger pre-allocated frame buffer.
if(config.pixel_format == PIXFORMAT_JPEG){
if(psramFound()){
config.jpeg_quality = 10;
config.fb_count = 2;
config.grab_mode = CAMERA_GRAB_LATEST;
} else {
// Limit the frame size when PSRAM is not available
config.frame_size = FRAMESIZE_SVGA;
config.fb_location = CAMERA_FB_IN_DRAM;
}
}
// camera init
esp_err_t err = esp_camera_init(&config);
if (err != ESP_OK) {
Serial.printf("Camera init failed with error 0x%x", err);
return;
}
sensor_t * s = esp_camera_sensor_get();
// drop down frame size for higher initial frame rate
if(config.pixel_format == PIXFORMAT_JPEG){
s->set_framesize(s, FRAMESIZE_QVGA);
}
WiFi.begin(ssid, password);
WiFi.setSleep(false);
while (WiFi.status() != WL_CONNECTED) {
delay(500);
Serial.print(".");
}
Serial.println("");
Serial.println("WiFi connected");
startCameraServer();
Serial.print("Camera Ready! Use 'http://");
Serial.print(WiFi.localIP());
Serial.println("' to connect");
}
void loop() {
// Do nothing. Everything is done in another task by the web server
delay(10000);
}
这个问题的原因是HTTP请求自身的缺陷造成的,HTTP请求属于半双工的工作模式,在每一次的请求响应过程,只能是单向的数据流(如果是请求,用户端把数据发送给服务端;如果是响应,则是服务器端把数据发送给客户端)。
如何解决ESP32Cam视频的81端口,与内网穿透仅有一个80端口的冲突呢?其实这里有两种方法:一个是更改视频为80端口,一个是更改为全双工的webSock连接模式。
这个我们会在后面的课时中进一步详解的。