Overview
A record of my experiments with streaming an ESP32-CAM to the public internet. I originally wanted to push an RTSP or RTMP stream, but couldn't get that working well enough and gave up, so in the end I uploaded the video as individual image frames instead.
I'm writing down the approaches I tried here so I don't forget them.
1. ESP32-CAM + Node.js + Python (TCP)
ESP32-CAM code:
This uploads the pictures captured by the ESP32, one frame at a time, to a TCP server.
/* Network debugging assistant: https://soft.3dmgame.com/down/213757.html */
#include <Arduino.h>
#include <WiFi.h>
#include "esp_camera.h"
#include <vector>

const char *ssid = "*******";             // Wi-Fi SSID
const char *password = "--------------";  // Wi-Fi password
const IPAddress serverIP(192, 168, 0, 2); // address of the TCP server
uint16_t serverPort = 8080;               // server port

#define maxcache 1430

WiFiClient client; // client object used to connect to the server

// Pin definition for a CAMERA_MODEL_AI_THINKER board
#define PWDN_GPIO_NUM     32
#define RESET_GPIO_NUM    -1
#define XCLK_GPIO_NUM      0
#define SIOD_GPIO_NUM     26
#define SIOC_GPIO_NUM     27
#define Y9_GPIO_NUM       35
#define Y8_GPIO_NUM       34
#define Y7_GPIO_NUM       39
#define Y6_GPIO_NUM       36
#define Y5_GPIO_NUM       21
#define Y4_GPIO_NUM       19
#define Y3_GPIO_NUM       18
#define Y2_GPIO_NUM        5
#define VSYNC_GPIO_NUM    25
#define HREF_GPIO_NUM     23
#define PCLK_GPIO_NUM     22

static camera_config_t camera_config = {
    .pin_pwdn = PWDN_GPIO_NUM,
    .pin_reset = RESET_GPIO_NUM,
    .pin_xclk = XCLK_GPIO_NUM,
    .pin_sscb_sda = SIOD_GPIO_NUM,
    .pin_sscb_scl = SIOC_GPIO_NUM,
    .pin_d7 = Y9_GPIO_NUM,
    .pin_d6 = Y8_GPIO_NUM,
    .pin_d5 = Y7_GPIO_NUM,
    .pin_d4 = Y6_GPIO_NUM,
    .pin_d3 = Y5_GPIO_NUM,
    .pin_d2 = Y4_GPIO_NUM,
    .pin_d1 = Y3_GPIO_NUM,
    .pin_d0 = Y2_GPIO_NUM,
    .pin_vsync = VSYNC_GPIO_NUM,
    .pin_href = HREF_GPIO_NUM,
    .pin_pclk = PCLK_GPIO_NUM,
    .xclk_freq_hz = 20000000,
    .ledc_timer = LEDC_TIMER_0,
    .ledc_channel = LEDC_CHANNEL_0,
    .pixel_format = PIXFORMAT_JPEG,
    .frame_size = FRAMESIZE_VGA,
    .jpeg_quality = 12,
    .fb_count = 1,
};

void wifi_init()
{
    WiFi.mode(WIFI_STA);
    WiFi.setSleep(false); // disable Wi-Fi sleep in STA mode for better responsiveness
    WiFi.begin(ssid, password);
    while (WiFi.status() != WL_CONNECTED)
    {
        delay(500);
        Serial.print(".");
    }
    Serial.println("WiFi Connected!");
    Serial.print("IP Address:");
    Serial.println(WiFi.localIP());
}

esp_err_t camera_init()
{
    // initialize the camera
    esp_err_t err = esp_camera_init(&camera_config);
    if (err != ESP_OK)
    {
        Serial.println("Camera Init Failed");
        return err;
    }
    sensor_t *s = esp_camera_sensor_get();
    // initial sensors are flipped vertically and colors are a bit saturated
    if (s->id.PID == OV2640_PID)
    {
        // s->set_vflip(s, 1);      // flip it back
        // s->set_brightness(s, 1); // up the brightness just a bit
        // s->set_contrast(s, 1);
    }
    Serial.println("Camera Init OK!");
    return ESP_OK;
}

void setup()
{
    Serial.begin(115200);
    wifi_init();
    camera_init();
}

void loop()
{
    Serial.println("Try To Connect TCP Server!");
    if (client.connect(serverIP, serverPort)) // try to reach the server
    {
        Serial.println("Connect Tcp Server Success!");
        // client.println("Frame Begin"); // 46 72 61 6D 65 20 42 65 67 69 6E, 0D 0A is the line break
        // send data to the server
        while (1)
        {
            camera_fb_t *fb = esp_camera_fb_get();
            if (!fb)
            {
                Serial.println("Camera Capture Failed");
            }
            else
            {
                // Save the original buffer address; it has to be restored before the
                // frame buffer is returned, otherwise the board reboots after sending
                // for a while and keeps doing so.
                uint8_t *temp = fb->buf;
                // Send "Frame Begin" to mark the start of a picture, then the JPEG data
                // in packets of maxcache bytes (remainder last), then "Frame Over"
                // to mark the end of the picture.
                client.print("Frame Begin"); // start marker of one picture
                // send the image data in chunks
                int leng = fb->len;
                int timess = leng / maxcache;
                int extra = leng % maxcache;
                for (int j = 0; j < timess; j++)
                {
                    client.write(fb->buf, maxcache);
                    fb->buf += maxcache; // advance to the next chunk
                }
                client.write(fb->buf, extra);
                client.print("Frame Over"); // end marker of one picture
                Serial.print("This Frame Length:");
                Serial.print(fb->len);
                Serial.println(".Success To Send Image For TCP!");
                fb->buf = temp;           // restore the saved pointer
                esp_camera_fb_return(fb); // return the frame buffer to the driver for reuse
            }
            delay(20); // short delay to improve transfer reliability
        }
        /*
        while (client.connected() || client.available()) // while connected or unread data remains
        {
            if (client.available()) // if there is data to read
            {
                String line = client.readStringUntil('\n'); // read up to the newline
                Serial.print("ReceiveData:");
                Serial.println(line);
                client.print("--From ESP32--:Hello Server!");
            }
        }
        Serial.println("close connect!");
        client.stop(); // close the client
        */
    }
    else
    {
        Serial.println("Connect To Tcp Server Failed!After 10 Seconds Try Again!");
        client.stop(); // close the client
    }
    delay(10000);
}
Below is the Node.js code for the TCP server.
I had never used a Node.js server before, so getting this running took quite some time, so I'm also noting the basic Node.js setup here. I set it up with the BaoTa (宝塔) panel, which covers a simple Node.js installation.
To start it locally, run:
node app.js
// Create the TCP relay server
const net = require('net')

const HOST = '192.168.0.2'
const PORT = 8080

// number of connected clients
var count = 0

// create the slave_server service
const slave_server = net.createServer()

// socket of the monitor (viewer) client
var s

slave_server.on('connection', sock => {
  sock.name = ++count
  console.log(`Connected clients: ${count}`)

  // data received from a client
  sock.on('data', data => {
    // console.log(`Client ${sock.name} sent: ${data}`)
    // if this is the monitor announcing itself, remember its socket
    if (data == 'monitor') {
      s = sock
    } else if (s) {
      // otherwise forward the camera data to the monitor (if one is connected)
      s.write(data)
    }
  })

  // error handler for this socket
  sock.on('error', error => {
    console.log('error' + error)
    sock.end()
  })

  // fired when the client connection closes
  sock.on('close', () => {
    console.log(`Client ${sock.name} went offline`)
    count -= 1
  })
})

// start listening on the given port
slave_server.listen(PORT, () => {
  console.log(`Server started, running at: http://xxxx:xxxx`)
})
Python client:
This one is run on Windows:
python app.py
import socket
import time

import cv2
import numpy as np

begin_data = b'Frame Begin'
end_data = b'Frame Over'

# How the ESP32 sends one picture:
# it first sends "Frame Begin", then the JPEG data in packets of 1430 bytes
# (remainder last), and finally "Frame Over" to mark the end of the picture.
# 1430 is the packet size used by the ESP32-CAM; the received data is of type bytes.


def handle_sock(sock, addr):
    temp_data = b''
    t1 = int(round(time.time() * 1000))
    while True:
        data = sock.recv(1430)
        # if the packet starts with b'Frame Begin', a new picture begins
        if data[0:len(begin_data)] == begin_data:
            # strip the start marker (b'Frame Begin'); it is not part of the image data
            data = data[len(begin_data):len(data)]
            # keep receiving until the packet ending with b'Frame Over' arrives,
            # which marks the last packet of this picture
            while data[-len(end_data):] != end_data:
                temp_data = temp_data + data  # not the last packet, append the data
                data = sock.recv(1430)
            # last packet: drop the trailing end marker and keep the rest
            temp_data = temp_data + data[0:(len(data) - len(end_data))]

            # decode and display the picture
            receive_data = np.frombuffer(temp_data, dtype='uint8')  # byte stream -> 1-D array
            r_img = cv2.imdecode(receive_data, cv2.IMREAD_COLOR)    # decode the JPEG into an image
            r_img = r_img.reshape(480, 640, 3)
            t2 = int(round(time.time() * 1000))
            fps = 1000 // (t2 - t1)
            cv2.putText(r_img, "FPS" + str(fps), (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), 2)
            cv2.imshow('server_frame', r_img)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
            t1 = t2
            print("Received picture size: " + str(len(temp_data)))  # size of this picture
            temp_data = b''  # clear the buffer for the next picture


server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.connect(("192.168.0.2", 8080))   # address of the Node.js relay server
server.send('monitor'.encode("utf-8"))  # identify this connection as the monitor
handle_sock(server, '')
That is all the code. The main flow is:
the ESP32 uploads to the Node.js server -> the Node.js server receives the data and forwards it -> the Python client connects to the server as the monitor and the server pushes the frames down to it.
Note that this Python client has to run on Windows (or another machine with a desktop), because cv2.imshow needs a graphical environment and errors out on a headless server (at least it did for me; tell me if there is a better way).
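If the client does have to run on a headless server, one workaround is simply to drop the GUI calls. Below is a minimal sketch, not part of the original code: the save_frame helper and the frames/ output directory are my own names, and it just writes each decoded picture to disk instead of calling cv2.imshow.

# Sketch only: replace the cv2.imshow()/cv2.waitKey() lines in handle_sock()
# with a call to save_frame(r_img) so no display is needed.
import os
import time

import cv2


def save_frame(r_img, out_dir="frames"):
    os.makedirs(out_dir, exist_ok=True)
    # name the file by a millisecond timestamp so frames do not overwrite each other
    filename = os.path.join(out_dir, str(int(time.time() * 1000)) + ".jpg")
    cv2.imwrite(filename, r_img)
    return filename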
Source of the code above:
Link: https://blog.csdn.net/phoenix3k/article/details/128446232
The server side could also use Python + ffmpeg to forward the frames as a proper video stream, but I couldn't manage it; my skills aren't there yet. If you have solved this, please give me a ping.
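The rough idea, as far as I understand it, would be to pipe each complete JPEG into an ffmpeg process that re-encodes and publishes the stream. This is only an untested sketch: the RTMP URL is a placeholder, the frame rate is assumed, and ffmpeg has to be installed on the server.

# Sketch only (untested): push the JPEG frames as an RTMP stream via ffmpeg.
# Call push_frame(temp_data) right after a full "Frame Over" has been received.
import subprocess

ffmpeg = subprocess.Popen(
    [
        "ffmpeg",
        "-f", "image2pipe",   # read a sequence of images from stdin
        "-c:v", "mjpeg",      # each input image is a JPEG
        "-r", "15",           # assumed input frame rate
        "-i", "-",
        "-c:v", "libx264",    # re-encode to H.264
        "-pix_fmt", "yuv420p",
        "-f", "flv",
        "rtmp://example.com/live/esp32cam",  # placeholder publish URL
    ],
    stdin=subprocess.PIPE,
)


def push_frame(jpeg_bytes):
    # feed one complete JPEG frame to ffmpeg
    ffmpeg.stdin.write(jpeg_bytes)
    ffmpeg.stdin.flush()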
Since my goal was something that can be played back directly over the public internet, and I couldn't implement that with the approach above, I went with plan B.
2. ESP32-CAM + Python + Vue (WebSocket)
ESP32-CAM code:
#include "WiFi.h" #include "esp_camera.h" //#include "base64.h" #include <ArduinoJson.h> #include <WebSocketsClient.h> //#include <SocketIOclient.h> // Pin definition for CAMERA_MODEL_AI_THINKER #define PWDN_GPIO_NUM 32 #define RESET_GPIO_NUM -1 #define XCLK_GPIO_NUM 0 #define SIOD_GPIO_NUM 26 #define SIOC_GPIO_NUM 27 #define Y9_GPIO_NUM 35 #define Y8_GPIO_NUM 34 #define Y7_GPIO_NUM 39 #define Y6_GPIO_NUM 36 #define Y5_GPIO_NUM 21 #define Y4_GPIO_NUM 19 #define Y3_GPIO_NUM 18 #define Y2_GPIO_NUM 5 #define VSYNC_GPIO_NUM 25 #define HREF_GPIO_NUM 23 #define PCLK_GPIO_NUM 22 // Replace with your network credentials const char* hostname = "ESP32CAM"; const char* ssid = "Tenda_66A6B0"; const char* password = "12345678"; WebSocketsClient webSocket; void webSocketEvent(WStype_t type, uint8_t * payload, size_t length) { switch(type) { case WStype_DISCONNECTED: Serial.printf("[WSc] Disconnected!\n"); break; case WStype_CONNECTED: { Serial.printf("[WSc] Connected to url: %s\n", payload); //webSocket.sendTXT("camlogin"); } break; case WStype_TEXT: Serial.printf("[WSc] get text: %s\n", payload); break; case WStype_BIN: // Serial.printf("[WSc] get binary length: %u\n", length); break; case WStype_PING: // pong will be send automatically Serial.printf("[WSc] get ping\n"); break; case WStype_PONG: // answer to a ping we send Serial.printf("[WSc] get pong\n"); break; } } void setupCamera() { camera_config_t config; config.ledc_channel = LEDC_CHANNEL_0; config.ledc_timer = LEDC_TIMER_0; config.pin_d0 = Y2_GPIO_NUM; config.pin_d1 = Y3_GPIO_NUM; config.pin_d2 = Y4_GPIO_NUM; config.pin_d3 = Y5_GPIO_NUM; config.pin_d4 = Y6_GPIO_NUM; config.pin_d5 = Y7_GPIO_NUM; config.pin_d6 = Y8_GPIO_NUM; config.pin_d7 = Y9_GPIO_NUM; config.pin_xclk = XCLK_GPIO_NUM; config.pin_pclk = PCLK_GPIO_NUM; config.pin_vsync = VSYNC_GPIO_NUM; config.pin_href = HREF_GPIO_NUM; config.pin_sscb_sda = SIOD_GPIO_NUM; config.pin_sscb_scl = SIOC_GPIO_NUM; config.pin_pwdn = PWDN_GPIO_NUM; config.pin_reset = RESET_GPIO_NUM; config.xclk_freq_hz = 20000000; config.pixel_format = PIXFORMAT_JPEG; config.frame_size = FRAMESIZE_QVGA; // FRAMESIZE_ + QVGA|CIF|VGA|SVGA|XGA|SXGA|UXGA config.jpeg_quality = 10; config.fb_count = 1; // Init Camera esp_err_t err = esp_camera_init(&config); if (err != ESP_OK) { Serial.printf("Camera init failed with error 0x%x", err); return; } } void setup(){ Serial.begin(115200); // Connect to Wi-Fi WiFi.begin(ssid, password); while (WiFi.status() != WL_CONNECTED) { delay(1000); Serial.println("Connecting to WiFi.."); } // Print ESP32 Local IP Address Serial.println(WiFi.localIP()); setupCamera(); // server address, port and URL webSocket.begin("192,168,0,2",8080); webSocket.onEvent(webSocketEvent); webSocket.setReconnectInterval(5000); webSocket.enableHeartbeat(15000, 3000, 2); } unsigned long messageTimestamp = 0; void loop() { webSocket.loop(); uint64_t now = millis(); if(now - messageTimestamp > 10) { messageTimestamp = now; camera_fb_t * fb = NULL; // Take Picture with Camera fb = esp_camera_fb_get(); if(!fb) { Serial.println("Camera capture failed"); return; } webSocket.sendBIN(fb->buf,fb->len); Serial.println("Image sent"); esp_camera_fb_return(fb); } }
Python server:
If you want multiple viewers, you can uncomment the for loop in the code below and comment out the single await USERS[len(USERS)-1].send(data) line, but then it throws code = 1001 (going away), which I haven't been able to solve yet; if anyone knows the fix, please let me know.

if len(USERS) >= 2:
    await USERS[len(USERS)-1].send(data)
    # for USER in USERS:
    #     await USER.send(data)
import asyncio
import binascii

import cv2 as cv
import numpy as np
import websockets

USERS = []  # every connected websocket: the camera plus any viewers


async def recv_msg(websocket):
    USERS.append(websocket)
    print('user', USERS, len(USERS))
    while True:
        try:
            recv_text = await websocket.recv()
            data = binascii.a2b_hex(recv_text.hex())
            data1 = np.frombuffer(data, dtype=np.uint8)
            lenmes = len(data)
            if lenmes < 1000:
                continue  # skip packets too small to be a picture
            img = cv.imdecode(data1, 1)
            # cv.imshow('result', img)
            cv.waitKey(1)
            if len(USERS) >= 2:
                await USERS[len(USERS)-1].send(data)
                # for USER in USERS:
                #     await USER.send(data)
                #     asyncio.ensure_future(USER.send(data))
            # time.sleep(0.01)
            data = ''
        finally:
            pass


async def main_logic(websocket, path):
    await recv_msg(websocket)


# replace the host with your own local IP if needed
start_server = websockets.serve(main_logic, "", 8080)
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
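For the multi-viewer case, one thing I would try against the code = 1001 (going away) error is to send each frame to every viewer and simply drop sockets whose connection has already closed. A minimal sketch of that idea follows (it assumes USERS[0] is the camera connection and everything after it is a viewer, which the original code does not guarantee):

# Sketch only: broadcast to all viewers, forgetting connections that have gone away.
import websockets


async def broadcast(data):
    # iterate over a copy (the slice) so USERS can be modified while looping
    for user in USERS[1:]:
        try:
            await user.send(data)
        except websockets.exceptions.ConnectionClosed:
            USERS.remove(user)  # this viewer disconnected; stop sending to it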
Vue front-end display:
esp.vue
<template>
  <div>
    <img id="show" style="width:300px;" />
  </div>
</template>
<script src="./esp.js"></script>
esp.js
/* eslint-disable */
export default {
  components: {},
  data() {
    return {
      url: "",
      websocket: null
    }
  },
  mounted() {
    this.initWebSocket();
  },
  created() {},
  methods: {
    initWebSocket() {
      var that = this
      var wsUri = "ws://192.168.0.2:8080/";
      this.websocket = new WebSocket(wsUri);
      this.websocket.onopen = function (evt) { that.onOpen(evt) };
      this.websocket.onclose = function (evt) { that.onClose(evt) };
      this.websocket.onmessage = function (evt) { that.onMessage(evt) };
      this.websocket.onerror = function (evt) { that.onError(evt) };
    },
    onOpen(evt) {
      console.log("CONNECTED")
    },
    onClose(evt) {
      console.log("DISCONNECTED")
    },
    onMessage(evt) {
      // read the received JPEG blob as a data URL and show it in the <img> element
      var reader = new FileReader();
      reader.onload = function (eve) {
        if (eve.target.readyState == FileReader.DONE) {
          var img = document.getElementById("show");
          img.src = this.result;
        }
      };
      reader.readAsDataURL(evt.data);
    },
    onError(evt) {
      console.log(evt.data)
    }
  }
}
With this, the video can be viewed in the web front end.
The main flow is:
the ESP32-CAM uploads image frames to the server -> the Python server receives the pictures and pushes them down -> Vue displays them.
Source of the code above:
Link: https://github.com/jeiry/esp32cam-and-mp
Summary
To change the ESP32-CAM resolution, set config.frame_size to one of:
FRAMESIZE_QVGA (320 x 240)
FRAMESIZE_CIF (352 x 288)
FRAMESIZE_VGA (640 x 480)
FRAMESIZE_SVGA (800 x 600)
FRAMESIZE_XGA (1024 x 768)
FRAMESIZE_SXGA (1280 x 1024)
FRAMESIZE_UXGA (1600 x 1200)
Pixel format: set config.pixel_format to one of:
PIXFORMAT_RGB565
PIXFORMAT_YUV422
PIXFORMAT_GRAYSCALE
PIXFORMAT_JPEG
Conclusion:
These are just examples; if you know a better approach, please share it so we can all learn from each other.