I'm using a Melexis MLX90640 32x24 thermal camera sensor connected over I2C to a Raspberry Pi 3.
Using Pimoroni's code, I can display the camera data in false colour on screen via the framebuffer, using their fbuf example.
Because it draws directly to the framebuffer rather than to a video stream or camera device, I can't read it in OpenCV. I want to use a video stream in OpenCV to count the people in a room, but I don't know how to modify the fbuf code to output video. It doesn't have to be true video, just a stream of images that OpenCV can read continuously.
I installed v4l2loopback to create a virtual camera device on the Pi at /dev/video0, and then used GStreamer to stream the region of the screen that the fbuf code draws the infrared camera's false-colour data into. This produces a stream that OpenCV can read, but the thermal image data in it never updates. Sometimes the image data shows up partially, while the Pi desktop comes through fine. It also feels inelegant and fragile, so I'd like a more reliable solution.
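For reference, on the OpenCV side the loopback device can be opened like any other camera; a minimal sketch, assuming the virtual device shows up at /dev/video0:

#include "opencv2/opencv.hpp"

int main() {
    // Open the v4l2loopback virtual camera like any normal capture device.
    cv::VideoCapture cap("/dev/video0");
    if (!cap.isOpened()) return 1;

    cv::Mat frame;
    while (cap.read(frame)) {
        // ... people-counting / other processing on `frame` would go here ...
        cv::imshow("thermal", frame);
        if (cv::waitKey(1) == 27) break;   // Esc to quit
    }
    return 0;
}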
Lepton has a working example for its sensor that uses the ondemandcam example from v4l2loopback, but it's a different sensor and it communicates over SPI rather than I2C.
My goal is to combine that code with Pimoroni's frame-capture code, fbuf, to get a stable video stream from the sensor that I can pull into OpenCV.
Lepton's code is based on the ondemandcam example from v4l2loopback. It adds its own sensor code to the grab_frame() function; the open_vpipe() function is the same as in the ondemandcam example.
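Opening such an output pipe follows the standard V4L2 pattern. Here is a rough sketch (not the example's exact code) of what an open_vpipe()-style function does, assuming RGB24 frames at the scaled thermal resolution:

#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

// Rough sketch, not the ondemandcam code itself: open the v4l2loopback
// device for output, set the frame format, then write() one buffer per frame.
int open_thermal_pipe(const char *dev, int width, int height) {
    int fd = open(dev, O_WRONLY);
    if (fd < 0) return -1;

    struct v4l2_format fmt = {};
    fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
    fmt.fmt.pix.field = V4L2_FIELD_NONE;
    fmt.fmt.pix.bytesperline = width * 3;
    fmt.fmt.pix.sizeimage = width * height * 3;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0) { close(fd); return -1; }
    return fd;
}
// Each frame is then pushed with: write(fd, rgb_buf, width * height * 3);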
If I could put the framebuffer code from fbuf into the grab_frame() function, I think it would work, but I'm not sure how to go about it. This for loop seems to be what I need to move into grab_frame():
for(int y = 0; y < 24; y++){
    for(int x = 0; x < 32; x++){
        float val = mlx90640To[32 * (23-y) + x];
        put_pixel_false_colour((y*IMAGE_SCALE), (x*IMAGE_SCALE), val);
    }
}
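To make concrete what I mean, here is a rough, untested sketch of that loop rewritten to fill a plain in-memory RGB buffer (the kind of buffer grab_frame() would hand to the pipe) instead of calling fb_put_pixel(). colour_for_temp() is a hypothetical placeholder for the gradient lookup inside put_pixel_false_colour():

// Untested sketch: fill an RGB24 buffer (24*IMAGE_SCALE rows by
// 32*IMAGE_SCALE columns) from the temperature array instead of drawing
// to the framebuffer. colour_for_temp() is a placeholder helper.
static void fill_rgb_from_mlx(uint8_t *rgb_buf, const float *mlx90640To) {
    const int width = 32 * IMAGE_SCALE;
    for (int y = 0; y < 24; y++) {
        for (int x = 0; x < 32; x++) {
            float val = mlx90640To[32 * (23 - y) + x];
            uint8_t r, g, b;
            colour_for_temp(val, &r, &g, &b);   // placeholder helper
            // scale each sensor pixel up to an IMAGE_SCALE x IMAGE_SCALE block
            for (int py = 0; py < IMAGE_SCALE; py++) {
                for (int px = 0; px < IMAGE_SCALE; px++) {
                    int idx = 3 * ((y * IMAGE_SCALE + py) * width + (x * IMAGE_SCALE + px));
                    rgb_buf[idx + 0] = r;
                    rgb_buf[idx + 1] = g;
                    rgb_buf[idx + 2] = b;
                }
            }
        }
    }
}

For reference, here is the complete fbuf example: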
#include <stdint.h>
#include <iostream>
#include <cstring>
#include <fstream>
#include <chrono>
#include <thread>
#include <math.h>
#include "headers/MLX90640_API.h"
#include "lib/fb.h"
#define MLX_I2C_ADDR 0x33
#define IMAGE_SCALE 5
// Valid frame rates are 1, 2, 4, 8, 16, 32 and 64
// The i2c baudrate is set to 1mhz to support these
#define FPS 8
#define FRAME_TIME_MICROS (1000000/FPS)
// Despite the framerate being ostensibly FPS hz
// The frame is often not ready in time
// This offset is added to the FRAME_TIME_MICROS
// to account for this.
#define OFFSET_MICROS 850
void put_pixel_false_colour(int x, int y, double v) {
    // Heatmap code borrowed from:
    // http://www.andrewnoske.com/wiki/Code_-_heatmaps_and_color_gradients
    const int NUM_COLORS = 7;
    static float color[NUM_COLORS][3] = { {0,0,0}, {0,0,1}, {0,1,0}, {1,1,0}, {1,0,0}, {1,0,1}, {1,1,1} };
    int idx1, idx2;
    float fractBetween = 0;
    float vmin = 5.0;
    float vmax = 50.0;
    float vrange = vmax-vmin;
    v -= vmin;
    v /= vrange;
    if(v <= 0) {idx1=idx2=0;}
    else if(v >= 1) {idx1=idx2=NUM_COLORS-1;}
    else
    {
        v *= (NUM_COLORS-1);
        idx1 = floor(v);
        idx2 = idx1+1;
        fractBetween = v - float(idx1);
    }
    int ir, ig, ib;
    ir = (int)((((color[idx2][0] - color[idx1][0]) * fractBetween) + color[idx1][0]) * 255.0);
    ig = (int)((((color[idx2][1] - color[idx1][1]) * fractBetween) + color[idx1][1]) * 255.0);
    ib = (int)((((color[idx2][2] - color[idx1][2]) * fractBetween) + color[idx1][2]) * 255.0);
    for(int px = 0; px < IMAGE_SCALE; px++){
        for(int py = 0; py < IMAGE_SCALE; py++){
            fb_put_pixel(x + px, y + py, ir, ig, ib);
        }
    }
}
int main(){
    static uint16_t eeMLX90640[832];
    float emissivity = 1;
    uint16_t frame[834];
    static float image[768];
    static float mlx90640To[768];
    float eTa;
    static uint16_t data[768*sizeof(float)];
    auto frame_time = std::chrono::microseconds(FRAME_TIME_MICROS + OFFSET_MICROS);

    MLX90640_SetDeviceMode(MLX_I2C_ADDR, 0);
    MLX90640_SetSubPageRepeat(MLX_I2C_ADDR, 0);
    switch(FPS){
        case 1:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b001);
            break;
        case 2:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b010);
            break;
        case 4:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b011);
            break;
        case 8:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b100);
            break;
        case 16:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b101);
            break;
        case 32:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b110);
            break;
        case 64:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b111);
            break;
        default:
            printf("Unsupported framerate: %d", FPS);
            return 1;
    }
    MLX90640_SetChessMode(MLX_I2C_ADDR);
    paramsMLX90640 mlx90640;
    MLX90640_DumpEE(MLX_I2C_ADDR, eeMLX90640);
    MLX90640_ExtractParameters(eeMLX90640, &mlx90640);
    fb_init();

    while (1){
        auto start = std::chrono::system_clock::now();
        MLX90640_GetFrameData(MLX_I2C_ADDR, frame);
        MLX90640_InterpolateOutliers(frame, eeMLX90640);
        eTa = MLX90640_GetTa(frame, &mlx90640);
        MLX90640_CalculateTo(frame, &mlx90640, emissivity, eTa, mlx90640To);
        for(int y = 0; y < 24; y++){
            for(int x = 0; x < 32; x++){
                float val = mlx90640To[32 * (23-y) + x];
                put_pixel_false_colour((y*IMAGE_SCALE), (x*IMAGE_SCALE), val);
            }
        }
        auto end = std::chrono::system_clock::now();
        auto elapsed = std::chrono::duration_cast<std::chrono::microseconds>(end - start);
        std::this_thread::sleep_for(std::chrono::microseconds(frame_time - elapsed));
    }
    fb_cleanup();
    return 0;
}
I added this at the top:
#include "opencv2/core/core.hpp"
using namespace cv;
using namespace std;
Then I modified the loop as you suggested, but it won't compile. Now I have just one compile error:
error: no match for 'operator[]' (operand types are 'cv::Mat' and 'int')
test_mat[y,x] = val;
^
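For anyone hitting the same error: cv::Mat has no two-argument indexing operator, so element access has to go through at<>() with (row, column), e.g.:

// test_mat[y,x] evaluates the comma operator and then looks for
// cv::Mat::operator[](int), which doesn't exist; use at<>() instead
// (assuming test_mat is a CV_32FC1 Mat):
test_mat.at<float>(y, x) = val;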
Now the compile error is gone, but these errors appear instead:
g++ -I. -std=c++11 -std=c++11 -c -o examples/fbuf.o examples/fbuf.cpp
g++ -L/home/pi/mlx90640-library examples/fbuf.o examples/lib/fb.o libMLX90640_API.a -o fbuf -lbcm2835
examples/fbuf.o: In function `cv::Mat::Mat(int, int, int, void*, unsigned int)':
fbuf.cpp:(.text._ZN2cv3MatC2EiiiPvj[_ZN2cv3MatC5EiiiPvj]+0x144): undefined reference to `cv::error(int, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, char const*, char const*, int)'
fbuf.cpp:(.text._ZN2cv3MatC2EiiiPvj[_ZN2cv3MatC5EiiiPvj]+0x21c): undefined reference to `cv::error(int, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, char const*, char const*, int)'
fbuf.cpp:(.text._ZN2cv3MatC2EiiiPvj[_ZN2cv3MatC5EiiiPvj]+0x2ac): undefined reference to `cv::Mat::updateContinuityFlag()'
examples/fbuf.o: In function `cv::Mat::~Mat()':
fbuf.cpp:(.text._ZN2cv3MatD2Ev[_ZN2cv3MatD5Ev]+0x3c): undefined reference to `cv::fastFree(void*)'
examples/fbuf.o: In function `cv::Mat::release()':
fbuf.cpp:(.text._ZN2cv3Mat7releaseEv[_ZN2cv3Mat7releaseEv]+0x68): undefined reference to `cv::Mat::deallocate()'
collect2: error: ld returned 1 exit status
Makefile:37: recipe for target 'fbuf' failed
make: *** [fbuf] Error 1
Now the program compiles. I had to make additions to the makefile. I added:
CPPFLAGS = `pkg-config --cflags opencv`
LDLIBS = `pkg-config --libs opencv`
and added $(CPPFLAGS) and $(LDLIBS) after $(I2C_LIBS) in the following rule:
fbuf: examples/fbuf.o examples/lib/fb.o libMLX90640_API.a
$(CXX) -L/home/pi/mlx90640-library $^ -o $@ $(I2C_LIBS) $(CPPFLAGS) $(LDLIBS)
Now my loop looks like this, but when I run the program there is no output. Since I'm no longer using the false colour function, how do I display the image held in the Mat?
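(The missing piece turned out to be showing the Mat with imshow() and pumping the HighGUI event loop with waitKey(), as in the final code below; minimally:)

namedWindow("IR Camera Window");
imshow("IR Camera Window", falsecolor_mat);
waitKey(1);   // without a waitKey() call nothing is actually drawn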
Thanks to Mark Setchell and Pimoroni's original code, I now have code that gets the MLX90640 sensor data into OpenCV. I was also able to replace the false colour function with OpenCV's built-in applyColorMap.
Now I can start processing the data with OpenCV. I used several Mat conversions to get the final image; there is probably a more efficient way to do this.
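For example, one possibly shorter chain (an untested sketch; the orientation and size differ from my loop, so a flip() or transpose() may be needed) is to wrap the temperature array directly in a Mat header and let normalize() produce the 8-bit image in one step:

// Untested sketch: zero-copy Mat header over the 24x32 temperature array,
// then normalize straight to 8-bit, resize, and colour-map.
Mat ir(24, 32, CV_32FC1, mlx90640To);
Mat u8, big, colour;
normalize(ir, u8, 0, 255, NORM_MINMAX, CV_8U);
resize(u8, big, Size(320, 240), 0, 0, INTER_CUBIC);
applyColorMap(big, colour, COLORMAP_JET);

Here is the full working code: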
#include <stdint.h>
#include <iostream>
#include <cstring>
#include <fstream>
#include <chrono>
#include <thread>
#include <math.h>
#include "headers/MLX90640_API.h"
#include "opencv2/opencv.hpp"
using namespace cv;
using namespace std;
#define MLX_I2C_ADDR 0x33
// Valid frame rates are 1, 2, 4, 8, 16, 32 and 64
// The i2c baudrate is set to 1mhz to support these
#define FPS 8
#define FRAME_TIME_MICROS (1000000/FPS)
// Despite the framerate being ostensibly FPS hz
// The frame is often not ready in time
// This offset is added to the FRAME_TIME_MICROS
// to account for this.
#define OFFSET_MICROS 850
int main(){
    static uint16_t eeMLX90640[832];
    float emissivity = 1;
    uint16_t frame[834];
    static float image[768];
    static float mlx90640To[768];
    float eTa;
    static uint16_t data[768*sizeof(float)];
    auto frame_time = std::chrono::microseconds(FRAME_TIME_MICROS + OFFSET_MICROS);

    MLX90640_SetDeviceMode(MLX_I2C_ADDR, 0);
    MLX90640_SetSubPageRepeat(MLX_I2C_ADDR, 0);
    switch(FPS){
        case 1:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b001);
            break;
        case 2:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b010);
            break;
        case 4:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b011);
            break;
        case 8:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b100);
            break;
        case 16:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b101);
            break;
        case 32:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b110);
            break;
        case 64:
            MLX90640_SetRefreshRate(MLX_I2C_ADDR, 0b111);
            break;
        default:
            printf("Unsupported framerate: %d", FPS);
            return 1;
    }
    MLX90640_SetChessMode(MLX_I2C_ADDR);
    paramsMLX90640 mlx90640;
    MLX90640_DumpEE(MLX_I2C_ADDR, eeMLX90640);
    MLX90640_ExtractParameters(eeMLX90640, &mlx90640);
    while (1){
        auto start = std::chrono::system_clock::now();
        MLX90640_GetFrameData(MLX_I2C_ADDR, frame);
        MLX90640_InterpolateOutliers(frame, eeMLX90640);
        eTa = MLX90640_GetTa(frame, &mlx90640);
        MLX90640_CalculateTo(frame, &mlx90640, emissivity, eTa, mlx90640To);
        Mat IR_mat(32, 24, CV_32FC1, data);
        for(int y = 0; y < 24; y++){
            for(int x = 0; x < 32; x++){
                float val = mlx90640To[32 * (23-y) + x];
                IR_mat.at<float>(x,y) = val;
            }
        }
        // Normalize the mat
        Mat normal_mat;
        normalize(IR_mat, normal_mat, 0, 1.0, NORM_MINMAX, CV_32FC1);
        // Convert Mat to CV_8U to use applyColorMap
        double minVal, maxVal;
        minMaxLoc(normal_mat, &minVal, &maxVal);
        Mat u8_mat;
        normal_mat.convertTo(u8_mat, CV_8U, 255.0/(maxVal - minVal), -minVal);
        // Resize mat (dsize is given, so fx/fy are 0 and the last
        // argument is the interpolation flag)
        Mat size_mat;
        resize(u8_mat, size_mat, Size(240,320), 0, 0, INTER_CUBIC);
        // Apply false color
        Mat falsecolor_mat;
        applyColorMap(size_mat, falsecolor_mat, COLORMAP_JET);
        // Display stream in window
        namedWindow("IR Camera Window");
        imshow("IR Camera Window", falsecolor_mat);
        waitKey(1);
        auto end = std::chrono::system_clock::now();
        auto elapsed = std::chrono::duration_cast<std::chrono::microseconds>(end - start);
        std::this_thread::sleep_for(std::chrono::microseconds(frame_time - elapsed));
    }
    return 0;
}