
Monday, March 25, 2013

wxKinect

  My professor's lab has a lot of equipment lying around, so yesterday I borrowed a Kinect to play with and wrote a demo to try out the SDK. It reads the skeleton very easily, and you can also switch the display between Color, Depth, Canny and other processed images. OpenNI feels quite pleasant to use; note that on Windows, OpenNI 2 no longer uses its own driver and relies on the MS Kinect SDK driver instead. Overall the image format conversions (OpenNI & NiTE => OpenCV => wxWidgets) are a bit wasteful time-wise, but they are still fast enough for a Kinect that only delivers 30 FPS. If you build with OpenNI or other middleware on VS2008 or earlier, you have to check the compiler version and add a few typedefs/defines yourself (see the top of the header below). The depth image you receive is CV_16UC1 with values in 0 ~ 4096 (or 0 ~ 10000, depending on the format); to convert it to CV_8UC1 you need to map those values down to 0 ~ 255, otherwise the rendered image is almost completely black. To hand it to wxImage, just convert the grayscale image to color (CV_GRAY2BGR). Finally, to release the binary, copy the dynamic-link libraries from the Redist directory under the OpenNI (and middleware) SDK installation, together with the folder that also sits under Redist, into the same directory as the executable.
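
For reference, here is a minimal sketch of that depth conversion in isolation (the helper name DepthToDisplay is just for illustration; the real code below does the same thing inline, taking max_depth from the depth stream):

#include <OpenNI.h>
#include <cv.h>

// Map a 16-bit OpenNI depth frame (0 ~ max_depth) to an 8-bit 3-channel
// image that can be handed straight to wxImage.
cv::Mat DepthToDisplay(const openni::VideoFrameRef &raw, int max_depth)
{
    cv::Mat depth16(raw.getHeight(), raw.getWidth(), CV_16UC1, (void*)raw.getData());

    cv::Mat depth8;
    depth16.convertTo(depth8, CV_8UC1, 255.0 / max_depth); // 0 ~ max_depth -> 0 ~ 255

    cv::Mat depth_bgr;
    cv::cvtColor(depth8, depth_bgr, CV_GRAY2BGR);           // grayscale -> 3 channels for wxImage
    return depth_bgr;
}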





wxKinect.h


#ifndef __WX_KINECT__
#define __WX_KINECT__

#if defined(_MSC_VER) && _MSC_VER < 1600
typedef __int64 int64_t;
typedef unsigned __int64 uint64_t;
typedef unsigned __int32 uint32_t;
typedef unsigned __int16 uint16_t;
typedef unsigned __int8 uint8_t;
#else
#include <stdint.h>
#endif


#include <wx/wx.h>

#include <highgui.h>
#include <cv.h>

#include <OpenNI.h>
#include <NiTE.h>

const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;

const int COLOR_MODE = 1;
const int DEPTH_MODE = 2;
const int CANNY_MODE = 3;

enum{
ID_COLOR_FRAME = 100,
ID_DEPTH_FRAME,
ID_CANNY_FRAME
};

class App:public wxApp
{
public:
bool OnInit();
};

class Thread;

class Frame:public wxFrame
{
public:
Frame(const wxString&);
~Frame();

void CreateUI();
void InitKinect();

void Display();

void OnExit(wxCommandEvent&);
void OnColor(wxCommandEvent&);
void OnDepth(wxCommandEvent&);
void OnCanny(wxCommandEvent&);
private:
friend class Thread;
Thread *thread;

openni::Device device;
nite::UserTracker user_tracker;
openni::VideoMode color_mode;
openni::VideoStream color_stream;
openni::VideoMode depth_mode;
openni::VideoStream depth_stream;
openni::VideoFrameRef color_frame;
openni::VideoFrameRef depth_frame;
nite::UserTrackerFrameRef user_tracker_frame;
int max_depth;

int select_mode;

wxPanel *screen;

DECLARE_EVENT_TABLE()
};

BEGIN_EVENT_TABLE(Frame,wxFrame)
EVT_MENU(wxID_EXIT,Frame::OnExit)
EVT_MENU(ID_COLOR_FRAME,Frame::OnColor)
EVT_MENU(ID_DEPTH_FRAME,Frame::OnDepth)
EVT_MENU(ID_CANNY_FRAME,Frame::OnCanny)
END_EVENT_TABLE()

class Thread:public wxThread
{
public:
Thread(Frame*);

void* Entry();
private:
Frame *frame;
};

#endif


wxKinect.cpp


#include "wxKinect.h"

DECLARE_APP(App)
IMPLEMENT_APP(App)

bool App::OnInit()
{
Frame *frame = new Frame(wxT("wxKinect"));

frame->Show(true);

return true;
}

Frame::Frame(const wxString &title):wxFrame(NULL,wxID_ANY,title,wxDefaultPosition,wxSize(800,600),wxMINIMIZE_BOX | wxCLOSE_BOX | wxCAPTION | wxSYSTEM_MENU)
{
InitKinect();
CreateUI();

select_mode = COLOR_MODE;

thread = new Thread(this);
thread->Create();
thread->Run();
}

Frame::~Frame()
{
thread->Delete();

color_stream.destroy();
depth_stream.destroy();
device.close();
openni::OpenNI::shutdown();
nite::NiTE::shutdown();
}

void Frame::CreateUI()
{
wxMenu *file = new wxMenu;
file->Append(wxID_EXIT,wxT("E&xit\tAlt-q"),wxT("exit"));

wxMenu *select = new wxMenu;
select->AppendRadioItem(ID_COLOR_FRAME,wxT("c&olor\tCtrl-c"),wxT("color"));
select->AppendRadioItem(ID_DEPTH_FRAME,wxT("d&epth\tCtrl-d"),wxT("depth"));
select->AppendRadioItem(ID_CANNY_FRAME,wxT("c&anny\tCtrl-x"),wxT("canny"));

wxMenuBar *bar = new wxMenuBar;
bar->Append(file,wxT("file"));
bar->Append(select,wxT("select"));
SetMenuBar(bar);

wxBoxSizer *top = new wxBoxSizer(wxVERTICAL);
this->SetSizer(top);

wxBoxSizer *screen_box = new wxBoxSizer(wxHORIZONTAL);
top->Add(screen_box,0,wxALIGN_CENTER_HORIZONTAL | wxALL,5);

screen = new wxPanel(this,wxID_ANY,wxDefaultPosition,wxSize(SCREEN_WIDTH,SCREEN_HEIGHT));
screen_box->Add(screen,0,wxALIGN_CENTER_HORIZONTAL | wxALIGN_CENTER_VERTICAL,5);

CreateStatusBar(2);
SetStatusText(wxDateTime::Now().Format());
}

void Frame::InitKinect()
{
openni::OpenNI::initialize();
nite::NiTE::initialize();

device.open(openni::ANY_DEVICE);
device.setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);

user_tracker.create();
user_tracker.setSkeletonSmoothingFactor(0.1f);

color_mode.setFps(30);
color_mode.setResolution(SCREEN_WIDTH,SCREEN_HEIGHT);
color_mode.setPixelFormat(openni::PIXEL_FORMAT_RGB888);

color_stream.create(device,openni::SENSOR_COLOR);
color_stream.setVideoMode(color_mode); // the stream must be created before its mode can be set
color_stream.start();

depth_mode.setFps(30);
depth_mode.setResolution(SCREEN_WIDTH,SCREEN_HEIGHT);
depth_mode.setPixelFormat(openni::PIXEL_FORMAT_DEPTH_100_UM);

depth_stream.create(device,openni::SENSOR_DEPTH);
depth_stream.setVideoMode(depth_mode); // likewise, set the mode after create()
depth_stream.start();
max_depth = depth_stream.getMaxPixelValue();
}

void Frame::Display()
{
color_stream.readFrame(&color_frame);
depth_stream.readFrame(&depth_frame);
user_tracker.readFrame(&user_tracker_frame);

cv::Mat RGBMat(color_frame.getHeight(),color_frame.getWidth(),CV_8UC3,(void*)color_frame.getData());
IplImage color_image(RGBMat);
cvCvtColor(&color_image,&color_image,CV_RGB2BGR);

cv::Mat DepthMat(depth_frame.getHeight(),depth_frame.getWidth(),CV_16UC1,(void*)depth_frame.getData());
DepthMat.convertTo(DepthMat,CV_8UC1,255.0f / max_depth);
IplImage depth_image(DepthMat);

IplImage *select_image = cvCreateImage(cvGetSize(&color_image),IPL_DEPTH_8U,3);

if(select_mode == COLOR_MODE){
cvCopyImage(&color_image,select_image);
}
else if(select_mode == DEPTH_MODE){
cvCvtColor(&depth_image,select_image,CV_GRAY2BGR);
}
else if(select_mode == CANNY_MODE){
cvCanny(&depth_image,&depth_image,50,200);
cvCvtColor(&depth_image,select_image,CV_GRAY2BGR);
}

const nite::Array<nite::UserData> &users = user_tracker_frame.getUsers();
for(int i = 0;i < users.getSize();++i){
const nite::UserData &user = users[i];

if(user.isNew()){
user_tracker.startSkeletonTracking(user.getId());
}
else if(user.isLost()){
}

if(user.isVisible()){
const nite::Skeleton &skeleton = user.getSkeleton();
nite::SkeletonJoint joints[15];

// skip this user if NiTE is not actually tracking the skeleton yet,
// otherwise the joints below would be left uninitialized
if(skeleton.getState() != nite::SKELETON_TRACKED){
continue;
}

joints[0] = skeleton.getJoint(nite::JOINT_HEAD);
joints[1] = skeleton.getJoint(nite::JOINT_NECK);
joints[2] = skeleton.getJoint(nite::JOINT_LEFT_SHOULDER);
joints[3] = skeleton.getJoint(nite::JOINT_RIGHT_SHOULDER);
joints[4] = skeleton.getJoint(nite::JOINT_LEFT_ELBOW);
joints[5] = skeleton.getJoint(nite::JOINT_RIGHT_ELBOW);
joints[6] = skeleton.getJoint(nite::JOINT_LEFT_HAND);
joints[7] = skeleton.getJoint(nite::JOINT_RIGHT_HAND);
joints[8] = skeleton.getJoint(nite::JOINT_TORSO);
joints[9] = skeleton.getJoint(nite::JOINT_LEFT_HIP);
joints[10] = skeleton.getJoint(nite::JOINT_RIGHT_HIP);
joints[11] = skeleton.getJoint(nite::JOINT_LEFT_KNEE);
joints[12] = skeleton.getJoint(nite::JOINT_RIGHT_KNEE);
joints[13] = skeleton.getJoint(nite::JOINT_LEFT_FOOT);
joints[14] = skeleton.getJoint(nite::JOINT_RIGHT_FOOT);

CvPoint2D32f point[15];
for(int i = 0;i < 15;++i){
const nite::Point3f &pos = joints[i].getPosition();
user_tracker.convertJointCoordinatesToDepth(pos.x,pos.y,pos.z,&(point[i].x),&(point[i].y));
}

cvLine(select_image,cvPoint(point[0].x,point[0].y),cvPoint(point[1].x,point[1].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[1].x,point[1].y),cvPoint(point[2].x,point[2].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[1].x,point[1].y),cvPoint(point[3].x,point[3].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[2].x,point[2].y),cvPoint(point[4].x,point[4].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[3].x,point[3].y),cvPoint(point[5].x,point[5].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[4].x,point[4].y),cvPoint(point[6].x,point[6].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[5].x,point[5].y),cvPoint(point[7].x,point[7].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[1].x,point[1].y),cvPoint(point[8].x,point[8].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[8].x,point[8].y),cvPoint(point[9].x,point[9].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[8].x,point[8].y),cvPoint(point[10].x,point[10].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[9].x,point[9].y),cvPoint(point[11].x,point[11].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[10].x,point[10].y),cvPoint(point[12].x,point[12].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[11].x,point[11].y),cvPoint(point[13].x,point[13].y),CV_RGB(255,0,0),3);
cvLine(select_image,cvPoint(point[12].x,point[12].y),cvPoint(point[14].x,point[14].y),CV_RGB(255,0,0),3);

for(int i = 0;i < 15;++i){
if(joints[i].getPositionConfidence() > 0.5f){
cvCircle(select_image,cvPoint(point[i].x,point[i].y),3,CV_RGB(0,255,0),2);
}
else{
cvCircle(select_image,cvPoint(point[i].x,point[i].y),3,CV_RGB(0,0,255),2);
}
}
}
}

wxClientDC dc(screen);
cvConvertImage(select_image,select_image,CV_CVTIMG_SWAP_RB);
unsigned char *data;
cvGetRawData(select_image,&data);
wxImage *image = new wxImage(select_image->width,select_image->height,data,true);
wxBitmap *bitmap = new wxBitmap(*image);
int x,y,width,height;
dc.GetClippingBox(&x,&y,&width,&height);
dc.DrawBitmap(*bitmap,x,y);
delete image;
delete bitmap;

cvReleaseImage(&select_image);
}

void Frame::OnColor(wxCommandEvent &event)
{
select_mode = COLOR_MODE;
}

void Frame::OnDepth(wxCommandEvent &event)
{
select_mode = DEPTH_MODE;
}

void Frame::OnCanny(wxCommandEvent &event)
{
select_mode = CANNY_MODE;
}

void Frame::OnExit(wxCommandEvent &event)
{
Close();
}

Thread::Thread(Frame *parent):wxThread(wxTHREAD_DETACHED)
{
frame = parent;
}

void* Thread::Entry()
{
while(!TestDestroy()){
frame->Display();
}

return NULL;
}



Tuesday, March 12, 2013

wxRovio ver.0.1.0 - WowWee Rovio

  After Chinese New Year I got a WowWee Rovio from someone to play with. After two weeks I found there seemed to be no recording software for it, so I had to write one myself. I found roviolib on GitHub and, following its approach, used libcurl to send parameters to the Rovio's CGI. The format is http://<username>:<password>@<ip>/<command> (remove the angle brackets). If the command you send makes the CGI return data, you have to write a callback to handle it. Still images are easy, it's just a matter of recognizing the JPEG format; the video stream is harder to figure out: it returns MJPEG, but handling it is odd and the chunk sizes vary. After studying the roviolib author's callback, the frames seem to fall somewhere between 10k and 50k. Once the video stream is enabled, data keeps coming in, so you need to spawn a thread so the main thread doesn't hang. There is also a debug feature that can enable the I2C functions; see my reference links below. Finally, my rewritten wxRovio does not cover the full feature set yet, only the functions I use often, plus a demo GUI built with wxWidgets; I'll add features or polish the GUI when I have time. (PS: if a button gets stuck held down, just reboot, or try pressing Stop.)
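
As a rough sketch of that libcurl pattern (the SendCommand helper and the std::string buffer here are just for illustration, not the actual wxRovio code; the URL follows the format above):

#include <string>
#include <curl/curl.h>

// Write callback: libcurl hands us whatever the Rovio CGI returns,
// and we append it to a std::string.
static size_t OnData(char *ptr, size_t size, size_t nmemb, void *userdata)
{
    std::string *buffer = static_cast<std::string*>(userdata);
    buffer->append(ptr, size * nmemb);
    return size * nmemb;
}

// Send one command to the Rovio CGI and collect the reply.
std::string SendCommand(const std::string &url)
{
    std::string reply;
    CURL *curl = curl_easy_init();
    if(curl){
        curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, OnData);
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, &reply);
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
    }
    return reply;
}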

Remember to log in before using any of the functions, otherwise you'll get an exception XD. After recording (Video Start), remember to stop it (Video Stop). If you hold down the head control buttons, the head moves up or down in small steps; a single click moves it directly to the preset position.



When night mode is enabled, the brightness is determined by the number the framerate is divided by; in other words, the brighter the image, the lower the framerate. As for Auto Gain Control, I assumed it was related to motion control, but it actually behaves much like night mode; the difference is that it does not reduce the framerate, although the noise increases quite a bit. Both of these features live in Debug.cgi.



wxRovio recording (about 1.2 GB for one hour of video)



=============2013/03/14===============

The same code compiles on Linux without problems; of course you need wxWidgets, OpenCV and libcurl.




If there is another release I'll write a Makefile XD


g++ -O2 -o wxRovio './AddUserDialog.cpp' './App.cpp' './DeleteUserDialog.cpp' './LoggingDialog.cpp' './SetCameraDialog.cpp' './SetOtherDialog.cpp' './wxRovio.cpp' `wx-config --cxxflags --libs --unicode` `pkg-config opencv --cflags --libs` -lcurl





References: