Search

2013年4月13日 星期六

STM32F10xC DIY

  今天趁有空稍微確認了幾點,首先是電源電路用3.3v並聯三個0.1u陶瓷電容與一個1u無極性電解電容,接著reset電路可以參考這裡,然後8 or 12MHz Crystal配上22p陶瓷電容(PD0 OSC_IN、PD1 OSC_OUT),32.768KHz Crystal配10p陶瓷電容(PC14-OSC32_IN、PC15-OSC32_OUT),VBAT與所有VDD都接3.3v然後所有VSS都接地,最後將USB to Serial的RX&TX接到STM32F10xC(我在STM32F100C8T6B測試)的PA9&PA10。

Reset Circuit



  接著toolchain因為自己DIY沒有STLINK所以必須用UART的方式去將binary upload到flash,在windows下可以用Keil ARM編譯code接著就可以用MCUISP將binary upload,而linux可以找ARM-Toolchain編譯code然後用stm32flash將binary upload,要注意的是upload前要先將BOOT 0接上3.3v然後最好再按下reset(因為像是MCUISP上一次燒錄完程式會自動reset但stm32flash不會),接著再將BOOT 0 接地並且按下reset按鈕就可以運作了。



  話說先畫成轉接板接到breadboard再搭電路對我來說比較保險,而且我不太會焊電子零件這樣小小一個我也要弄30分鐘(看以前學電子的學長不用一下就好了)...下次有空應該可以Layout出完整的版子了:Q

2013年4月7日 星期日

No.100 キタ━━━━━━(゚∀゚)━━━━━━!

  這篇是第一百篇!當初寫這blog最主要目的還是記錄電子DIY相關而且中文資料少的,不過後來看wxWidgets在台灣用的人不多(我只知道有某公司作的Game Tools用它寫UI)我這幾年寫的案子UI也都用它(三年前都用MFC到是Qt沒用來作案子過),不過其實API沒啥好寫的直接看DOCUMENT即可但是各種Library跟wxWidgets如何整合有的資料也很少所以我也寫,至於我主要研究的領域以後可能再開另一個blog紀錄,未來這裡文章方向大概也是這樣都寫比較輕鬆好玩打發時間的。

  話說最近買了STM32F407來玩,功能非常強大而且很便宜(戰略產品),雖然這兩三年免費幫人改寫過各種MCU Proto當額外Service,但是STM32比較特別的一點大概就是很少看到商業入門就是直接用Library,跟其它常見MCU一開始入門都是ASM也直接控制暫存器等等反而讓我很不習慣XD,而且看很多電子電機R&D大多也都自己搭MCU開發板,不過STM32的LQFP封裝對我來說實在太小腳距不太好弄而且板子也洗不好,弄一個48 pin的STM32F10xC轉板良率才20% XDD(下圖上方那塊),等在麵包板搭完測試後再Layout一個完整版,未來有時間大概都玩STM32(我覺得功能蠻棒的),還有研究寫RTOS等等...


基本運作電路

看起來基本電路是正確(?)的可以Upload Program


突然發現STM32F10xC轉版沒焊好有的腳輸出不到...



2013年4月1日 星期一

wxKinect - Hand Detect

  前幾天看到一個日本人寫的這篇檢測手的方法,流程:Frame轉成HSV(因為要檢測顏色) =>  濾波(去除雜訊) => 將膚色作為閾值做二值化 => 接著就是標準的找輪廓(Find Contours)、找尋凸包(Convex Hull)、找尋凸缺陷(Convexity Defects),就可以找到手的幾個辨識關鍵,實作這個方法後感覺還可以但是就沒有發揮到Kinect的功能,不過昨天在GitHub看到這篇,有個我覺得很高明的地方也利用到Kinect的特點,就是直接把手的深度範圍當作閾值獨立出手的部分算是蠻準的,其餘的處理就都是一樣了,玩Kinect一個禮拜感覺上可以發揮的點很多。





wxKinect.h


#ifndef __WX_KINECT__
#define  __WX_KINECT__

#if _MSC_VER < 1600
typedef __int64 int64_t;
typedef unsigned __int64 uint64_t;
typedef unsigned __int32 uint32_t;
typedef unsigned __int16 uint16_t;
typedef unsigned __int8 uint8_t;
#else
#include <stdint.h>
#endif

#include <wx/wx.h>

#include <cv.h>
#include <highgui.h>

#include <NiTE.h>

// Size of the Kinect depth frame / top display panel, in pixels.
const int SCREEN_WIDTH = 640;
const int SCREEN_HIGHT = 480; // NOTE(review): "HIGHT" is a typo for HEIGHT; kept because other code references it
/*
const int ROI_MAX_SIZE_X = 540;
const int ROI_MAX_SIZE_Y = 380;
*/
// Maps the 12-bit Kinect depth range (0..4095) onto 8-bit grayscale.
const float DEPTH_SACLE = 255.0f / 4096.0f; // NOTE(review): "SACLE" is a typo for SCALE; kept because other code references it
// Half the side length of the square ROI cropped around a tracked hand.
const int HAND_ROI_SIZE = 100;

// Contours with area above this (in pixels) are treated as a candidate hand.
const int LIKELY_THE_HAND_AREA = 2000;
// contour-area / hull-area ratio above which the hand is considered closed (grasping).
const float HAND_IS_GRASPING = 0.8f;

// Half-width of the depth window (in scaled 8-bit depth units) used to segment the hand.
const int DEPTH_RANGE = 7;

// wxWidgets application object; OnInit() is the effective program entry point.
class App:public wxApp
{
public:
bool OnInit();
};

// Forward declaration: a friend declaration alone does not make `Thread`
// visible to ordinary lookup, which the `Thread *thread;` member below needs.
class Thread;

/*
 * Main window.  Owns the NiTE hand tracker and the two display panels;
 * a detached worker Thread calls Display() in a loop until destruction.
 */
class Frame:public wxFrame
{
public:
    Frame(const wxString&); // window title
    ~Frame();

    void InitKinect(); // initializes NiTE and starts gesture detection
    void CreateUI();   // builds the menu bar, panels and status bar
    void Display();    // reads one Kinect frame and renders it (called from the worker thread)

    void OnExit(wxCommandEvent&); // wxID_EXIT menu handler
private:
    friend class Thread;
    Thread *thread; // detached worker; asked to stop via Delete() in ~Frame

    nite::HandTracker hand_tracker;
    nite::HandTrackerFrameRef hand_tracker_frame;

    wxPanel *depth_screen; // full 640x480 depth view
    wxPanel *hand_screen;  // zoomed hand-ROI debug view

    DECLARE_EVENT_TABLE()
};

// wx event table: routes the File->Exit menu command to Frame::OnExit.
BEGIN_EVENT_TABLE(Frame,wxFrame)
EVT_MENU(wxID_EXIT,Frame::OnExit)
END_EVENT_TABLE()

// Detached worker thread that repeatedly calls Frame::Display().
class Thread:public wxThread
{
public:
Thread(Frame*); // parent: the frame whose Display() this thread drives

void* Entry(); // thread body: loops until TestDestroy() returns true
private:
Frame *frame; // non-owning back-pointer to the main window
};

#endif



wxKinect.cpp


#include "wxKinect.h"

// Generates wxGetApp() and the platform entry point (main/WinMain) for App.
DECLARE_APP(App)
IMPLEMENT_APP(App)

bool App::OnInit()
{
Frame *frame = new Frame(wxT("wxKinect - Hand Detect"));

frame->Show(true);

return true;
}

/*
 * Creates the main window: initializes NiTE and the hand tracker, builds
 * the UI, then spawns the detached worker thread that pumps Kinect frames.
 * Fix: the original ignored the result of wxThread::Create(); calling
 * Run() on a thread that failed to create is invalid.
 */
Frame::Frame(const wxString &title):wxFrame(NULL,wxID_ANY,title,wxDefaultPosition,wxSize(700,800),wxMINIMIZE_BOX | wxCLOSE_BOX | wxCAPTION | wxSYSTEM_MENU)
{
    InitKinect();
    CreateUI();

    thread = new Thread(this);
    if(thread->Create() == wxTHREAD_NO_ERROR){
        thread->Run();
    }
    else{
        wxLogError(wxT("Can not create the Kinect worker thread!"));
    }
}

void Frame::CreateUI()
{
wxMenu *file = new wxMenu;
file->Append(wxID_EXIT,wxT("E&xit\tAlt-q"),wxT("exit"));

wxMenuBar *bar = new wxMenuBar;
bar->Append(file,wxT("file"));
SetMenuBar(bar);

wxBoxSizer *top = new wxBoxSizer(wxVERTICAL);
this->SetSizer(top);

wxBoxSizer *screen_box = new wxBoxSizer(wxVERTICAL);
top->Add(screen_box,0,wxALIGN_CENTER_HORIZONTAL | wxALL,5);

depth_screen = new wxPanel(this,wxID_ANY,wxDefaultPosition,wxSize(SCREEN_WIDTH,SCREEN_HIGHT));
screen_box->Add(depth_screen,0,wxALIGN_CENTER_HORIZONTAL | wxALIGN_CENTER_VERTICAL,5);

hand_screen = new wxPanel(this,wxID_ANY,wxDefaultPosition,wxSize(HAND_ROI_SIZE * 2,HAND_ROI_SIZE * 2));
screen_box->Add(hand_screen,0,wxALIGN_CENTER_HORIZONTAL | wxALIGN_CENTER_VERTICAL,5);

CreateStatusBar(2);
SetStatusText(wxDateTime::Now().Format());
}

// Initializes NiTE, creates the hand tracker, and enables the three
// start-gestures (click, wave, hand raise) that trigger hand tracking.
// NOTE(review): return codes of initialize()/create() are not checked —
// consider verifying them before using the tracker.
void Frame::InitKinect()
{
nite::NiTE::initialize();

hand_tracker.create();
hand_tracker.startGestureDetection(nite::GESTURE_CLICK);
hand_tracker.startGestureDetection(nite::GESTURE_WAVE);
hand_tracker.startGestureDetection(nite::GESTURE_HAND_RAISE);
}

// Asks the worker thread to terminate (Thread::Entry polls TestDestroy()),
// then shuts NiTE down.
Frame::~Frame()
{
thread->Delete(); // signals and waits out the detached worker

//hand_tracker.destroy();
nite::NiTE::shutdown();
}

void Frame::Display()
{
hand_tracker.readFrame(&hand_tracker_frame);

cv::Mat depth_mat(hand_tracker_frame.getDepthFrame().getHeight(),hand_tracker_frame.getDepthFrame().getWidth(),CV_16UC1,(void*)hand_tracker_frame.getDepthFrame().getData());
depth_mat.convertTo(depth_mat,CV_8UC1,DEPTH_SACLE);

const nite::Array<nite::GestureData> &gestures = hand_tracker_frame.getGestures();
CvPoint2D32f position;

for(int i = 0;i < gestures.getSize();++i){

if(gestures[i].isComplete()){

const nite::Point3f &pos = gestures[i].getCurrentPosition();
nite::HandId hand_id;
hand_tracker.startHandTracking(pos,&hand_id);
}
}

const nite::Array<nite::HandData> &hands = hand_tracker_frame.getHands();

for(int i = 0;i < hands.getSize();++i){

const nite::HandData hand = hands[i];

if(hand.isTracking()){ //如果跟蹤到手的運動

const nite::Point3f &pos = hand.getPosition();
hand_tracker.convertHandCoordinatesToDepth(pos.x,pos.y,pos.z,&position.x,&position.y);
float hand_depth = pos.z * DEPTH_SACLE;

/*
* 將ROI設定在手的大小左右,
* 而且ROI範圍不可以超出擷取的影像否則會丟出Exception。
*/
cv::Rect hand_roi;
hand_roi.width = HAND_ROI_SIZE * 2;
hand_roi.height = HAND_ROI_SIZE * 2;
hand_roi.x = position.x - HAND_ROI_SIZE;
hand_roi.y = position.y - HAND_ROI_SIZE;
int ROI_MAX_SIZE_X = SCREEN_WIDTH - (HAND_ROI_SIZE * 2);
int ROI_MAX_SIZE_Y = SCREEN_HIGHT - (HAND_ROI_SIZE * 2);
if(hand_roi.x < 0){hand_roi.x = 0;}
if(hand_roi.x > ROI_MAX_SIZE_X){hand_roi.x = ROI_MAX_SIZE_X;}
if(hand_roi.y < 0){hand_roi.y = 0;}
if(hand_roi.y > ROI_MAX_SIZE_Y){hand_roi.y = ROI_MAX_SIZE_Y;}

cv::Mat hand_roi_mat(cv::Mat(depth_mat,hand_roi).clone());
hand_roi_mat = (hand_roi_mat > (hand_depth - DEPTH_RANGE)) & (hand_roi_mat < (hand_depth + DEPTH_RANGE)); //這裡是關鍵,二值化的閾值取決於手的深度範圍。

cv::medianBlur(hand_roi_mat,hand_roi_mat,5); //做中值濾波使邊緣明顯
cv::Mat hand_roi_debug;
hand_roi_debug = hand_roi_mat.clone();
cvtColor(hand_roi_debug,hand_roi_debug,CV_GRAY2RGB);

std::vector<std::vector<cv::Point> > contours;
cv::findContours(hand_roi_mat,contours,CV_RETR_LIST,CV_CHAIN_APPROX_SIMPLE); //找尋所有可能的多邊形邊緣

if(contours.size()){ //如果有找到

for (int i = 0;i < contours.size();i++){ //則迭代每個多邊形邊緣

std::vector<cv::Point> contour = contours[i];
cv::Mat contour_mat = cv::Mat(contour);
double contour_area = cv::contourArea(contour_mat); //計算該多邊形面積

if(contour_area > LIKELY_THE_HAND_AREA){ //如果大於這個值則可能是手的面積

cv::Scalar center = mean(contour_mat);
cv::Point center_point = cv::Point(center.val[0],center.val[1]);

std::vector<cv::Point> approx_curve;
cv::approxPolyDP(contour_mat,approx_curve,10,true); //逼近該多邊形的邊緣

std::vector<std::vector<cv::Point> > contour_vector;
contour_vector.push_back(approx_curve);
cv::drawContours(hand_roi_debug,contour_vector,0,CV_RGB(255,0,0),3); //畫出該多邊形的邊緣

/*
* 找尋凸包(Convex Hull)並畫出點。
*/
std::vector<int> hull;
cv::convexHull(cv::Mat(approx_curve),hull,false,false);
for(int j = 0;j < hull.size();j++){
int index = hull[j];
cv::circle(hand_roi_debug,approx_curve[index],3,CV_RGB(0,255,0),2);
}

/*
* 找尋凸缺陷(Convexity Defects)並畫出點。
*/
std::vector<CvConvexityDefect> convex_defects;
CvSeq* contour_points;
CvSeq* defects;
CvMemStorage* storage;
CvMemStorage* str_defects;
CvMemStorage* contour_str;
CvConvexityDefect *defect_array = 0;
str_defects = cvCreateMemStorage();
defects = cvCreateSeq(CV_SEQ_KIND_GENERIC|CV_32SC2, sizeof(CvSeq),sizeof(CvPoint),str_defects);
contour_str = cvCreateMemStorage();
contour_points = cvCreateSeq(CV_SEQ_KIND_GENERIC | CV_32SC2,sizeof(CvSeq),sizeof(CvPoint),contour_str);
for(int j = 0; j < (int)approx_curve.size(); j++) {
CvPoint cp = {approx_curve[j].x,approx_curve[j].y};
cvSeqPush(contour_points, &cp);
}
int count = (int)hull.size();
int *convert_hull = (int*)malloc(count * sizeof(int));
for(int j = 0;j < count;j++){
convert_hull[j] = hull.at(j);
}
CvMat hull_mat = cvMat(1,count,CV_32SC1,convert_hull);
storage = cvCreateMemStorage(0);
defects = cvConvexityDefects(contour_points, &hull_mat,storage);
defect_array = (CvConvexityDefect*)malloc(sizeof(CvConvexityDefect)*defects->total);
cvCvtSeqToArray(defects,defect_array,CV_WHOLE_SEQ);
for(int j = 0;j < defects->total;j++){
CvConvexityDefect def;
def.start       = defect_array[j].start;
def.end         = defect_array[j].end;
def.depth_point = defect_array[j].depth_point;
def.depth       = defect_array[j].depth;
convex_defects.push_back(def);
}
for(int j = 0;j < convex_defects.size();j++){
cv::circle(hand_roi_debug,cv::Point(convex_defects[j].depth_point->x,convex_defects[j].depth_point->y),3,CV_RGB(0,0,255),2);
}
cvReleaseMemStorage(&contour_str);
cvReleaseMemStorage(&str_defects);
cvReleaseMemStorage(&storage);
free(defect_array);

/*
* 這裡也算關鍵,直接把逼近的面積除以凸包的面積得到的值來決定手是張開還是合閉。
*/
std::vector<cv::Point> hull_points;
for(int j = 0;j < hull.size();j++){
int curve_index = hull[j];
cv::Point p = approx_curve[curve_index];
hull_points.push_back(p);
}
double hull_area  = cv::contourArea(cv::Mat(hull_points));
double curve_area = cv::contourArea(cv::Mat(approx_curve));
double hand_ratio = curve_area / hull_area;
if(hand_ratio > HAND_IS_GRASPING){
cv::circle(hand_roi_debug,center_point,5,CV_RGB(255,0,255),5); //張手就在手中心畫出淺綠的點
}
else{
cv::circle(hand_roi_debug,center_point,5,CV_RGB(100,220,80),5); //閉手就在手中心畫出粉紅的點
}

IplImage hand_image(hand_roi_debug);
wxClientDC hand_dc(hand_screen);
cvConvertImage(&hand_image,&hand_image,CV_CVTIMG_SWAP_RB);
unsigned char *data;
cvGetRawData(&hand_image,&data);
wxImage *image = new wxImage(hand_image.width,hand_image.height,data,true);
wxBitmap *bitmap = new wxBitmap(*image);
int x,y,width,height;
hand_dc.GetClippingBox(&x,&y,&width,&height);
hand_dc.DrawBitmap(*bitmap,x,y);
delete image;
delete bitmap;
}
}
}
//imwrite("hand.jpg",hand_roi_image);
}
}

IplImage depth_image(depth_mat);
IplImage *convert_image = cvCreateImage(cvGetSize(&depth_image),IPL_DEPTH_8U,3);
cvCvtColor(&depth_image,convert_image,CV_GRAY2BGR);

wxClientDC depth_dc(depth_screen);
cvConvertImage(convert_image,convert_image,CV_CVTIMG_SWAP_RB);
unsigned char *data;
cvGetRawData(convert_image,&data);
wxImage *image = new wxImage(convert_image->width,convert_image->height,data,true);
wxBitmap *bitmap = new wxBitmap(*image);
int x,y,width,height;
depth_dc.GetClippingBox(&x,&y,&width,&height);
depth_dc.DrawBitmap(*bitmap,x,y);

delete image;
delete bitmap;
cvReleaseImage(&convert_image);
}

// wxID_EXIT menu handler: closes the main window (which triggers ~Frame).
void Frame::OnExit(wxCommandEvent &event)
{
Close();
}

// Worker-thread ctor.  Detached, so wxWidgets reclaims the thread object
// automatically once Entry() returns; stores a back-pointer to the window.
Thread::Thread(Frame *parent):wxThread(wxTHREAD_DETACHED),frame(parent)
{
}

/*
 * Thread body: keeps pumping Kinect frames into the window until the
 * frame requests shutdown (Frame::~Frame calls Delete(), which makes
 * TestDestroy() return true).
 */
void* Thread::Entry()
{
    for(;;){
        if(TestDestroy()){
            break;
        }
        frame->Display();
    }

    return NULL;
}









參考: