Recently I wanted to add a check for the level of SSE instruction set support to a library, and then saw in this article that the cpuid instruction is available for this; selecting its function works much like an interrupt, decided by the value in eax.
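For my own reference, a minimal sketch (not taken from the referenced article) of reading the SSE feature bits from cpuid leaf 1 via GCC/Clang's <cpuid.h>; with MSVC the __cpuid intrinsic in <intrin.h> does the same job. The bit positions are the documented ones for leaf 1.
#include <cpuid.h>
#include <stdio.h>

int main()
{
    unsigned int eax, ebx, ecx, edx;
    //leaf 1 (eax = 1) returns the feature flags in ecx/edx
    if(!__get_cpuid(1, &eax, &ebx, &ecx, &edx)){
        printf("cpuid leaf 1 not supported\n");
        return 1;
    }
    printf("SSE   : %d\n", (edx >> 25) & 1);
    printf("SSE2  : %d\n", (edx >> 26) & 1);
    printf("SSE3  : %d\n", (ecx >> 0) & 1);
    printf("SSE4.1: %d\n", (ecx >> 19) & 1);
    printf("SSE4.2: %d\n", (ecx >> 20) & 1);
    return 0;
}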
Tuesday, December 25, 2012
Tuesday, December 18, 2012
wxWidgets Connect Arduino(Windows)
Same as the earlier wxWidgets Connect Arduino (Linux), only this is the Windows version. Since only serial ports are used, enumeration simply tries to create each port one by one; if some day other devices such as USB are needed, writing it as a driver would be easier to handle. Finally, with asynchronous I/O a /Od build had to handle WriteFile's ERROR_IO_PENDING while a /O2 build didn't, and I tested on Windows 7 x64.
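A minimal sketch of the overlapped write path mentioned above, assuming the port was opened with FILE_FLAG_OVERLAPPED; the COM port name is just a placeholder. The point is that a pending WriteFile is not a failure: when GetLastError() returns ERROR_IO_PENDING, you wait for completion with GetOverlappedResult.
#include <windows.h>
#include <stdio.h>

int main()
{
    //"\\\\.\\COM3" is only an example port name
    HANDLE port = CreateFileA("\\\\.\\COM3", GENERIC_READ | GENERIC_WRITE,
                              0, NULL, OPEN_EXISTING, FILE_FLAG_OVERLAPPED, NULL);
    if(port == INVALID_HANDLE_VALUE){
        printf("CreateFile failed: %lu\n", GetLastError());
        return 1;
    }
    OVERLAPPED ov = {0};
    ov.hEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    const char data[] = "hello arduino";
    DWORD written = 0;
    if(!WriteFile(port, data, sizeof(data) - 1, &written, &ov)){
        if(GetLastError() == ERROR_IO_PENDING){
            //the write was queued, wait for it to finish
            GetOverlappedResult(port, &ov, &written, TRUE);
        }
        else{
            printf("WriteFile failed: %lu\n", GetLastError());
        }
    }
    printf("%lu bytes written\n", written);
    CloseHandle(ov.hEvent);
    CloseHandle(port);
    return 0;
}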
Friday, December 7, 2012
Arduino - SD Card
Since work got delayed by teammates again these two days, I went off to study other filtering algorithms for attitude stabilization. The Arduino IDE and libraries don't seem to have a built-in way to save data to a text file, and I didn't want to write a separate receiver program, so I just store the data on an SD card; dumping the text into Google Docs then makes it easy to plot a chart and observe the filtering results. I rarely use SPI and keep forgetting the wiring, so I'm recording it here.
Arduino SD Card
Digital 4 (XCK) ----> CS
Digital 11(MOSI) ----> MOSI(DI)
Digital 12(MISO) ----> MISO(DO)
Digital 13(SCK) ----> SCLK(CK)
VCC 3.3V ----> VCC
GND ----> GND
Store the heartbeat sensor values and turn them into a chart.
#include <SD.h>
int sensor_pin = 0;
int n = 0;
int n_1 = 0;
int diff = 0;
int count = 0;
int elapse_up = 0;
int elapse_down = 0;
int integral_plus = 0;
int integral_minus = 0;
void setup()
{
Serial.begin(9600);
pinMode(10,OUTPUT);
if(!SD.begin(4)){
Serial.println("SD Card Error!");
return ;
}
}
void loop()
{
File file = SD.open("data.txt",FILE_WRITE);
n = analogRead(sensor_pin); //read the current sample
diff = (n - n_1); //subtract the previous sample from the current one (N - (N-1))
n_1 = n; //update the previous sample
file.println(n / 5);
//If the difference is close to zero, we have reached a trough and are about to enter a new pulse (PWM).
//Also check whether the rise (integral_plus) and fall (integral_minus) of the previous pulse
//reached the integral thresholds, to confirm a pulse really occurred.
//If both conditions hold, update all of the state.
if(diff < 10 && diff > -10 &&
integral_plus > 160 && integral_minus < -200){
if(elapse_up > 50 && elapse_up < 400 &&
elapse_down > 96 && elapse_down < 800){
++count;
Serial.print(count);
Serial.println(" Heartbeat");
}
elapse_up = 0;
elapse_down = 0;
integral_plus = 0;
integral_minus = 0;
}
else if(diff > 20 && diff < 280){
//PWM rising
integral_plus += diff;
elapse_up += 20;
}
else if(diff < -20 && diff > -200){
//PWM falling
integral_minus += diff;
elapse_down += 20;
}
file.close();
//sample every 20 ms, i.e. 50 samples per second.
delay(20);
}
Kalman Filter
Basically all the parameters and members mean the same things as in a simple Kalman filter (see the seven predict/update steps in the wiki reference). At initialization, r_measure is the measurement noise, q_angle and q_bias decide whether the prediction trusts the accelerometer or the gyroscope more, and arg_angle is the initial angle. In the member function Update, lines of code with no blank line between them belong to the same group of equations:
1. Take the current gyro rate minus the previously estimated bias, then add that rate times dt to the angle.
2. P is a 2x2 matrix whose entries are the derived result used to track the estimate error of the prediction.
3. Measure the difference between the current accelerometer angle and the last angle.
4. Compute the covariance of prediction and measurement (the innovation covariance).
5. Compute the optimal Kalman gain.
6. Multiply the Kalman gain by that angle difference to decide how much to update the angle and the bias.
7. Multiply the Kalman gain by the current estimate error to update the estimate error for the next step.
Of these seven steps the first two are the prediction and the last five are the actual update, in the same order as the seven predict/update steps in the wiki reference above; statistically, the whole system observed over time is a hidden Markov model.
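As a compact sketch in standard notation (my own reading of the code below, not something given in the post), the seven groups correspond to the following, with gyro rate u and accelerometer angle z:
\hat\theta \leftarrow \hat\theta + (u - \hat b)\,dt
P \leftarrow F P F^{T} + Q,\qquad F = \begin{bmatrix}1 & -dt\\ 0 & 1\end{bmatrix},\quad Q = \begin{bmatrix}Q_{angle}\,dt & 0\\ 0 & Q_{bias}\,dt\end{bmatrix}
y = z - \hat\theta
S = P_{00} + R_{measure}
K = \begin{bmatrix}P_{00}/S\\ P_{10}/S\end{bmatrix}
\hat\theta \leftarrow \hat\theta + K_{0}\,y,\qquad \hat b \leftarrow \hat b + K_{1}\,y
P \leftarrow (I - K H)\,P,\qquad H = \begin{bmatrix}1 & 0\end{bmatrix}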
Acc is the accelerometer alone, CF is the complementary filter with gyro correction, and KF is the Kalman filter with gyro correction.
Kalman Filter:
#ifndef __KALMAN_FILTER__
#define __KALMAN_FILTER__
class KalmanFilter
{
public:
KalmanFilter(double r_measure,double q_angle,double q_bias,double arg_angle)
{
Q_angle = q_angle;
Q_bias = q_bias;
R_measure = r_measure;
angle = arg_angle;
bias = 0;
P[0][0] = 0;
P[0][1] = 0;
P[1][0] = 0;
P[1][1] = 0;
}
/* Accelerometer Angle , Gyro Rate , dt */
double Update(double acc,double gyro,double dt)
{
rate = gyro - bias;
angle += rate * dt;
P[0][0] += dt * (dt * P[1][1] - P[0][1] - P[1][0] + Q_angle);
P[0][1] -= dt * P[1][1];
P[1][0] -= dt * P[1][1];
P[1][1] += Q_bias * dt;
angle_diff = acc - angle;
S = P[0][0] + R_measure;
K[0] = P[0][0] / S;
K[1] = P[1][0] / S;
angle += K[0] * angle_diff;
bias += K[1] * angle_diff;
//keep the pre-update covariance so both rows use the same P values
double P00 = P[0][0];
double P01 = P[0][1];
P[0][0] -= K[0] * P00;
P[0][1] -= K[0] * P01;
P[1][0] -= K[1] * P00;
P[1][1] -= K[1] * P01;
return angle;
}
private:
double R_measure;
double Q_angle;
double Q_bias;
double angle;
double bias;
double rate;
double angle_diff;
double S;
double P[2][2];
double K[2];
};
#endif
Used:
The initial parameters below work for almost any IMU unless you specifically need to tune them yourself.
KalmanFilter KFx(0.03f,0.001f,0.003f,0.0f);
KalmanFilter KFy(0.03f,0.001f,0.003f,0.0f);
unsigned long last_KF_computing_time; //us
void loop()
{
double KF_x_angle = KFx.Update(AccAngleX,GyroRateYaw,
(double)(micros() - last_KF_computing_time) / 1000000.0f);
double KF_y_angle = KFy.Update(AccAngleY,GyroRatePitch,
(double)(micros() - last_KF_computing_time) / 1000000.0f);
last_KF_computing_time = micros();
}
Thursday, December 6, 2012
Complementary Filter
The complementary filter is another, simpler way to obtain smoothed sample values, and it is much cheaper to compute than a Kalman filter. To fuse the two sensors you just high-pass filter one of them (the gyro responds well to short-term motion, so its high-frequency content is more useful) and low-pass filter the other (the accelerometer is more accurate in long-term static conditions, so its low-frequency content is more useful) and add them together, with the two coefficients summing to one. This article explains it in more detail and is what I referenced. Multiply the gyro's angular rate by the time step dt and integrate continuously to get the angle accumulated over that time, then blend it with the accelerometer angle using the complementary filter. Also, when the WMP and the Wii Nunchuk are used in extension (passthrough) mode, the I2C handling differs from using a single device on its own (see Activated Wii Motion Plus in Nunchuck passthrough mode); in this mode both sensors seem to have a sampling delay of about 3 ms (according to the comment in computeIMU in MultiWii Source Code - IMU.ino; I haven't verified it myself).
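Written out, the blend computed in ComplementaryFilter below is simply (a sketch in my own notation; this code uses \alpha = 0.93):
\theta_{k} = \alpha\,\bigl(\theta_{k-1} + \omega_{gyro}\,dt\bigr) + (1 - \alpha)\,\theta_{acc},\qquad 0 \le \alpha \le 1
where \omega_{gyro}\,dt is the angle the gyro accumulated over the step and \theta_{acc} is the accelerometer angle; the two coefficients \alpha and 1-\alpha sum to one as required.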
Orientation
Acc (accelerometer only) shows much more prominent noise; AccGyro, which uses the gyro as the complement, is smoother.
Complementary Filter.ino
#include <Wire.h>
uint8_t buffer[6];
int xID;
double AccAngleX = 0,AccAngleY = 0;
int GyroLastYaw = 0,GyroLastPitch = 0,GyroLastRoll = 0;
double GyroAngleYaw = 0,GyroAnglePitch = 0,GyroAngleRoll = 0;
double HighPass = 0.93f,LowPass = 0.07f;
double AngleX = 0,AngleY = 0;
unsigned long last_loop_time; //ms
unsigned long last_gyro_computing_time; //us
void setup()
{
Serial.begin(115200);
Wire.begin();
Init6DOF();
last_loop_time = millis();
last_gyro_computing_time = micros();
}
void loop()
{
if(millis() > (last_loop_time + 10)){
IICRead(buffer,0x52,6);
if((buffer[5] & 0x03) == 0x00){
ComputingNunchuk(buffer);
}
else if((buffer[5] & 0x03) == 0x02){
ComputingWMP(buffer);
}
ComplementaryFilter();
IICWrite(0x52,0x00);
last_loop_time = millis();
}
}
void Init6DOF()
{
delay(100);
IICWrite(0x53,0xFE,0x05);
Serial.println("Passthrough Mode Ok!");
delay(100);
IICWrite(0x53,0xF0,0x55);
Serial.println("Init WMP Ok!");
delay(100);
IICWrite(0x52,0xFA);
Serial.println("Set Reading Address 0xFA Ok!");
delay(100);
IICRead(buffer,0x52,6);
xID = buffer[0] + buffer[1] + buffer[2] +
buffer[3] + buffer[4] + buffer[5];
Serial.print("Extension Controller xID = 0x");
Serial.println(xID,HEX);
if(xID == 0xCB){
Serial.println("WMP Connected But Not Avtivared!");
}
if(xID == 0xCE){
Serial.println("WMP Connected And Avtivared!");
}
if(xID == 0x00){
Serial.println("WMP Not Connected!");
}
delay(100);
IICWrite(0x52,0x8);
Serial.println("Set Reading Address 0x08 Ok!");
delay(100);
IICWrite(0x52,0x00);
}
double Map(double value,double Input_Min,double Input_Max,double Output_Min,double Output_Max)
{
double rValue = (value - Input_Min) * (Output_Max - Output_Min) / (Input_Max - Input_Min) + Output_Min;
double rMin,rMax;
if(Output_Min < Output_Max){
rMin = Output_Min;
rMax = Output_Max;
}
else{
rMin = Output_Max;
rMax = Output_Min;
}
if(rValue < rMin){
return rMin;
}
if(rValue > rMax){
return rMax;
}
return rValue;
}
void ComputingNunchuk(uint8_t *buf)
{
int accel_x_axis = (buf[2] << 2) + ((buf[5] >> 3) & 2);
int accel_y_axis = (buf[3] << 2) + ((buf[5] >> 4) & 2);
int accel_z_axis = (buf[4] << 2) + ((buf[5] >> 5) & 6);
int mapX = Map(accel_x_axis,300.0f,700.0f,-90.0f,90.0f);
int mapY = Map(accel_y_axis,300.0f,700.0f,-90.0f,90.0f);
int mapZ = Map(accel_z_axis,360.0f,760.0f,-90.0f,90.0f);
AccAngleX = atan2(mapX,mapZ) / 3.14159 * 180.0f;
AccAngleY = atan2(mapY,mapZ) / 3.14159 * 180.0f;
/*
Serial.print(accel_x_axis);
Serial.print(" ");
Serial.print(accel_y_axis);
Serial.print(" ");
Serial.println(accel_z_axis);
*/
}
void ComputingWMP(uint8_t *buf)
{
int yaw = (((buf[5] & 0xFC) << 6) + buf[0]);
int pitch = (((buf[4] & 0xFC) << 6) + buf[1]);
int roll = (((buf[3] & 0xFC) << 6) + buf[2]);
double GyroDiffYaw = (yaw - GyroLastYaw) / 14.375f;
double GyroDiffPitch = (pitch - GyroLastPitch) / 14.375f;
double GyroDiffRoll = (roll - GyroLastRoll) / 14.375f;
GyroAngleYaw = GyroDiffYaw *
(double)(micros() - last_gyro_computing_time) / 1000000.0f;
GyroAnglePitch = GyroDiffPitch *
(double)(micros() - last_gyro_computing_time) / 1000000.0f;
GyroAngleRoll = GyroDiffRoll *
(double)(micros() - last_gyro_computing_time) / 1000000.0f;
last_gyro_computing_time = micros();
GyroLastYaw = yaw;
GyroLastPitch = pitch;
GyroLastRoll = roll;
/*
Serial.print(yaw);
Serial.print(" ");
Serial.print(pitch);
Serial.print(" ");
Serial.println(roll);
*/
/*
Serial.print(GyroAngleYaw);
Serial.print(" ");
Serial.print(GyroAnglePitch);
Serial.print(" ");
Serial.println(GyroAngleRoll);
*/
}
void ComplementaryFilter()
{
AngleX = (HighPass * (AngleX + GyroAngleYaw)) + (LowPass * AccAngleX);
AngleY = (HighPass * (AngleY + GyroAnglePitch)) + (LowPass * AccAngleY);
Serial.print(AngleX);
Serial.print(" ");
Serial.println(AngleY);
}
void IICWrite(uint8_t address,uint8_t register_address)
{
Wire.beginTransmission(address);
Wire.write(register_address);
Wire.endTransmission();
}
void IICWrite(uint8_t address,uint8_t register_address,uint8_t data)
{
Wire.beginTransmission(address);
Wire.write(register_address);
Wire.write(data);
Wire.endTransmission();
}
void IICRead(uint8_t *buf,uint8_t address,uint8_t length)
{
Wire.requestFrom(address,length);
for(int i = 0;Wire.available();++i){
buf[i] = Wire.read();
}
}
Sunday, December 2, 2012
Windows Driver Loader
One of this month's tasks was to use a driver a teammate gave me to do I/O control on a certain device. These past few days I found that when DriverMonitor loads the driver, the path sometimes goes wrong: the ImagePath registry value that CreateService writes under HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Services appears slightly corrupted, possibly due to an encoding problem. I was too lazy to write an INF, and I'm not good at using the tools that generate one XD. There is another problem I can't explain: according to a tool that lists running system services, the test driver that should have been loaded no longer exists, yet DriverMonitor apparently can't start it again and only a reboot fixes it. The day before yesterday I spent a few hours reversing DriverMonitor and studying a few of its functions because of these issues, but couldn't see any difference or problem in its flow, so I ended up writing a plain loader of my own to handle it... That said, the teammate's driver is still flaky: writing I/O data to the device still BSoDs from time to time......... Orz
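For reference, a minimal sketch of the kind of plain loader described above, using only the documented Service Control Manager calls; the service name and .sys path are placeholders, not the actual driver.
#include <windows.h>
#include <stdio.h>

int main()
{
    SC_HANDLE scm = OpenSCManagerA(NULL, NULL, SC_MANAGER_ALL_ACCESS);
    if(!scm){
        printf("OpenSCManager failed: %lu\n", GetLastError());
        return 1;
    }
    //placeholder name and path for the test driver
    SC_HANDLE service = CreateServiceA(scm, "TestDriver", "TestDriver",
                                       SERVICE_ALL_ACCESS, SERVICE_KERNEL_DRIVER,
                                       SERVICE_DEMAND_START, SERVICE_ERROR_NORMAL,
                                       "C:\\Drivers\\test.sys",
                                       NULL, NULL, NULL, NULL, NULL);
    if(!service && GetLastError() == ERROR_SERVICE_EXISTS){
        service = OpenServiceA(scm, "TestDriver", SERVICE_ALL_ACCESS);
    }
    if(service){
        if(!StartServiceA(service, 0, NULL)){
            printf("StartService failed: %lu\n", GetLastError());
        }
        //later, to unload it again: stop, delete, and close the handles
        SERVICE_STATUS status;
        ControlService(service, SERVICE_CONTROL_STOP, &status);
        DeleteService(service);
        CloseServiceHandle(service);
    }
    CloseServiceHandle(scm);
    return 0;
}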
Friday, November 16, 2012
Visual CAPTCHA Detection
A few weeks ago I wrote a background-removal demo to show in class and haven't touched it since, so I'm posting it here so it's there whenever I feel like improving it. For CAPTCHA images, my impression is that if the background is complex enough, the complexity of the background-removal algorithm gets very high; as for noise, line-removal algorithms or filtering can easily clean up the common kinds. Distorting the text itself lowers the recognition rate of OCR libraries such as Tesseract (even when background and noise are completely removed), and the thresholds used by several of these methods also have to be tuned case by case. In summary, my feeling is that with a complex background a simple average is hard to use and the background may have to be removed region by region, but with a targeted design most of it can probably still be removed; it's just a matter of complexity. Once the text is distorted beyond a certain point, though, common OCR libraries also struggle, so text distortion seems necessary, and of course it's best not to use CAPTCHA images produced by a single family of tools but to mix several. I haven't tried training on tens of thousands of samples with ML to see whether that effectively improves recognition, though. Here and here are analyses of CAPTCHA types.
extractor.h
#ifndef __EXTRACTOR__
#define __EXTRACTOR__
#include<cv.h>
#include<cxcore.h>
#include<highgui.h>
#include <iostream>
#include <map>
class Extractor
{
public:
Extractor(char*);
long Average(); //It depends
void BackgroundErase(); //It depends
void ColorFull();
void NoiseClear(); //It depends
void Cut();
private:
long avg;
IplImage *image;
};
#endif
extractor.cpp
#include "extractor.h"
Extractor::Extractor(char *name)
{
image = cvLoadImage(name,1);
}
long Extractor::Average()
{
cvSmooth(image,image,CV_BLUR);
avg = 0;
for(int y = 0;y < 3;++y){
for(int x = 0;x < 3;++x){
long temp = 0;
temp += image->imageData[y * image->widthStep + (x * 3)] + image->imageData[y * image->widthStep + (x * 3) + 1] + image->imageData[y * image->widthStep + (x * 3) + 2];
avg += temp / 3;
}
}
for(int y = image->height - 1;y > image->height - 4;--y){
for(int x = 0;x < 3;++x){
long temp = 0;
temp += image->imageData[y * image->widthStep + (x * 3)] + image->imageData[y * image->widthStep + (x * 3) + 1] + image->imageData[y * image->widthStep + (x * 3) + 2];
avg += temp / 3;
}
}
for(int y = 0;y < 3;++y){
for(int x = image->width - 1;x > image->width - 4;--x){
long temp = 0;
temp += image->imageData[y * image->widthStep + (x * 3)] + image->imageData[y * image->widthStep + (x * 3) + 1] + image->imageData[y * image->widthStep + (x * 3) + 2];
avg += temp / 3;
}
}
for(int y = image->height - 1;y > image->height - 4;--y){
for(int x = image->width - 1;x > image->width - 4;--x){
long temp = 0;
temp += image->imageData[y * image->widthStep + (x * 3)] + image->imageData[y * image->widthStep + (x * 3) + 1] + image->imageData[y * image->widthStep + (x * 3) + 2];
avg += temp / 3;
}
}
avg /= 36;
return avg ;
}
void Extractor::BackgroundErase()
{
long range = 20;
for(int y = 0;y < image->height;++y){
for(int x = 0;x < image->widthStep;++x){
long temp = (image->imageData[y * image->widthStep + x] +
image->imageData[y * image->widthStep + x + 1] +
image->imageData[y * image->widthStep + x + 2]) / 3;
if(temp < (avg + range) && temp > (avg - range)){
image->imageData[y * image->widthStep + x] = image->imageData[y * image->widthStep + x + 1] = image->imageData[y * image->widthStep + x + 2] = 255;
}
}
}
}
void Extractor::ColorFull()
{
for(int y = 0;y < image->height;++y){
for(int x = 0;x < image->widthStep - 3;x += 3){
long temp = (image->imageData[y * image->widthStep + x] +
image->imageData[y * image->widthStep + x + 1] +
image->imageData[y * image->widthStep + x + 2]) / 3;
if(temp > 0){
image->imageData[y * image->widthStep + x] = image->imageData[y * image->widthStep + x + 1] = image->imageData[y * image->widthStep + x + 2] = 0;
}
}
}
}
void Extractor::NoiseClear()
{
int range = 20;
IplImage *itemp = cvCreateImage(cvGetSize(image),IPL_DEPTH_8U,3);
cvCopy(image,itemp);
//skip the one-pixel border so the eight-neighbour lookups stay inside the image
for(int y = 1;y < itemp->height - 1;++y){
for(int x = 3;x < itemp->widthStep - 3;x += 3){
long temp = (itemp->imageData[y * itemp->widthStep + x] +
itemp->imageData[y * itemp->widthStep + x + 1] +
itemp->imageData[y * itemp->widthStep + x + 2]) / 3;
//ref accumulates the per-channel mean of each of the eight neighbouring pixels
long ref = ((itemp->imageData[(y - 1) * itemp->widthStep + x] +
itemp->imageData[(y - 1) * itemp->widthStep + x + 1] +
itemp->imageData[(y - 1) * itemp->widthStep + x + 2]) / 3)
+
((itemp->imageData[(y + 1) * itemp->widthStep + x] +
itemp->imageData[(y + 1) * itemp->widthStep + x + 1] +
itemp->imageData[(y + 1) * itemp->widthStep + x + 2]) / 3)
+
((itemp->imageData[y * itemp->widthStep + (x + 3)] +
itemp->imageData[y * itemp->widthStep + (x + 3) + 1] +
itemp->imageData[y * itemp->widthStep + (x + 3) + 2]) / 3)
+
((itemp->imageData[y * itemp->widthStep + (x - 3)] +
itemp->imageData[y * itemp->widthStep + (x - 3) + 1] +
itemp->imageData[y * itemp->widthStep + (x - 3) + 2]) / 3)
+
((itemp->imageData[(y + 1) * itemp->widthStep + (x - 3)] +
itemp->imageData[(y + 1) * itemp->widthStep + (x - 3) + 1] +
itemp->imageData[(y + 1) * itemp->widthStep + (x - 3) + 2]) / 3)
+
((itemp->imageData[(y + 1) * itemp->widthStep + (x + 3)] +
itemp->imageData[(y + 1) * itemp->widthStep + (x + 3) + 1] +
itemp->imageData[(y + 1) * itemp->widthStep + (x + 3) + 2]) / 3)
+
((itemp->imageData[(y - 1) * itemp->widthStep + (x + 3)] +
itemp->imageData[(y - 1) * itemp->widthStep + (x + 3) + 1] +
itemp->imageData[(y - 1) * itemp->widthStep + (x + 3) + 2]) / 3)
+
((itemp->imageData[(y - 1) * itemp->widthStep + (x - 3)] +
itemp->imageData[(y - 1) * itemp->widthStep + (x - 3) + 1] +
itemp->imageData[(y - 1) * itemp->widthStep + (x - 3) + 2]) / 3);
ref /= 8;
if((temp + range) < ref){
image->imageData[y * image->widthStep + x] = image->imageData[y * image->widthStep + x + 1] = image->imageData[y * image->widthStep + x + 2] = 255;
}
}
}
}
void Extractor::Cut()
{
IplImage *gray = cvCreateImage(cvSize(image->width,image->height),IPL_DEPTH_8U ,1);
cvCvtColor(image,gray,CV_RGB2GRAY);
cvCanny(gray,gray,30,90);
cvSmooth(gray,gray,CV_BLUR);
cvSmooth(gray,gray,CV_BLUR);
cvSmooth(gray,gray,CV_BLUR);
std::multimap<int,CvRect> list;
CvMemStorage* storage = cvCreateMemStorage( 0 );
CvSeq* contours = NULL;
cvFindContours(gray, storage, &contours, sizeof( CvContour ), CV_RETR_LIST,CV_CHAIN_APPROX_NONE);
for( ; contours != NULL; contours = contours->h_next ){
CvRect rect = cvBoundingRect( contours, 0 );
//cvRectangle(image, cvPoint( rect.x, rect.y ),cvPoint( rect.x + rect.width, rect.y + rect.height ), cvScalar(0,0,255), 0 );
list.insert(std::make_pair(rect.width * rect.height,rect));
}
//cvSaveImage("find.jpg",image);
int captcha_numbers = 5;
std::multimap<int,CvRect> result;
std::multimap<int,CvRect>::reverse_iterator rit = list.rbegin();
for(int i = 0;rit != list.rend() && i < captcha_numbers;++rit,++i){
result.insert(std::make_pair(rit->second.x,rit->second));
}
char name[] = "1.jpg";
std::multimap<int,CvRect>::iterator it = result.begin();
for(;it != result.end();++it){
cvSetImageROI(image,it->second);
cvSaveImage(name,image);
name[0] += 1;
//std::cout << it->first << std::endl;
}
}
main.cpp
#include "extractor.h"
int main(int argc,char**argv)
{
char *name = argv[1];
if(name){
Extractor extractor(name);
/*************pipeline*************/
extractor.Average();
extractor.BackgroundErase();
extractor.ColorFull();
extractor.NoiseClear();
extractor.Cut();
}
return 0;
}
Target samples:
(This set failed: the 7 and the 8 were treated as the same region.)
Tuesday, November 6, 2012
Exhausting...
Lately it feels like I'm repeating exactly what I was doing at this time last year. I've always felt overly routine work doesn't suit me; whether it's a job or school, too much routine makes me restless. A while ago I rewrote the rendering engine I wrote long ago and first tried building a 2D ACT game engine with it for validation; if the monster AI and the physics are split out and designed separately with a script such as Lua, it could be used for any ACT. When I have time I'll slowly write another 3D RPG engine to validate it further.
I bought a very interesting thing called a VFD tube on 沒屋頂 (an online auction site); it looks great, and once I find someone to source the driver IC I'll design a PCB and turn it into a clock.
There is still a lot to learn and research, but time is hard to pin down; I'm honestly not very good at time management, and I hope to finish most of it before the New Year.
Saturday, October 13, 2012
wxWidgets With DirectX9 HLSL
Today I dug up a practice rendering engine I wrote with MFC + D3D9 and wanted to port it to wxWidgets. Just use wxWindow's GetHandle to get the platform's native handle and you can create the D3D device; GetHWND does the same thing, and creating a D3D10 device works the same way. After that, setting the vertex shader or pixel shader is the D3D device's job.
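Condensed, the handle-to-device step looks like this (a sketch distilled from the full listing below; the fallback device types and most error handling are trimmed):
#include <wx/wx.h>
#include <d3d9.h>

//the wxWindow (or wxPanel) hands us its native HWND and Direct3D does the rest
LPDIRECT3DDEVICE9 CreateDeviceFor(wxWindow *target, LPDIRECT3D9 d3d)
{
    HWND hwnd = (HWND)target->GetHandle(); //GetHWND() gives the same handle on Windows
    D3DPRESENT_PARAMETERS pp;
    ZeroMemory(&pp, sizeof(pp));
    pp.Windowed = TRUE;
    pp.SwapEffect = D3DSWAPEFFECT_DISCARD;
    pp.BackBufferFormat = D3DFMT_UNKNOWN;
    pp.hDeviceWindow = hwnd;
    LPDIRECT3DDEVICE9 device = NULL;
    if(d3d->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hwnd,
                         D3DCREATE_HARDWARE_VERTEXPROCESSING, &pp, &device) != D3D_OK){
        return NULL;
    }
    return device;
}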
render_data.h
#ifndef __RENDER_DATA__
#define __RENDER_DATA__
#define SAFE_RELEASE(x) if (x){x->Release();x = NULL;}
struct VertexTexture
{
float Position[3];
float Texcoord[2];
};
VertexTexture quad[4] =
{
{{-1.0f, -1.0f, 0.0f},{0.0f, 1.0f}},
{{ 1.0f, -1.0f, 0.0f},{1.0f, 1.0f}},
{{-1.0f, 1.0f, 0.0f},{0.0f, 0.0f}},
{{ 1.0f, 1.0f, 0.0f},{1.0f, 0.0f}}
};
#endif
shader.hlsl
sampler2D Sampler;
uniform float4x4 world_view_proj_matrix : register(c0);
struct VS_INPUT
{
float3 Position : POSITION;
float2 Texcoord : TEXCOORD;
};
struct VS_OUTPUT
{
float4 Position : POSITION;
float2 Texcoord : TEXCOORD0;
};
VS_OUTPUT VS(VS_INPUT In)
{
VS_OUTPUT Out;
Out.Position = mul(world_view_proj_matrix,float4(In.Position, 1.0f));
Out.Texcoord = In.Texcoord;
return Out;
}
float4 PS(VS_OUTPUT In) : COLOR
{
float4 color = tex2D(Sampler, In.Texcoord);
return color;
}
wxDirectX9.h
#ifndef __WX_DIRECT_X__
#define __WX_DIRECT_X__
#include <wx/wx.h>
#include <D3D9.h>
#include <D3DX9.h>
#include "render_data.h"
namespace{
HWND g_hwnd = NULL;
LPDIRECT3D9 g_pD3D = NULL;
LPDIRECT3DDEVICE9 g_pD3DDevice = NULL;
D3DPRESENT_PARAMETERS g_pD3DPresent;
LPDIRECT3DTEXTURE9 g_pTexture = NULL;
LPDIRECT3DVERTEXSHADER9 g_pVertexShaderDX9 = NULL;
LPDIRECT3DPIXELSHADER9 g_pPixelShaderDX9 = NULL;
}
class App:public wxApp
{
public:
bool OnInit();
int OnExit();
};
class Viewport:public wxPanel
{
public:
Viewport(wxWindow*,wxSize);
~Viewport();
void Reset(wxSizeEvent&);
void Update(wxIdleEvent&);
void Render(wxEraseEvent&);
void InitDX9State();
private:
D3DXVECTOR3 eye;
D3DXVECTOR3 lookat;
D3DXVECTOR3 up;
D3DXMATRIX world_matrix;
D3DXMATRIX view_matrix;
D3DXMATRIX projection_matrix;
static const unsigned long fps = 60;
unsigned long begin,last;
float angle;
DECLARE_EVENT_TABLE()
};
class Frame:public wxFrame
{
public:
Frame(const wxString&,wxSize);
~Frame();
void OnExit(wxCommandEvent&);
bool InitD3D9Device();
void ReleaseD3D9Device();
LPDIRECT3DVERTEXSHADER9 LoadVertexShaderDX9(const wxString&,const wxString&,const wxString&);
LPDIRECT3DPIXELSHADER9 LoadPixelShaderDX9(const wxString&,const wxString&,const wxString&);
void CreateUI();
private:
Viewport *viewport;
DECLARE_EVENT_TABLE()
};
BEGIN_EVENT_TABLE(Viewport,wxPanel)
EVT_IDLE(Viewport::Update)
EVT_ERASE_BACKGROUND(Viewport::Render)
EVT_SIZE(Viewport::Reset)
END_EVENT_TABLE()
BEGIN_EVENT_TABLE(Frame,wxFrame)
EVT_MENU(wxID_EXIT,Frame::OnExit)
END_EVENT_TABLE()
IMPLEMENT_APP(App)
DECLARE_APP(App)
#endif
wxDirectX9.cpp
#include "wxDirectX9.h"
bool App::OnInit()
{
Frame *frame = new Frame(wxT("wxDirectX9"),wxSize(512,512));
frame->Show(true);
return true;
}
int App::OnExit()
{
return wxApp::OnExit();
}
Frame::Frame(const wxString &title,wxSize size):wxFrame(NULL,wxID_ANY,title,wxDefaultPosition,size)
{
viewport = new Viewport(this,size);
g_hwnd = (HWND)viewport->GetHandle();
InitD3D9Device();
g_pVertexShaderDX9 = LoadVertexShaderDX9(wxT("shader.hlsl"),wxT("VS"),wxT("vs_3_0"));
g_pPixelShaderDX9 = LoadPixelShaderDX9(wxT("shader.hlsl"),wxT("PS"),wxT("ps_3_0"));
viewport->InitDX9State();
CreateUI();
}
Frame::~Frame()
{
ReleaseD3D9Device();
}
void Frame::CreateUI()
{
wxMenu *file = new wxMenu;
file->Append(wxID_EXIT,wxT("E&xit\tAlt-q"),wxT("Exit"));
wxMenuBar *bar = new wxMenuBar;
bar->Append(file,wxT("File"));
SetMenuBar(bar);
}
void Frame::OnExit(wxCommandEvent &event)
{
Close();
}
bool Frame::InitD3D9Device()
{
if(g_hwnd == NULL){
return false;
}
RECT rect;
GetWindowRect(g_hwnd,&rect);
float width = rect.right - rect.left;
float height = rect.bottom - rect.top;
if ( width == 0 || height == 0 ){
return false;
}
if( NULL == (g_pD3D = Direct3DCreate9(D3D_SDK_VERSION)) ){
return false;
}
ZeroMemory(&g_pD3DPresent, sizeof(g_pD3DPresent));
g_pD3DPresent.Windowed = TRUE;
g_pD3DPresent.SwapEffect = D3DSWAPEFFECT_DISCARD;
g_pD3DPresent.BackBufferFormat = D3DFMT_UNKNOWN;
g_pD3DPresent.BackBufferCount = 1;
g_pD3DPresent.EnableAutoDepthStencil = TRUE;
g_pD3DPresent.AutoDepthStencilFormat = D3DFMT_D24S8;
g_pD3DPresent.MultiSampleType = (D3DMULTISAMPLE_TYPE)0;
g_pD3DPresent.hDeviceWindow = g_hwnd;
bool device_initialized = false;
const int device_types = 4;
struct sDeviceType
{
D3DDEVTYPE type;
DWORD behavior;
};
sDeviceType device_type[device_types] =
{
{D3DDEVTYPE_HAL,D3DCREATE_HARDWARE_VERTEXPROCESSING},
{D3DDEVTYPE_HAL,D3DCREATE_MIXED_VERTEXPROCESSING},
{D3DDEVTYPE_HAL,D3DCREATE_SOFTWARE_VERTEXPROCESSING},
{D3DDEVTYPE_REF,D3DCREATE_SOFTWARE_VERTEXPROCESSING}
};
for (int type=0;type < device_types;++type)
{
if(g_pD3D->CreateDevice(D3DADAPTER_DEFAULT,device_type[type].type,g_hwnd,device_type[type].behavior,&g_pD3DPresent,&g_pD3DDevice) == D3D_OK)
{
device_initialized = true;
break;
}
}
return device_initialized;
}
void Frame::ReleaseD3D9Device()
{
SAFE_RELEASE(g_pD3DDevice);
SAFE_RELEASE(g_pD3D);
SAFE_RELEASE(g_pVertexShaderDX9);
SAFE_RELEASE(g_pPixelShaderDX9);
SAFE_RELEASE(g_pTexture);
}
LPDIRECT3DVERTEXSHADER9 Frame::LoadVertexShaderDX9(const wxString &filename,const wxString &entry,const wxString &profile)
{
LPD3DXBUFFER shader = NULL;
LPD3DXBUFFER error_msg = NULL;
DWORD flags = 0;
HRESULT result = D3DXCompileShaderFromFile(filename.wc_str(),NULL,NULL,entry.mb_str(),profile.mb_str(),flags,&shader,&error_msg,NULL);
if(result != S_OK){return NULL;}
LPDIRECT3DVERTEXSHADER9 vertex_shader = NULL;
result = g_pD3DDevice->CreateVertexShader((DWORD*)shader->GetBufferPointer(),&vertex_shader);
if(result != S_OK){return NULL;}
shader->Release();
return vertex_shader;
}
LPDIRECT3DPIXELSHADER9 Frame::LoadPixelShaderDX9(const wxString &filename,const wxString &entry,const wxString &profile)
{
LPD3DXBUFFER shader = NULL;
LPD3DXBUFFER error_msg = NULL;
DWORD flags = 0;
HRESULT result = D3DXCompileShaderFromFile(filename.wc_str(),NULL,NULL,entry.mb_str(),profile.mb_str(),flags,&shader,&error_msg,NULL);
if(result != S_OK){return NULL;}
LPDIRECT3DPIXELSHADER9 pixel_shader = NULL;
result = g_pD3DDevice->CreatePixelShader((DWORD*)shader->GetBufferPointer(),&pixel_shader);
if(result != S_OK){return NULL;}
shader->Release();
return pixel_shader;
}
Viewport::Viewport(wxWindow *parent,wxSize size):wxPanel(parent,wxID_ANY,wxDefaultPosition,size),eye(0.0f, 3.0f, 3.0f),lookat(0.0f, 0.0f, 0.0f),up(0.0f, -1.0f, 0.0f)
{
begin = last = 0;
angle = 0.0f;
}
Viewport::~Viewport()
{
}
void Viewport::InitDX9State()
{
int width,height;
GetSize(&width,&height);
float aspect = (float)height / (float)width;
D3DXMatrixPerspectiveFovRH(&projection_matrix,45.0f * (3.141592654f/180.0f),aspect,0.1f,10.0f);
D3DXMatrixLookAtRH(&view_matrix,&eye,&lookat,&up);
D3DXMatrixIdentity(&world_matrix);
D3DXMATRIX world_view_proj_matrix = world_matrix * view_matrix * projection_matrix;
g_pD3DDevice->SetVertexShaderConstantF(0,(float*)&world_view_proj_matrix,4);
g_pD3DDevice->SetSamplerState(0, D3DSAMP_MAGFILTER, D3DTEXF_LINEAR);
g_pD3DDevice->SetSamplerState(0, D3DSAMP_MINFILTER, D3DTEXF_LINEAR);
g_pD3DDevice->SetSamplerState(0, D3DSAMP_MIPFILTER, D3DTEXF_LINEAR);
g_pD3DDevice->SetSamplerState(0, D3DSAMP_ADDRESSU, D3DTADDRESS_CLAMP);
g_pD3DDevice->SetSamplerState(0, D3DSAMP_ADDRESSV, D3DTADDRESS_CLAMP);
g_pD3DDevice->SetTextureStageState(0, D3DTSS_TEXCOORDINDEX, 0);
g_pD3DDevice->SetRenderState(D3DRS_LIGHTING, FALSE);
D3DXCreateTextureFromFileEx(g_pD3DDevice,L"lena.bmp",
D3DX_DEFAULT,D3DX_DEFAULT,D3DX_DEFAULT,0,D3DFMT_UNKNOWN,
D3DPOOL_MANAGED,D3DX_DEFAULT,
D3DX_DEFAULT,0,NULL, NULL,
&g_pTexture);
}
void Viewport::Update(wxIdleEvent &event)
{
begin = wxGetLocalTimeMillis().ToLong();
if(begin - last > (1000 / fps)){
static float angle = 0.0f;
angle += 0.01f;
D3DXMatrixRotationZ(&world_matrix,angle);
D3DXMATRIX world_view_proj_matrix = world_matrix * view_matrix * projection_matrix;
g_pD3DDevice->SetVertexShaderConstantF(0,(float*)&world_view_proj_matrix,4);
}
Refresh();
}
void Viewport::Reset(wxSizeEvent &event)
{
RECT rect;
GetWindowRect(g_hwnd, &rect);
g_pD3DPresent.BackBufferWidth = 0;
g_pD3DPresent.BackBufferHeight = 0;
g_pD3DPresent.BackBufferCount = 1;
g_pD3DDevice->Reset(&g_pD3DPresent);
int width,height;
GetSize(&width,&height);
float aspect = (float)height / (float)width;
D3DXMatrixPerspectiveFovRH(&projection_matrix,45.0f * (3.141592654f/180.0f),aspect,0.1f,10.0f);
D3DXMatrixLookAtRH(&view_matrix,&eye,&lookat,&up);
D3DXMATRIX world_view_proj_matrix = world_matrix * view_matrix * projection_matrix;
g_pD3DDevice->SetVertexShaderConstantF(0,(float*)&world_view_proj_matrix,4);
}
void Viewport::Render(wxEraseEvent &event)
{
g_pD3DDevice->Clear(0,NULL,D3DCLEAR_TARGET | D3DCLEAR_ZBUFFER,D3DCOLOR_ARGB(0, 0, 0, 0),1.0f,0);
g_pD3DDevice->SetRenderState(D3DRS_CULLMODE, D3DCULL_NONE);
g_pD3DDevice->BeginScene();
g_pD3DDevice->SetFVF(D3DFVF_XYZ | D3DFVF_TEX1);
g_pD3DDevice->SetVertexShader(g_pVertexShaderDX9);
g_pD3DDevice->SetPixelShader(g_pPixelShaderDX9);
g_pD3DDevice->SetTexture(0, g_pTexture);
g_pD3DDevice->DrawPrimitiveUP(D3DPT_TRIANGLESTRIP,2,quad,sizeof(VertexTexture));
g_pD3DDevice->EndScene();
g_pD3DDevice->Present(NULL,NULL,NULL,NULL);
}
Wednesday, September 26, 2012
wxWidgets+OpenGL+OpenCV
You can also combine this with Arduino for fun and simple applications; of course writing it multi-threaded would run more smoothly.
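A minimal sketch of what the multi-threaded version could look like, moving cvQueryFrame into a worker thread so the GUI only copies the latest frame; the class name and the shared buffer/mutex here are my own assumptions, not part of the listing below.
#include <wx/thread.h>
#include <wx/utils.h>
#include <cv.h>
#include <highgui.h>

class CaptureThread : public wxThread
{
public:
    CaptureThread(CvCapture *camera, wxMutex *lock, IplImage **shared)
        : wxThread(wxTHREAD_JOINABLE), camera(camera), lock(lock), shared(shared){}
    virtual ExitCode Entry()
    {
        while(!TestDestroy()){
            IplImage *frame = cvQueryFrame(camera); //owned by the capture, never released here
            if(frame){
                wxMutexLocker guard(*lock);
                if(*shared == NULL){
                    *shared = cvCreateImage(cvGetSize(frame), 8, 3);
                }
                cvConvertImage(frame, *shared, CV_CVTIMG_SWAP_RB); //BGR -> RGB copy for wxImage
            }
            wxMilliSleep(10);
        }
        return (ExitCode)0;
    }
private:
    CvCapture *camera;
    wxMutex *lock;
    IplImage **shared;
};
//The Frame timer handler would then lock the same mutex, wrap *shared in a wxImage
//and pass it to wxGL::Render instead of calling cvQueryFrame itself.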
render_data.h
#ifndef __RENDER_DATA__
#define __RENDER_DATA__
typedef struct
{
float vertex[3];
float texcoord[2];
}VertexTexture;
VertexTexture vertex[4] = {
{{-0.5f,-0.5f,0.0f},{0.0f,1.0f}},
{{0.5f,-0.5f,0.0f},{1.0f,1.0f}},
{{-0.5f,0.5f,0.0f},{0.0f,0.0f}},
{{0.5f,0.5f,0.0f},{1.0f,0.0f}}
};
#endif
wxGLCV.h
#ifndef __WX_GLCV__
#define __WX_GLCV__
#include <wx/wx.h>
#include <wx/glcanvas.h>
#include <cv.h>
#include <highgui.h>
#include "render_data.h"
class wxGL;
class App:public wxApp
{
public:
bool OnInit();
};
class Frame:public wxFrame
{
public:
Frame(const wxString&);
~Frame();
void OnExit(wxCommandEvent&);
void CreateUI();
void Render(wxTimerEvent&);
void Capture();
private:
wxGL *gl;
wxTimer timer;
wxSlider *angle[3];
wxImage *image;
static const unsigned long fps = 25;
unsigned long begin,last;
CvCapture *camera;
IplImage *frame;
IplImage *temp;
DECLARE_EVENT_TABLE();
};
class wxGL:public wxGLCanvas
{
public:
wxGL(wxWindow*,wxWindowID,
const wxPoint&,
const wxSize&,
long,const wxString);
~wxGL();
void OnSize(wxSizeEvent&);
void InitGL();
void Render(wxImage*);
void SetAngleXYZ(int x,int y,int z){angleX = x;angleY = y,angleZ = z;};
private:
bool init;
float angleX,angleY,angleZ;
GLuint texture_id;
DECLARE_EVENT_TABLE();
};
enum{
RENDER_TIMER = 1000
};
DECLARE_APP(App)
IMPLEMENT_APP(App)
BEGIN_EVENT_TABLE(wxGL,wxGLCanvas)
EVT_SIZE(wxGL::OnSize)
END_EVENT_TABLE()
BEGIN_EVENT_TABLE(Frame,wxFrame)
EVT_MENU(wxID_EXIT,Frame::OnExit)
EVT_TIMER(RENDER_TIMER,Frame::Render)
END_EVENT_TABLE()
#endif
wxGLCV.cpp
#include "wxGLCV.h"
bool App::OnInit()
{
Frame *frame = new Frame(wxT("wxGL"));
frame->Show(true);
return true;
}
Frame::Frame(const wxString &title):wxFrame(NULL,wxID_ANY,title,wxDefaultPosition,wxSize(800,600)),timer(this,RENDER_TIMER)
{
CreateUI();
begin = 0;
last = 0;
image = NULL; //these member pointers are tested before use, so start them out as NULL
frame = NULL;
temp = NULL;
camera = cvCaptureFromCAM(-1);
timer.Start(10);
}
void Frame::CreateUI()
{
wxMenu *file = new wxMenu;
file->Append(wxID_EXIT,wxT("E&xit\tAlt-q"),wxT("Exit"));
wxMenuBar *bar = new wxMenuBar;
bar->Append(file,wxT("File"));
SetMenuBar(bar);
wxBoxSizer *top = new wxBoxSizer(wxVERTICAL);
this->SetSizer(top);
wxBoxSizer *screen_box = new wxBoxSizer(wxVERTICAL);
top->Add(screen_box,0,wxALIGN_CENTER_HORIZONTAL | wxALL,5);
wxPanel *panel = new wxPanel(this,wxID_ANY,wxDefaultPosition,wxSize(380,380));
screen_box->Add(panel,0,wxALIGN_CENTER_HORIZONTAL | wxALL,5);
for(int i = 0;i < 3;++i){
angle[i] = new wxSlider(this,wxID_ANY,0,0,360,wxDefaultPosition,wxSize(300,-1),wxSL_HORIZONTAL | wxSL_AUTOTICKS | wxSL_LABELS);
screen_box->Add(angle[i],0,wxALIGN_CENTER_HORIZONTAL | wxALL,5);
}
gl = new wxGL(panel,wxID_ANY,wxDefaultPosition,wxSize(380,380),0,wxT("wxGL"));
CreateStatusBar(2);
SetStatusText(wxDateTime::Now().Format());
}
void Frame::Render(wxTimerEvent &event){
begin = wxGetLocalTimeMillis().ToLong();
gl->SetAngleXYZ(angle[0]->GetValue(),angle[1]->GetValue(),angle[2]->GetValue());
if(begin - last > (1000 / fps)){
last = begin;
Capture();
gl->Render(image);
}
}
void Frame::Capture()
{
frame = cvQueryFrame(camera);
if(temp){
cvReleaseImage(&temp); //release the previous frame buffer so it doesn't leak every capture
}
temp = cvCreateImage(cvSize(frame->width,frame->height),8,3);
cvZero(temp);
cvConvertImage(frame,temp,CV_CVTIMG_SWAP_RB);
unsigned char *data;
cvGetRawData(temp,&data);
if(image){
delete image;
}
image = new wxImage(temp->width,temp->height,data,true);
}
Frame::~Frame()
{
//frame comes from cvQueryFrame and is owned by the capture, so it is not released here
if(image){
delete image;
}
if(temp){
cvReleaseImage(&temp);
}
if(camera){
cvReleaseCapture(&camera);
}
timer.Stop();
}
void Frame::OnExit(wxCommandEvent &event)
{
Close();
}
wxGL::wxGL(wxWindow *paraent,wxWindowID id = wxID_ANY,
const wxPoint &pos = wxDefaultPosition,
const wxSize &size = wxDefaultSize,
long style = 0,const wxString name = wxT("wxGL")):
wxGLCanvas(paraent,(wxGLCanvas*)NULL,id,pos,size,style | wxFULL_REPAINT_ON_RESIZE,name)
{
wxInitAllImageHandlers();
texture_id = 0;
init = false;
}
void wxGL::InitGL()
{
SetCurrent();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(60.0f,1.0f,0.1f,100.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(0.0f,0.0f,2.0f,
0.0f,0.0f,0.0f,
0.0f,-1.0f,0.0f);
for(int i = 0;i < 4;++i){
vertex[i].texcoord[1] = 1.0f - vertex[i].texcoord[1];
}
glEnable(GL_DEPTH_TEST);
//glEnable(GL_CULL_FACE);
glEnable(GL_TEXTURE_2D);
}
wxGL::~wxGL()
{
}
void wxGL::OnSize(wxSizeEvent &event)
{
wxGLCanvas::OnSize(event);
int w,h;
GetClientSize(&w,&h);
if(GetContext()){
SetCurrent();
glViewport(0,0,(GLint)w,(GLint)h);
}
}
void wxGL::Render(wxImage *image)
{
wxPaintDC dc(this);
if(!init){
InitGL();
init = true;
}
SetCurrent();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
if(texture_id){
glDeleteTextures(1,&texture_id); //free last frame's texture before creating a new one
}
glGenTextures(1,&texture_id);
glBindTexture(GL_TEXTURE_2D,texture_id);
glTexImage2D(GL_TEXTURE_2D,0,GL_RGB8,
(GLint)image->GetWidth(),(GLint)image->GetHeight(),0,
GL_RGB,GL_UNSIGNED_BYTE,image->GetData());
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(0.0f,0.0f,2.0f,
0.0f,0.0f,0.0f,
0.0f,-1.0f,0.0f);
glRotatef(angleX,1.0f,0.0f,0.0f);
glRotatef(angleY,0.0f,1.0f,0.0f);
glRotatef(angleZ,0.0f,0.0f,1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3,GL_FLOAT,sizeof(VertexTexture),&vertex[0].vertex);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glTexCoordPointer(2,GL_FLOAT,sizeof(VertexTexture),&vertex[0].texcoord);
glDrawArrays(GL_TRIANGLE_STRIP,0,4);
SwapBuffers();
}
g++ -O2 -o wxGLCV wxGLCV.cpp `wx-config --cxxflags --libs --unicode --gl-libs` `pkg-config opencv --cflags --libs`
Friday, September 21, 2012
wxWidgets With OpenGL
While cleaning up my hard drive recently I stumbled on an OpenGL practice experiment I wrote back when I first switched from MFC to wxWidgets, and realized I had never recorded the most basic way to drive OpenGL from wxWidgets. I don't like the approach in the samples, so I'm recording mine here. Basically you just pass a wxPanel as the parent to wxGLCanvas and it will draw on that wxPanel, and then you can lay it out with wxBoxSizer; load the texture with wxImage, bind the relevant data to the target, draw with vertex arrays, and so on.
render_data.h
#ifndef __RENDER_DATA__
#define __RENDER_DATA__
typedef struct
{
float vertex[3];
float texcoord[2];
}VertexTexture;
VertexTexture vertex[4] = {
{{-0.5f,-0.5f,0.0f},{0.0f,1.0f}},
{{0.5f,-0.5f,0.0f},{1.0f,1.0f}},
{{-0.5f,0.5f,0.0f},{0.0f,0.0f}},
{{0.5f,0.5f,0.0f},{1.0f,0.0f}}
};
#endif
wxGL.h
#ifndef __WX_GL__
#define __WX_GL__
#include <wx/wx.h>
#include <wx/glcanvas.h>
#include "render_data.h"
class wxGL;
class App:public wxApp
{
public:
bool OnInit();
};
class Frame:public wxFrame
{
public:
Frame(const wxString&);
~Frame();
void OnExit(wxCommandEvent&);
void CreateUI();
void Render(wxTimerEvent&);
private:
wxGL *gl;
wxTimer timer;
wxSlider *angle[3];
static const unsigned long fps = 60;
unsigned long begin,last;
DECLARE_EVENT_TABLE();
};
class wxGL:public wxGLCanvas
{
public:
wxGL(wxWindow*,wxWindowID,
const wxPoint&,
const wxSize&,
long,const wxString);
~wxGL();
void OnSize(wxSizeEvent&);
void InitGL();
void Render();
void SetAngleXYZ(int x,int y,int z){angleX = x;angleY = y,angleZ = z;};
private:
bool init;
float angleX,angleY,angleZ;
wxImage *image;
GLuint texture_id;
DECLARE_EVENT_TABLE();
};
enum{
RENDER_TIMER = 1000
};
DECLARE_APP(App)
IMPLEMENT_APP(App)
BEGIN_EVENT_TABLE(wxGL,wxGLCanvas)
EVT_SIZE(wxGL::OnSize)
END_EVENT_TABLE()
BEGIN_EVENT_TABLE(Frame,wxFrame)
EVT_MENU(wxID_EXIT,Frame::OnExit)
EVT_TIMER(RENDER_TIMER,Frame::Render)
END_EVENT_TABLE()
#endif
wxGL.cpp
#include "wxGL.h"
bool App::OnInit()
{
Frame *frame = new Frame(wxT("wxGL"));
frame->Show(true);
return true;
}
Frame::Frame(const wxString &title):wxFrame(NULL,wxID_ANY,title,wxDefaultPosition,wxSize(800,600)),timer(this,RENDER_TIMER)
{
CreateUI();
begin = 0;
last = 0;
timer.Start(10);
}
void Frame::CreateUI()
{
wxMenu *file = new wxMenu;
file->Append(wxID_EXIT,wxT("E&xit\tAlt-q"),wxT("Exit"));
wxMenuBar *bar = new wxMenuBar;
bar->Append(file,wxT("File"));
SetMenuBar(bar);
wxBoxSizer *top = new wxBoxSizer(wxVERTICAL);
this->SetSizer(top);
wxBoxSizer *screen_box = new wxBoxSizer(wxVERTICAL);
top->Add(screen_box,0,wxALIGN_CENTER_HORIZONTAL | wxALL,5);
wxPanel *panel = new wxPanel(this,wxID_ANY,wxDefaultPosition,wxSize(380,380));
screen_box->Add(panel,0,wxALIGN_CENTER_HORIZONTAL | wxALL,5);
for(int i = 0;i < 3;++i){
angle[i] = new wxSlider(this,wxID_ANY,0,0,360,wxDefaultPosition,wxSize(300,-1),wxSL_HORIZONTAL | wxSL_AUTOTICKS | wxSL_LABELS);
screen_box->Add(angle[i],0,wxALIGN_CENTER_HORIZONTAL | wxALL,5);
}
gl = new wxGL(panel,wxID_ANY,wxDefaultPosition,wxSize(380,380),0,wxT("wxGL"));
CreateStatusBar(2);
SetStatusText(wxDateTime::Now().Format());
}
void Frame::Render(wxTimerEvent &event){
begin = wxGetLocalTimeMillis().ToLong();
gl->SetAngleXYZ(angle[0]->GetValue(),angle[1]->GetValue(),angle[2]->GetValue());
if(begin - last > (1000 / fps)){
last = begin;
gl->Render();
}
}
Frame::~Frame()
{
timer.Stop();
}
void Frame::OnExit(wxCommandEvent &event)
{
Close();
}
wxGL::wxGL(wxWindow *paraent,wxWindowID id = wxID_ANY,
const wxPoint &pos = wxDefaultPosition,
const wxSize &size = wxDefaultSize,
long style = 0,const wxString name = wxT("wxGL")):
wxGLCanvas(paraent,(wxGLCanvas*)NULL,id,pos,size,style | wxFULL_REPAINT_ON_RESIZE,name)
{
wxInitAllImageHandlers();
image = new wxImage(wxT("lena_std.tif"));
texture_id = 0;
init = false;
}
void wxGL::InitGL()
{
SetCurrent();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(60.0f,1.0f,0.1f,100.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(0.0f,0.0f,2.0f,
0.0f,0.0f,0.0f,
0.0f,-1.0f,0.0f);
for(int i = 0;i < 4;++i){
vertex[i].texcoord[1] = 1.0f - vertex[i].texcoord[1];
}
glGenTextures(1,&texture_id);
glBindTexture(GL_TEXTURE_2D,texture_id);
glTexImage2D(GL_TEXTURE_2D,0,GL_RGB8,
(GLint)image->GetWidth(),(GLint)image->GetHeight(),0,
GL_RGB,GL_UNSIGNED_BYTE,image->GetData());
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glEnable(GL_DEPTH_TEST);
//glEnable(GL_CULL_FACE);
glEnable(GL_TEXTURE_2D);
}
wxGL::~wxGL()
{
}
void wxGL::OnSize(wxSizeEvent &event)
{
wxGLCanvas::OnSize(event);
int w,h;
GetClientSize(&w,&h);
if(GetContext()){
SetCurrent();
glViewport(0,0,(GLint)w,(GLint)h);
}
}
void wxGL::Render()
{
wxPaintDC dc(this);
if(!init){
InitGL();
init = true;
}
SetCurrent();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(0.0f,0.0f,2.0f,
0.0f,0.0f,0.0f,
0.0f,-1.0f,0.0f);
glRotatef(angleX,1.0f,0.0f,0.0f);
glRotatef(angleY,0.0f,1.0f,0.0f);
glRotatef(angleZ,0.0f,0.0f,1.0f);
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3,GL_FLOAT,sizeof(VertexTexture),&vertex[0].vertex);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glTexCoordPointer(2,GL_FLOAT,sizeof(VertexTexture),&vertex[0].texcoord);
glDrawArrays(GL_TRIANGLE_STRIP,0,4);
SwapBuffers();
}
Wednesday, August 29, 2012
Heartbeat Sensor
A few days ago I saw this article, Arduino 心拍センサシールド, and it looked interesting, so I implemented it too. The principle is that as the heart contracts and expands, the blood flow changes the translucency of the peripheral tissue; with a finger resting on the CNY70 this changes the amount of reflected infrared light and produces a pulse (the PWM referred to below), so from those pulses the heartbeat and its count can be measured.
Part List :
- CNY70 (or another kind of reflective optical sensor)
- Red LED
- 10k Ohm potentiometer
- 220 Ohm resistor x 2
- 4.7k Ohm resistor
- 1k Ohm resistor
- 100k Ohm resistor
- 1M Ohm resistor
- 0.1uF ceramic, multilayer, or non-polarized electrolytic capacitor x 2
- 1uF ceramic, multilayer, or non-polarized electrolytic capacitor
- LM358 dual op-amp (mine is marked HA17358; I found that not every op-amp works, e.g. NE5532 and JRC4556AD don't, so I stuck with the original 358 here. Someone with an electronics background would have to study the datasheets to explain the difference.)
Circuit description:
The four-pin IC on the left is the CNY70. Ground the CNY70's infrared emitter cathode and the emitter of its receiving phototransistor, and connect the infrared emitter anode through a 220 Ohm resistor to the 5V supply. Connect the left end of the 10k potentiometer through the 4.7k resistor to 5V to form a divider, the right end to ground, and the middle wiper pin (the yellow wire in the picture) to both the collector of the CNY70's phototransistor and a 1uF capacitor, with the capacitor's other leg going to pin 3 of the LM358 (the non-inverting input); this pin also needs a 100k resistor to ground. Pin 2 of the LM358 (the inverting input) gets a 1k resistor to ground, plus a 1M resistor and a 0.1uF capacitor back to pin 1 of the LM358 (the output) as feedback. Pin 4 of the LM358 goes to ground and pin 8 goes to the 5V supply,
with another 0.1uF capacitor to ground to suppress noise. Finally, connect pin 1 of the LM358 (the output) to a 220 Ohm resistor and the LED (the green wires in the picture) and also to the Arduino's Analog Pin 0 (the purple wire in the picture).
Program:
Sample about 50 times per second, check whether the integrated values reach the level of a real pulse to decide whether a PWM pulse occurred, and use the trough of each pulse to separate the waves and update the heartbeat count.
int sensor_pin = 0;
int n = 0;
int n_1 = 0;
int diff = 0;
int count = 0;
int elapse_up = 0;
int elapse_down = 0;
int integral_plus = 0;
int integral_minus = 0;
void setup()
{
Serial.begin(9600);
}
void loop()
{
n = analogRead(sensor_pin); //read the current sample
diff = (n - n_1); //subtract the previous sample from the current one (N - (N-1))
n_1 = n; //update the previous sample
//If the difference is close to zero, we have reached a trough and are about to enter a new pulse (PWM).
//Also check whether the rise (integral_plus) and fall (integral_minus) of the previous pulse
//reached the integral thresholds, to confirm a pulse really occurred.
//If both conditions hold, update all of the state.
if(diff < 10 && diff > -10 &&
integral_plus > 160 && integral_minus < -200){
if(elapse_up > 50 && elapse_up < 400 &&
elapse_down > 96 && elapse_down < 800){
++count;
Serial.print(count);
Serial.println(" Heartbeat");
}
elapse_up = 0;
elapse_down = 0;
integral_plus = 0;
integral_minus = 0;
}
else if(diff > 20 && diff < 280){
//PWM rising
integral_plus += diff;
elapse_up += 20;
}
else if(diff < -20 && diff > -200){
//PWM falling
integral_minus += diff;
elapse_down += 20;
}
//sample every 20 ms, i.e. 50 samples per second.
delay(20);
}
DEMO:
When using it, first adjust the 10k potentiometer, place a finger on the CNY70 so it is fully covered but not pressed hard, and check that the LED flickers along with your heartbeat; if it doesn't, keep adjusting the potentiometer's divider voltage.
=============2012/09/17===============
Recently I found some time to lay it out as a PCB; the side with the + and - marks is the power input.