
Getting Depth and Color Data from Kinect with OpenCV in C++


Development environment: Visual Studio 2010 + OpenCV 2.4.10

First, download the latest Kinect for Windows SDK from http://www.microsoft.com/en-us/kinectforwindows/develop/downloads-docs.aspx (the code below uses the v1.x NUI API exposed by NuiApi.h).

After downloading, do not plug in the Kinect yet, and preferably have no USB devices connected other than the keyboard and mouse. Install the SDK first, then plug in the Kinect; Windows will prompt that it is installing the new device. Once installation finishes, you can find two newly installed programs in the Start menu: one displays the Kinect depth image, and the other showcases the various sample programs that ship with the SDK.

Inside the SDK installation directory there is a Samples folder with examples in four languages: the native ones are C++ and the managed ones are C#; I am not familiar with the other two. C++ is the language I know, and since I was only experimenting, that is what I used.
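Before the sample will build, the VS2010 project needs to see both the Kinect SDK and OpenCV headers and libraries. Below is a minimal sketch of that wiring, not part of the original article; it assumes a default SDK 1.x install (which defines the KINECTSDK10_DIR environment variable) and a stock OpenCV 2.4.10 build, so the exact directory and library names on your machine may differ.

// Sketch of the build setup (assumptions noted above):
// - add $(KINECTSDK10_DIR)inc and the OpenCV include directory to the project's include paths
// - add $(KINECTSDK10_DIR)lib\x86 and the OpenCV lib directory to the library paths
// - link the import libraries, for example directly from source:
#pragma comment(lib, "Kinect10.lib")             // Kinect for Windows SDK v1.x import library
#pragma comment(lib, "opencv_core2410.lib")      // OpenCV 2.4.10 core
#pragma comment(lib, "opencv_highgui2410.lib")   // imshow/namedWindow/waitKey live here in 2.4.x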

opencv+kinect.cpp

#include <opencv2/opencv.hpp>
#include <iostream>
// Windows header; required, otherwise NuiApi.h cannot be used
#include <Windows.h>
// Kinect for Windows SDK header
#include "NuiApi.h"
 
using namespace std;
using namespace cv;
 
#include <d3d11.h>
 
 
// Farthest distance of interest (mm)
const int MAX_DISTANCE = 3500;
// Nearest distance of interest (mm)
const int MIN_DISTANCE = 200;
 
const LONG m_depthWidth = 640;
const LONG m_depthHeight = 480;
const LONG m_colorWidth = 640;
const LONG m_colorHeight = 480;
const LONG cBytesPerPixel = 4;
 
int main()
{
  // Color image
  Mat image_rgb;
  // Depth image
  Mat image_depth;
 
  // Allocate the Mats: 640x480, 3-channel color and single-channel depth
  image_rgb.create(480,640,CV_8UC3);
  image_depth.create(480,640,CV_8UC1);
 
  // Pointer to the Kinect sensor instance (checked after creation below)
  INuiSensor* m_pNuiSensor = NULL;
 
  // Number of Kinects currently connected (kept for the multi-sensor case)
  int iSensorCount = 0;
  // Query how many Kinects are attached
  HRESULT hr = NuiGetSensorCount(&iSensorCount);
 
  // Create a sensor instance by index; only one Kinect is connected here, so no loop is needed
  hr = NuiCreateSensorByIndex(iSensorCount - 1, &m_pNuiSensor);
  if (FAILED(hr) || m_pNuiSensor == NULL)
  {
    cout<<"No Kinect sensor could be created"<<endl;
    return hr;
  }
  // Initialize the sensor so it can deliver both color and depth streams
  hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH);
 
  // Bail out if initialization failed
  if (FAILED(hr))
  {
    cout<<"NuiInitialize failed"<<endl;
    return hr;
  }
 
  // Event signaled when a new color frame is ready
  HANDLE nextColorFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
  // Handle of the color stream
  HANDLE colorStreamHandle = NULL;
  // Event signaled when a new depth frame is ready
  HANDLE nextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
  // Handle of the depth stream
  HANDLE depthStreamHandle = NULL;
 
  // Open the color stream (NUI_IMAGE_TYPE_COLOR) at 640x480
  hr = m_pNuiSensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0,2,nextColorFrameEvent,&colorStreamHandle);
 
  if( FAILED( hr ) )  // make sure the stream opened correctly
  {
    cout<<"Could not open color image stream"<<endl;
    m_pNuiSensor->NuiShutdown();
    return hr;
  }
 
  // Open the depth stream (NUI_IMAGE_TYPE_DEPTH) at 640x480
  hr = m_pNuiSensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH, NUI_IMAGE_RESOLUTION_640x480, 0,2, nextDepthFrameEvent, &depthStreamHandle);
 
  if( FAILED( hr ) )  // make sure the stream opened correctly
  {
    cout<<"Could not open depth image stream"<<endl;
    m_pNuiSensor->NuiShutdown();
    return hr;
  }
 
 
 
  cv::namedWindow("depth", CV_WINDOW_AUTOSIZE);
  moveWindow("depth",300,600);
  cv::namedWindow("colorImage",CV_WINDOW_AUTOSIZE);
  moveWindow("colorImage",0,200);
 
  while (1)
  {
    NUI_IMAGE_FRAME pImageFrame_rgb;
    NUI_IMAGE_FRAME pImageFrame_depth;
 
    // Check (with a 0 ms timeout) whether a new color frame is ready; only process one if it is
    if (WaitForSingleObject(nextColorFrameEvent, 0) == 0)
    {
      // Fetch the frame from the stream handle opened above; the frame data ends up in pImageFrame_rgb
      hr = m_pNuiSensor->NuiImageStreamGetNextFrame(colorStreamHandle, 0, &pImageFrame_rgb);
      if (FAILED(hr))
      {
        cout<<"Could not get color image"<<endl;
        m_pNuiSensor->NuiShutdown();
        return -1;
      }
 
      INuiFrameTexture *pTexture = pImageFrame_rgb.pFrameTexture;
      NUI_LOCKED_RECT lockedRect;
 
      // Lock the frame data into lockedRect, which has two useful fields:
      // Pitch (bytes per row) and pBits (address of the first byte).
      // Locking also keeps the Kinect from modifying the buffer while we read it.
      pTexture->LockRect(0, &lockedRect, NULL, 0);
      // Make sure the data we received is valid
      if (lockedRect.Pitch != 0)
      {
        // Copy the data into an OpenCV Mat
        for (int i = 0; i < image_rgb.rows; i++)
        {
          // Pointer to row i of the output image
          uchar *prt = image_rgb.ptr(i);
 
          // Each byte is one color channel, so a plain uchar pointer is enough
          uchar *pBuffer = (uchar*)(lockedRect.pBits) + i * lockedRect.Pitch;
 
          for (int j = 0; j < image_rgb.cols; j++)
          {  
            prt[3 * j] = pBuffer[4 * j];        // source pixels are 4 bytes: B, G, R, and an unused 4th byte
            prt[3 * j + 1] = pBuffer[4 * j + 1];
            prt[3 * j + 2] = pBuffer[4 * j + 2];
          }
        }
 
        imshow("colorImage",image_rgb);
        // Unlock the buffer
        pTexture->UnlockRect(0);
        // Release the frame back to the runtime
        m_pNuiSensor->NuiImageStreamReleaseFrame(colorStreamHandle, &pImageFrame_rgb );
      }
      else
      {
        cout<<"Buffer length of received texture is bogus\r\n"<<endl;
      }
 
      // Near-mode flag returned by the extended depth call below
      BOOL nearMode;
      // Extended depth texture (NUI_DEPTH_IMAGE_PIXEL data) used for the color-to-depth mapping
      INuiFrameTexture* pColorToDepthTexture; 
 
 
      // Depth processing: wait (indefinitely) for a new depth frame
      if (WaitForSingleObject(nextDepthFrameEvent, INFINITE) == 0)
      {
 
        hr = m_pNuiSensor->NuiImageStreamGetNextFrame(depthStreamHandle, 0 , &pImageFrame_depth);
 
        if (FAILED(hr))
        {
          cout<<"Could not get color image"<<endl;
          NuiShutdown();
          return -1;
        }
 
        // Get the extended depth pixel texture for this frame; its NUI_DEPTH_IMAGE_PIXEL data
        // is what the coordinate mapper consumes below
        hr = m_pNuiSensor->NuiImageFrameGetDepthImagePixelFrameTexture( 
          depthStreamHandle, &pImageFrame_depth, &nearMode, &pColorToDepthTexture); 
        INuiFrameTexture *pTexture = pImageFrame_depth.pFrameTexture;
        NUI_LOCKED_RECT lockedRect;
        NUI_LOCKED_RECT ColorToDepthLockRect; 
 
        pTexture->LockRect(0, &lockedRect, NULL, 0);
        pColorToDepthTexture->LockRect(0,&ColorToDepthLockRect,NULL,0); 
 
        // Normalize the depth data into an 8-bit grayscale image
        for (int i = 0; i < image_depth.rows; i++)
        {
          uchar *prt = image_depth.ptr<uchar>(i);
 
          uchar* pBuffer = (uchar*)(lockedRect.pBits) + i * lockedRect.Pitch;
          // Each depth sample is 2 bytes, so reinterpret the BYTE pointer as USHORT
          USHORT *pBufferRun = (USHORT*)pBuffer;
 
          for (int j = 0; j < image_depth.cols; j++)
          {
            // The packed 16-bit value carries the distance in its high 13 bits
            // (the low 3 bits hold the player index), so shift right by 3 to get millimeters
            USHORT realDepth = pBufferRun[j] >> 3;
            // Map depths between MIN_DISTANCE and MAX_DISTANCE to [0, 255];
            // anything outside that range is clamped to an edge value
            if (realDepth > MAX_DISTANCE) prt[j] = 255;
            else if (realDepth < MIN_DISTANCE) prt[j] = 0;
            else prt[j] = (BYTE)(255 * realDepth / MAX_DISTANCE);
          }
        }
        imshow("depth", image_depth);
 
 
 
        // Next comes the alignment step: use the depth data to cut the foreground out of the color image
 
        // Array that receives the depth-space point corresponding to each color pixel
        NUI_DEPTH_IMAGE_POINT* depthPoints = new NUI_DEPTH_IMAGE_POINT[640 * 480];
        if (ColorToDepthLockRect.Pitch != 0) 
        { 
          HRESULT hrState = S_OK; 
           
          // Interface that converts between the different coordinate spaces (depth, color, skeleton)
          INuiCoordinateMapper* pMapper; 
 
          // Get the sensor's coordinate mapper
          hrState = m_pNuiSensor->NuiGetCoordinateMapper(&pMapper); 
 
          if (FAILED(hrState)) 
          { 
            return hrState; 
          } 
 
          // The key step: map each color pixel into depth space. Parameters:
          //  1: type of the color image
          //  2: resolution of the color image
          //  3: resolution of the depth image
          //  4: number of depth pixels passed in
          //  5: the depth pixels themselves (NUI_DEPTH_IMAGE_PIXEL array from the extended depth texture)
          //  6: number of output points
          //  7: output array that receives the depth-space point for each color pixel
          hrState = pMapper->MapColorFrameToDepthFrame(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, NUI_IMAGE_RESOLUTION_640x480, 
            640 * 480, (NUI_DEPTH_IMAGE_PIXEL*)ColorToDepthLockRect.pBits,640 * 480, depthPoints); 
 
          if (FAILED(hrState)) 
          { 
            return hrState; 
          } 
 
 
          // Output image for the extracted foreground
          Mat show;
          show.create(480,640,CV_8UC3);
          show = 0;  // start from an all-black image
 
          for (int i = 0; i < image_rgb.rows; i++)
          {
            for (int j = 0; j < image_rgb.cols; j++)
            {
              uchar *prt_rgb = image_rgb.ptr(i);
              uchar *prt_show = show.ptr(i);
              // Offset of this color pixel inside the mapping array
              long index = i * 640 + j; 
              // Depth-space point corresponding to this color pixel
              NUI_DEPTH_IMAGE_POINT depthPointAtIndex = depthPoints[index]; 
 
              // Bounds check: the mapped point must fall inside the depth image
              if (depthPointAtIndex.x >= 0 && depthPointAtIndex.x < image_depth.cols &&
                depthPointAtIndex.y >=0 && depthPointAtIndex.y < image_depth.rows)
              {
                // Depth check: pixels whose mapped depth lies between MIN_DISTANCE and
                // MAX_DISTANCE are treated as foreground and copied to the output.
                // Use the depth carried by the mapped point itself; looking the value up
                // again in the depth image by coordinates gives wrong results.
                if (depthPointAtIndex.depth >= MIN_DISTANCE && depthPointAtIndex.depth <= MAX_DISTANCE)
                {
                  prt_show[3 * j]   = prt_rgb[j * 3];
                  prt_show[3 * j + 1] = prt_rgb[j * 3 + 1];
                  prt_show[3 * j + 2] = prt_rgb[j * 3 + 2];
                }
              }
            }
          }
          imshow("show", show);
        }
 
        delete []depthPoints;
         
        // Unlock both textures, release the extended depth texture, and release the frame
        pTexture->UnlockRect(0);
        pColorToDepthTexture->UnlockRect(0);
        pColorToDepthTexture->Release();
        m_pNuiSensor->NuiImageStreamReleaseFrame(depthStreamHandle, &pImageFrame_depth);
      }
 
      else
      {
        cout<<"Buffer length of received texture is bogus\r\n"<<endl;
      }
    }
 
    // Exit the loop when Esc is pressed
    if (cvWaitKey(20) == 27)
      break;
  }
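  // Cleanup sketch (not in the original sample): shut the sensor down, release the COM
  // interface, and close the frame events before returning, instead of relying on process exit.
  m_pNuiSensor->NuiShutdown();
  m_pNuiSensor->Release();
  CloseHandle(nextColorFrameEvent);
  CloseHandle(nextDepthFrameEvent);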
  return 0;
}
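A note on the depth unpacking in the normalization loop: the right shift by 3 mirrors what the SDK's own helpers do, since each packed 16-bit sample stores the distance in its high 13 bits and the player index in the low 3 bits. If you prefer not to hard-code that bit layout, a small sketch using the NuiDepthPixelToDepth / NuiDepthPixelToPlayerIndex helpers from the Kinect headers looks like this (same pBufferRun buffer and loop index j as in the listing above):

            // Sketch: unpack one raw depth sample with the SDK helpers instead of shifting by hand
            USHORT packed   = pBufferRun[j];                      // raw value from the locked depth buffer
            USHORT depthMM  = NuiDepthPixelToDepth(packed);       // distance in millimeters (high 13 bits)
            USHORT playerId = NuiDepthPixelToPlayerIndex(packed); // player index (low 3 bits, 0 when skeleton tracking is off)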
