diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..c597d68
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,32 @@
+################################################################################
+# This .gitignore file was automatically created by Microsoft(R) Visual Studio.
+################################################################################
+
+/.vs
+/windows/HoloLensBridge/.vs/HoloLensNavigation/v16
+/linux/navigation_launcher/params/left0000_pointcloud.jpg
+/linux/navigation_launcher/params/left0000.jpg
+/windows/HoloLensBridge/Debug/HoloLensNavigation
+/windows/HoloLensBridge/HoloLensNavigation/Debug
+/windows/HoloLensBridge/HoloLensNavigation/Release
+/windows/HoloLensBridge/packages/Eigen.3.3.3
+/windows/HoloLensBridge/Release/HoloLensNavigation
+/windows/HoloLensBridge/HoloLensNavigation/HoloLensNavigation.vcxproj.user
+/windows/HoloLensBridge/ARM64/Release/HoloLensNavigation
+/windows/HoloLensBridge/HoloLensNavigation/ARM64/Release
+/windows/HoloLensBridge/ARM64/Debug/HoloLensNavigation
+/windows/HoloLensBridge/HoloLensNavigation/ARM64/Debug
+/windows/HoloLensBridge/HoloLensNavigation/x64/Release
+/windows/HoloLensBridge/x64/Release/HoloLensNavigation
+/linux/navigation_launcher/params/left0000 (copy).jpg
+/linux/HoloROSBridge/src/hololens_ros_bridge_node_OLD.cpp
+/windows/HoloLensBridge/ARM/Release/HoloLensNavigation
+/windows/HoloLensBridge/HoloLensNavigation/ARM/Release
+/linux/holo_nav_dash/webSocket.pyc
+/linux/holo_nav_dash/settings.pyc
+/windows/packages/Eigen.3.3.3
+/windows/bin
+/windows/.vs/HoloLensSpatialMapping/v16
+/windows/build
+/windows/.vs/MSRHoloLensSpatialMapping/v16
+/windows/.vs
diff --git a/README.md b/README.md
index 5cd7cec..694f7e3 100644
--- a/README.md
+++ b/README.md
@@ -1,33 +1,98 @@
-# Project
+# HoloLens ROS Navigation Package
-> This repo has been populated by an initial template to help get you started. Please
-> make sure to update the content to build a great experience for community-building.
+## Overview
-As the maintainer of this project, please make a few updates:
+This is a HoloLens bridge example for robot navigation in a ROS system. It contains three modules:
-- Improving this README.MD file to provide a great experience
-- Updating SUPPORT.MD with content about this project's support experience
-- Understanding the security reporting process in SECURITY.MD
-- Remove this section from the README
+### HoloLensBridge
+Universal Windows Platform (UWP) application for HoloLens. Communicates with HoloROSBridge.
-## Contributing
+### HoloROSBridge
+ROS package of the HoloLens bridge.
+Module for using the HoloLens in a ROS system.
-This project welcomes contributions and suggestions. Most contributions require you to agree to a
-Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
-the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
+### HoloLens_Localization
+ROS package containing the HoloLens localization module,
+the offline calibration between the HoloLens and the robot's head, and the online calibration between the HoloLens and the robot's base.
-When you submit a pull request, a CLA bot will automatically determine whether you need to provide
-a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
-provided by the bot. You will only need to do this once across all repos using our CLA.
+## Prerequisites, installation and build
-This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
-For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
-contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
+Follow the [setup instructions](Setup/README.md).
-## Trademarks
+## Tips
-This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft
-trademarks or logos is subject to and must follow
-[Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general).
-Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship.
-Any use of third-party trademarks or logos are subject to those third-party's policies.
+General tips to aid in deployment, handling Pepper, accessing the HoloLens, etc. can be found [here](/Setup/TIPS.md).
+
+
+## How to use
+
+### Advance Preparation
+Follow the instructions on how to build and install the map [here](Setup/MAP.md).
+
+Attach the HoloLens to the robot's head (instructions TODO).
+
+### Running Process
+#### HoloLens Stack
+- (HoloLens) Boot the HoloLens bridge
+  - Launch the HoloLensNavigation application from the Device Portal (access the HoloLens IP from a browser), or use an alternative launch method.
+- (ROS) Launch Pepper's stack
+  - `$ roslaunch pepper_bringup pepper_full.launch nao_ip:=<pepper_ip> network_interface:=<network_interface>`
+  - The local ROS computer's network interface name can be found by running the terminal command "ifconfig" and looking for the name associated with the active IP address. Do not include the colon after the network interface.
+  - Ideally, start Pepper with autonomous life disabled. Use Choregraph or refer to the [tips](/Setup/TIPS.md) document for alternative options.
+- (ROS) Launch the HoloLens stack
+  - `$ roslaunch navigation_launcher hololensstack.launch HoloLens_ip:=<hololens_ip>`
+  - Note that XTerm needs to be installed for this, as the script uses it to interact with the calibration.
+
+#### Navigation Stack
+- (ROS) Launch the navigation program
+  - ```roslaunch navigation_launcher navstack.launch```
+
+#### Calibration
+- In the calibration window:
+  - move Pepper's head into its initial/default pose. Use either Choregraph or connect to Pepper via SSH and set the angles directly:
+    - ```qicli call ALMotion.setAngles "HeadPitch" 0.0 0.3```
+    - ```qicli call ALMotion.setAngles "HeadYaw" 0.0 0.3```
+  - press ```space``` to record the initial position.
+  - move Pepper's head upward. Use either Choregraph or connect to Pepper via SSH and set the pitch directly:
+    - ```qicli call ALMotion.setAngles "HeadPitch" -0.35 0.3```
+  - press ```space``` again to record the new position.
+  - reset Pepper's head pitch and then rotate it to the left. Use either Choregraph or connect to Pepper via SSH:
+    - ```qicli call ALMotion.setAngles "HeadPitch" 0.0 0.3```
+    - ```qicli call ALMotion.setAngles "HeadYaw" 0.7 0.3```
+  - press ```space``` to record the new position.
+  - rotate Pepper's head to the right. Use either Choregraph or connect to Pepper via SSH:
+    - ```qicli call ALMotion.setAngles "HeadYaw" -0.7 0.3```
+  - press ```space``` to record the new position.
+  - press ```c``` to calibrate.
+  - reset Pepper's head pitch and rotation. Use either Choregraph or connect to Pepper via SSH:
+    - ```qicli call ALMotion.setAngles "HeadPitch" 0.0 0.3```
+    - ```qicli call ALMotion.setAngles "HeadYaw" 0.0 0.3```
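+The head poses above can also be driven programmatically. The following is a minimal sketch, not a file in this repository: it assumes the pynaoqi SDK from the setup instructions is on your PYTHONPATH, and the Pepper IP is a placeholder. You still press ```space``` in the calibration window after each pose and ```c``` to calibrate. (The static_calibration node also binds ```a``` to an auto-calibration routine that steps through the same four poses on its own.)
+
+```
+# calibrate_poses.py -- illustrative sketch only (not shipped with this repository).
+# Steps Pepper's head through the four calibration poses via the pynaoqi SDK.
+import time
+from naoqi import ALProxy
+
+PEPPER_IP = "10.1.1.202"   # placeholder: replace with your Pepper's IP address
+motion = ALProxy("ALMotion", PEPPER_IP, 9559)
+
+# (HeadPitch, HeadYaw) pairs matching the manual steps above
+poses = [(0.0, 0.0), (-0.35, 0.0), (0.0, 0.7), (0.0, -0.7)]
+
+for pitch, yaw in poses:
+    motion.setAngles(["HeadPitch", "HeadYaw"], [pitch, yaw], 0.3)  # non-blocking call
+    time.sleep(3.0)        # give the head time to settle
+    raw_input("Press space in the calibration window, then Enter here...")
+
+# reset the head before starting navigation
+motion.setAngles(["HeadPitch", "HeadYaw"], [0.0, 0.0], 0.3)
+```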
+
+#### Navigation
+- (ROS) Launch rviz
+  - `$ rosrun rviz rviz`
+  - add the Map and Pepper RobotModel displays. Alternatively, load the [pepper.rviz](rviz/pepper.rviz) rviz configuration file.
+- In rviz, select `2D Pose Estimate` and set Pepper's initial position and direction on the map. Try to be as precise as
+  possible. The script will calculate a pose estimate and localize the Pepper model.
+- In rviz, select `2D Nav Goal` and select a destination goal and orientation on the map.
+- Pepper navigation will start.
+
+
+
+### Running Process (INDIVIDUAL NODES)
+- Pepper ROS full stack
+  - ```$ roslaunch pepper_bringup pepper_full.launch nao_ip:=<pepper_ip> network_interface:=<network_interface>```
+  - example: ```roslaunch pepper_bringup pepper_full.launch nao_ip:=10.1.1.202 network_interface:=enp3s0```
+- HoloLens ROS Bridge
+  - ```$ rosrun hololens_ros_bridge hololens_ros_bridge_node <hololens_ip> 1234```
+  - example: ```rosrun hololens_ros_bridge hololens_ros_bridge_node 10.1.1.206 1234```
+- ROS map_server
+  - ```$ rosrun map_server map_server src/navigation_launcher/params/map.yaml```
+- HoloLens Anchor Localizer
+  - ```$ rosrun hololens_localizer anchor_localizer```
+- Localizer Calibration
+  - ```$ rosrun hololens_localizer static_calibration <odom frame> <head frame> <base footprint frame> [calibrationFileName]```
+  - example: ```rosrun hololens_localizer static_calibration odom Head base_footprint calibrationData.bin```
+- Dynamic Adjuster
+  - ```$ rosrun hololens_localizer dynamic_adjuster.py <robot foot frame>```
+  - example: ```rosrun hololens_localizer dynamic_adjuster.py```
diff --git a/Setup/MAP.md b/Setup/MAP.md
new file mode 100644
index 0000000..ef75581
--- /dev/null
+++ b/Setup/MAP.md
@@ -0,0 +1,34 @@
+# How to build a map
+
+## HoloLens Spatial Mapping
+
+Use the HoloLens device to build a spatial mapping of your environment. Map the floor up to at least your eye level,
+and make sure to carefully trace along the floor edges to get accurate readings.
+
+For visual feedback, compile and run the HoloLensNavigation application and complete the spatial mesh mapping. Alternatively,
+compile and run the lighter-weight Microsoft Holographic spatial mapping sample found
+[here](https://github.com/microsoft/Windows-universal-samples/tree/master/Samples/HolographicSpatialMapping).
+
+## Create a floor plan image from HoloLens' Spatial Map in ROS
+
+- launch the HoloLensNavigation app on your HoloLens device
+  - there are different ways to launch the application. The easiest way is to use the Device Portal to launch the
+application. Alternatively, wear the headset and launch it from the GUI, or launch it from Visual Studio.
+- launch the ROS map_server
+  - ```$ rosrun map_server map_server src/navigation_launcher/params/map.yaml```
+- launch the HoloLens bridge in ROS
+  - ```$ rosrun hololens_ros_bridge hololens_ros_bridge_node <hololens_ip> 1234```
+  - note that ```1234``` is the port number.
+- launch the HoloLens localizer in ROS
+  - ```$ rosrun hololens_localizer anchor_localizer```
+- launch the image saver node in ROS
+  - ```$ rosrun image_view image_saver image:=/hololens/image```
+- launch rviz in ROS
+  - ```$ rviz```
+  - select "2D Pose Estimate" and then click anywhere on the map view to set an initial position in any location and
+direction. This instructs image_saver to create a cross-section of the HoloLens' spatial map and save it as
+```left0000.jpg``` in the folder from which image_saver was launched.
+- edit the file ```left0000.jpg```
+  - open the file ```left0000.jpg``` with your favorite image editor.
+  - using the depicted point cloud outline, create a ROS-compliant image map.
+  - save the modified ```left0000.jpg``` file in the ```navigation_launcher/params/``` folder.
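+map_server loads the image through a small YAML descriptor. The following is a minimal sketch, not a file from this repository: it binarizes the saved cross-section and writes a companion ```map.yaml``` in the standard map_server format. All numeric values (resolution, origin, thresholds) and file names are placeholders and must be adapted to your environment; the ```navigation_launcher/params/map.yaml``` already present in the repository may use different settings.
+
+```
+# make_map.py -- illustrative sketch only (not shipped with this repository).
+# Turns the saved cross-section into a black/white occupancy image and writes
+# a companion map.yaml for map_server. Adjust all values to your environment.
+import cv2
+
+img = cv2.imread("left0000.jpg", cv2.IMREAD_GRAYSCALE)
+# pixels darker than the threshold (the point cloud outline) become occupied (black)
+_, occupancy = cv2.threshold(img, 200, 255, cv2.THRESH_BINARY)
+cv2.imwrite("left0000.jpg", occupancy)
+
+with open("map.yaml", "w") as f:
+    f.write(
+        "image: left0000.jpg\n"
+        "resolution: 0.05\n"         # meters per pixel -- placeholder
+        "origin: [0.0, 0.0, 0.0]\n"  # x, y, yaw of the lower-left corner -- placeholder
+        "negate: 0\n"
+        "occupied_thresh: 0.65\n"
+        "free_thresh: 0.196\n"
+    )
+```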
diff --git a/Setup/README.md b/Setup/README.md
new file mode 100644
index 0000000..fa82cc1
--- /dev/null
+++ b/Setup/README.md
@@ -0,0 +1,157 @@
+# Prerequisites, installation and build instructions
+
+## Prerequisites
+
+### Ubuntu 18.04.5 LTS
+
+Install Ubuntu 18.04 LTS from https://releases.ubuntu.com/18.04/.
+
+Install XTerm (the HoloLens stack launch script uses it to open the calibration window).
+
+### ROS Melodic
+
+http://wiki.ros.org/melodic/Installation/Ubuntu.
+
+Additional ROS packages required:
+
+```
+$ sudo apt-get install ros-melodic-driver-base
+$ sudo apt-get install ros-melodic-move-base-msgs ros-melodic-octomap ros-melodic-octomap-msgs
+$ sudo apt-get install ros-melodic-map-server
+$ sudo apt-get install ros-melodic-camera-info-manager ros-melodic-camera-info-manager-py
+$ sudo apt-get install ros-melodic-rgbd-launch
+$ sudo apt-get install ros-melodic-husky-navigation
+```
+
+```
+$ sudo apt-get install python-catkin-tools
+```
+
+Optional:
+
+```
+$ cd ~/catkin_ws/src/
+$ git clone https://github.com/ros-teleop/teleop_twist_keyboard
+$ cd ~/catkin_ws/
+$ catkin_make
+```
+
+### Ceres Solver
+
+Install dependencies:
+
+```
+$ sudo apt-get install libgoogle-glog-dev
+$ sudo apt-get install libatlas-base-dev
+$ sudo apt-get install libeigen3-dev
+```
+
+Get the Ceres Solver source code, then build and install it:
+
+```
+$ mkdir -p ~/ceres
+$ cd ~/ceres/
+$ wget http://ceres-solver.org/ceres-solver-1.14.0.tar.gz
+$ tar xvf ceres-solver-1.14.0.tar.gz
+$ mkdir ceres-build && cd ceres-build
+$ cmake ../ceres-solver-1.14.0
+$ make -j3
+$ sudo make install
+```
+
+### Pepper
+
+http://wiki.ros.org/pepper
+
+```
+$ sudo apt-get install ros-melodic-pepper-.*
+$ sudo apt install ros-melodic-naoqi-bridge-msgs ros-melodic-naoqi-libqi ros-melodic-naoqi-driver ros-melodic-naoqi-libqicore
+```
+
+Download the Python SDK from https://developer.softbankrobotics.com/pepper-naoqi-25-downloads-linux.
+
+Extract `pynaoqi-python2.7-2.5.7.1-linux64.tar.gz` to `~/nao`.
+
+Add the naoqi Python SDK path to .bashrc:
+```
+$ echo "export PYTHONPATH=${PYTHONPATH}:~/nao/pynaoqi-python2.7-2.5.7.1-linux64/lib/python2.7/site-packages" >> ~/.bashrc
+```
+
+Source packages:
+
+```
+$ cd ~/catkin_ws/src/
+$ git clone https://github.com/ros-naoqi/naoqi_dcm_driver
+$ git clone https://github.com/ros-naoqi/naoqi_bridge
+$ git clone https://github.com/ros-naoqi/pepper_robot
+$ cd ~/catkin_ws/
+$ catkin_make
+```
+
+Disable audio in the boot configuration file (set the flag in line 85 from true to false):
+
+```
+$ sudo gedit /opt/ros/melodic/share/naoqi_driver/share/boot_config.json
+```
+
+Optional:
+
+Install Choregraph.
+ +If Choregraph fails to start, try: +``` +sudo ln -sf /usr/lib/x86_64-linux-gnu/libz.so /opt/'Softbank Robotics'/'Choregraphe Suite 2.5'/lib/libz.so.1 +``` + +## Hololens Navigation Installation + +Copy `HoloROSBridge` to `~/catkin_ws/src/HoloROSBridge` + +Copy `HoloLens_Localization` to `~/catkin_ws/src/HoloLens_Localization` + +Copy `navigation_launcher` to `~/catkin_ws/src/navigation_launcher` + +``` +$ cd ~/catkin_ws/src/HoloLens_Localization/scripts +$ chown $USER:$USER dynamic_adjuster.py +$ chmod +x dynamic_adjuster.py +$ chown $USER:$USER localizer.py +$ chmod +x localizer.py +``` + +## Build + +``` +$ cd ~/catkin_ws/ +$ catkin_make +``` + +## HoloLens Spatial Mapping application (Windows) + +Enable development mode on HoloLens. Pair device with PC. Enable Device Portal. + +Install Visual Studio 2019 with Universal Windows Platform build environment. + +After loading the solution in Visual Studio for the first time, navigate to +```Tools```->```NuGet Package Manager```->```Package Manager Console```. If +you see a message "Some NuGet packages are missing from this solution...", +click on ```Restore``` to download and install the missing ```Eigen``` package. +At the time of development, v3.3.3 of Eigen was used for this solution. + +For HoloLens generation 1: build with "Solution Configure:Release", "Solution Platform: x86" +For HoloLens generation 2: build with "Solution Configure:Release", "Solution Platform: ARM64" + + +If HoloLens device is connected via USB: + +![Config](images/config.png) + +Deploy target: "Device" + +If HoloLens is connected via WiFi, deploy target using Remote Machine settings. + +Deploy information: https://docs.microsoft.com/en-us/windows/mixed-reality/develop/platform-capabilities-and-apis/using-visual-studio + +After launching the application, you can air-tap to toggle between wirefreame and solid model render mode. + + diff --git a/Setup/TIPS.md b/Setup/TIPS.md new file mode 100644 index 0000000..2f51693 --- /dev/null +++ b/Setup/TIPS.md @@ -0,0 +1,102 @@ +# General Tips + +## HoloLens + +### Device Portal + +Enable Device Portal on the HoloLens device in `Settings->For developers` + +Navigate to the HoloLens IP using your web browser and set up a user/password. + +#### Certificate Installation +Navigate to the HoloLens IP using your web browser and download the HoloLens certificate. Convert and install it: + +``` +$ sudo apt-get install ca-certificates -y +$ openssl x509 -outform der -in certificate.pem -out certificate.crt +$ sudo cp certificate.crt /usr/local/share/ca-certificates +$ sudo update-ca-certificates +``` + +#### Change or disable HoloLens sleep settings +Navigate to the HoloLens IP using your web browser and log in with the user/pwd set above. In System->Preferences set sleep +settings. To disable, use the browser's inspect feature and add `0` as a list option, then select `0`. Note you will need +to do this twice if you want to disable sleep for both `battery` and `plugged-in` settings. + +## Pepper + +Using [qicli](http://doc.aldebaran.com/2-5/dev/libqi/guide/qicli.html) commands. + +### Start Pepper with autonomous life disabled + +Disable Pepper autonomous life mode using Choregraph: + +- Connect to Pepper using Choregraph. +- Click on blue heart icon in upper right corner. 
+- Wake Pepper up by clicking on sunshine icon in upper right corner + + Disable Pepper autonomous life mode using `ssh`: + +``` +$ ssh nao@ + > nao stop + > naoqi-bin --disable-life +``` + +Connect to Pepper again and: +``` +$ ssh nao@ + > qicli call ALMotion.wakeUp +``` + +To make Pepper go to sleep: +``` +$ ssh nao@ + > qicli call ALMotion.rest +``` + +To shut down Pepper: +``` +$ ssh nao@ + > sudo shutdown -h now +``` + +To get the joint names for the body or a chain: +``` +$ ssh nao@ + > qicli call ALMotion.getBodyNames "Body" +``` + +To view Pepper's current joint state: +``` +$ ssh nao@ + > qicli call ALMotion.getSummary +``` + +To change Pepper's head pitch: +``` +$ ssh nao@ + > qicli call ALMotion.setAngles "HeadPitch" 0.0 0.3 +``` + +The `setAngles` call is a non-blocking call. Parameters: +- `names` – The name or names of joints, chains, “Body”, “JointActuators”, “Joints” or “Actuators”. +- `angles` – One or more angles in radians +- `fractionMaxSpeed` – The fraction of maximum speed to use + +Valid Pepper joint names can be found [here](http://doc.aldebaran.com/2-5/family/romeo/joints_romeo.html?highlight=joint) and [here](https://developer.softbankrobotics.com/nao6/nao-documentation/nao-developer-guide/kinematics-data/effector-chain-definitions#nao-chains), +or call `getBodyNames` for a complete list. + +### Pepper RViz configuration file + +location: `/opt/ros/melodic/share/naoqi_driver/share/pepper.rviz` + + +### Miscellaneous + +Enumerate network interfaces on Ubuntu: + +``` +ip l show +ip a show eno1 +``` \ No newline at end of file diff --git a/Setup/images/config.png b/Setup/images/config.png new file mode 100644 index 0000000..890f752 Binary files /dev/null and b/Setup/images/config.png differ diff --git a/linux/HoloLens_Localization/CMakeLists.txt b/linux/HoloLens_Localization/CMakeLists.txt new file mode 100644 index 0000000..a122ea5 --- /dev/null +++ b/linux/HoloLens_Localization/CMakeLists.txt @@ -0,0 +1,63 @@ +cmake_minimum_required(VERSION 2.8.3) +project(hololens_localizer) + +add_compile_options(-std=c++11) + +find_package(catkin REQUIRED COMPONENTS + roscpp + rospy + tf +) + +find_package(Eigen3 REQUIRED) +find_package(Boost REQUIRED) +find_package(Ceres REQUIRED) + +catkin_package( +# INCLUDE_DIRS include +# LIBRARIES hololens_localizer +# CATKIN_DEPENDS roscpp rospy +# DEPENDS system_lib +) + +########### +## Build ## +########### + +## Specify additional locations of header files +## Your package locations should be listed before other locations + + +include_directories( + include/anchor_localizer + include/static_calibration + ${catkin_INCLUDE_DIRS} + ${EIGEN3_INCLUDE_DIR} + ${Boost_INCLUDE_DIRS} +) + +message("----------------------" ${INCLUDE_DIRS}) + +add_executable(anchor_localizer + src/anchor_localizer/main.cpp + src/anchor_localizer/ICP_module.cpp + ) + +target_link_libraries(anchor_localizer + ${catkin_LIBRARIES} +) + +add_executable(static_calibration + src/static_calibration/main.cpp + src/static_calibration/cost_function.cpp + src/static_calibration/teleop.cpp + src/static_calibration/calibration.cpp +) + +target_link_libraries(static_calibration + ${catkin_LIBRARIES} + ceres +) + + + diff --git a/linux/HoloLens_Localization/include/anchor_localizer/ICP_module.h b/linux/HoloLens_Localization/include/anchor_localizer/ICP_module.h new file mode 100644 index 0000000..88fcb0c --- /dev/null +++ b/linux/HoloLens_Localization/include/anchor_localizer/ICP_module.h @@ -0,0 +1,58 @@ +#include "ros/ros.h" + +#include +#include + +#include +#include 
+#include +#include +#include "eigen3/Eigen/Core" +#include "eigen3/Eigen/Geometry" + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include + + +namespace bg = boost::geometry; +namespace bgi = boost::geometry::index; + +typedef bg::model::point point; +typedef bg::model::box box; +typedef std::pair value; + +class ICP2D_Module +{ +public: + ICP2D_Module(ros::NodeHandle n); + void start(); + +private: + void callbackPC(const sensor_msgs::PointCloud::ConstPtr& pointcloud); + void callbackMap(const nav_msgs::OccupancyGrid::ConstPtr& subscribedmap); + void callbackInitPose(const geometry_msgs::PoseWithCovarianceStamped::ConstPtr& initPose); + + void distFromPlane(geometry_msgs::Point32 pt,geometry_msgs::Point32& ptret,double& dist); + void linearCompute(std::vector ,std::vector,Eigen::Matrix2d& R,Eigen::Vector2d& T); + +#include + Eigen::Vector3d nv, t1, t2; + double h; // plane parameter + ros::Publisher pub, pubpre, pubimg, pubreq, pubarr; + ros::Subscriber sub, submap, subinitpose; + tf::TransformListener listener; + bgi::rtree< value, bgi::quadratic<16> > rtree; + double initx, inity, yaw; + bool bInit; +}; diff --git a/linux/HoloLens_Localization/include/static_calibration/calibration.h b/linux/HoloLens_Localization/include/static_calibration/calibration.h new file mode 100644 index 0000000..88c95e9 --- /dev/null +++ b/linux/HoloLens_Localization/include/static_calibration/calibration.h @@ -0,0 +1,159 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. 
+// +//********************************************************* + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "teleop.h" +#include +#include +#include "cost_function.h" +#include "ceres/ceres.h" +#include "glog/logging.h" + +using ceres::AutoDiffCostFunction; +using ceres::NumericDiffCostFunction; +using ceres::CostFunction; +using ceres::Problem; +using ceres::Solver; +using ceres::Solve; +using namespace Eigen; + +class calibrator +{ +public: + calibrator(std::string holoLinkedFrame, std::string odomFrame, std::string robotFootFrame,std::string fpath); + void run(); +private: + void publish_thread(); + + void setTransform(); + void calibration(std::vector pep_pos, std::vector hol_pos, Matrix3d& R, Vector3d& T); + void horizontalCalibration(std::vector pep_pos, std::vector hol_pos, std::vector verticalVecs, std::vector normVecs, Matrix3d& R, Vector3d& T); + void transition_log(std::vector pep_pos, std::vector hol_pos,std::ofstream& ofs); + void linear_calibration(std::vector pep_pos, std::vector hol_pos, Matrix3d& R, Vector3d& T); + void nonlinear_calibration(std::vector pep_pos, std::vector hol_pos, Matrix3d& R, Vector3d& T); + void horizontal_initialization(std::vector pep_pos, std::vector hol_pos, std::vector verticalVecs, std::vector normVecs, Matrix3d& R, Vector3d& T, double* bestScores); + void nonlinear_horizontal_calibration(std::vector pep_pos, std::vector hol_pos, std::vector verticalVecs, std::vector normVecs, Matrix3d& R, Vector3d& T, double* bestScores); + void poseStampedCB(const geometry_msgs::PoseStamped::ConstPtr& msg); + bool lookupTransform(); + void recordCurrentPosition(); + void calibrate(); + bool setHeadPositionAndRecord(float fPitch, float fYaw); + bool autocalibrate(); + void writeCalibrationData(); + + std::vector m_pep_pos; + std::vector m_hol_pos; + std::vector m_floor2holo; + std::vector m_head2foot; + + geometry_msgs::PoseStamped m_latestPoseStamped; + + Matrix3d R; + Vector3d T; + + tf::StampedTransform m_transform1; + tf::StampedTransform m_transform2; + tf::StampedTransform m_transform3; + tf::StampedTransform m_transform4; + tf::TransformBroadcaster m_tf_br; + tf::StampedTransform m_tf_map_to_odom; + ros::Subscriber m_holo_floor_sub; + ros::NodeHandle m_nh; + + bool m_horizontalCalibMode = false; + + std::string m_holoLinkedFrame; + std::string m_odomFrame; + std::string m_robotFootFrame; + std::string m_fpath; + + tf::TransformListener m_listener; + teleop m_tele; + + int getch() + { + static struct termios oldt, newt; + tcgetattr( STDIN_FILENO, &oldt); // save old settings + newt = oldt; + newt.c_lflag &= ~(ICANON); // disable buffering + tcsetattr( STDIN_FILENO, TCSANOW, &newt); // apply new settings + + int c = getchar(); // read character + + tcsetattr( STDIN_FILENO, TCSANOW, &oldt); // restore old settings + return c; + } + + bool kbhit() + { + termios term; + tcgetattr(0, &term); + + termios term2 = term; + term2.c_lflag &= ~ICANON; + tcsetattr(0, TCSANOW, &term2); + + int byteswaiting; + ioctl(0, FIONREAD, &byteswaiting); + + tcsetattr(0, TCSANOW, &term); + + return byteswaiting > 0; + } + + Matrix4d btTrans2EigMat4d(tf::Transform t) + { + tf::Matrix3x3 btm(t.getRotation()); + Matrix4d m; + m< bool operator()(const T* const x, T* residual) const { + residual[0] = 10.0 - x[0]; + return true; + } +}; + +//e=|AX-XB| +struct simple_costfunctor +{ +public: + simple_costfunctor(Eigen::Matrix4d& A_,Eigen::Matrix4d& B_){A=A_;B=B_;}; + bool operator()(const 
double* parameters, double* residual)const{ + double tx=parameters[0]; + double ty=parameters[1]; + double tz=parameters[2]; + double rx=parameters[3]; + double ry=parameters[4]; + double rz=parameters[5]; + + Eigen::Matrix3d R=axisRot2R(rx,ry,rz); + Eigen::Matrix4d X; + X.block(0,0,3,3)=R; + X(0,3)=tx; + X(1,3)=ty; + X(2,3)=tz; + X.block(3,0,1,4)<<0,0,0,1; + //std::cout< +#include +#include +#include +#include +#include + +class teleop +{ +public: + teleop(); + + void operation(int c); + void setPepperHeadPitch(float angle); + void setPepperHeadYaw(float angle); + void setPepperHeadPitchYaw(float pitch, float yaw); + +private: + /** + * \brief calls m_bodyPoseClient on the poseName, to execute a body pose + * @return success of actionlib call + */ + bool callBodyPoseClient(const std::string& poseName); + +private: + bool m_fStiffness; + double m_maxHeadYaw; + double m_maxHeadPitch; + ros::Duration m_bodyPoseTimeOut; + ros::Publisher m_movePub; + ros::Publisher m_headPub; + ros::ServiceClient m_stiffnessDisableClient; + ros::ServiceClient m_stiffnessEnableClient; + ros::ServiceClient m_wakeup; + ros::ServiceClient m_rest; + actionlib::SimpleActionClient m_bodyPoseClient; +}; + diff --git a/linux/HoloLens_Localization/package.xml b/linux/HoloLens_Localization/package.xml new file mode 100644 index 0000000..c6b9d96 --- /dev/null +++ b/linux/HoloLens_Localization/package.xml @@ -0,0 +1,65 @@ + + + hololens_localizer + 0.0.0 + The hololens_localizer package + + + + + t-ryish + + + + + + TODO + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + catkin + roscpp + rospy + roscpp + rospy + roscpp + rospy + + + + + + + + diff --git a/linux/HoloLens_Localization/scripts/dynamic_adjuster.py b/linux/HoloLens_Localization/scripts/dynamic_adjuster.py new file mode 100644 index 0000000..9a82a58 --- /dev/null +++ b/linux/HoloLens_Localization/scripts/dynamic_adjuster.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+# -------------------------------------------------------------------------------------------- + +import rospy +import socket +import struct +import sys +import tf +import numpy as np +from geometry_msgs.msg import PoseWithCovarianceStamped +from std_msgs.msg import * +import math + + +# ----------------------------------------------------------------------------- +# +if __name__ == '__main__': + rospy.init_node('dynamic_adjuster') + listener=tf.TransformListener() + + if len(sys.argv)<2: + rospy.logerr("usage: dynamic_adjuster.py ") + exit() + + footprintFrame="/" + sys.argv[1] + + print "dynamic_adjuster: footprintframe: " + footprintFrame + + hololensPos = rospy.Publisher('/initialpose_h', PoseWithCovarianceStamped,queue_size=1) + + rate = rospy.Rate(10.0) + br = tf.TransformBroadcaster() + + last_update = rospy.get_time() + firstloop = True + adjuster = True + + while not rospy.is_shutdown(): + + #get transform from map to footprint via hololens + # map -> HoloLens + try: + #if listener.frameExists("/map") and listener.frameExists("/hololens"): + t = listener.getLatestCommonTime('/map', '/hololens') + (trans,rot) = listener.lookupTransform('/map', '/hololens', t) + #else: + # continue + except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException,tf.Exception): + print "tf error /map /hololens" + continue + try: + (trans2,rot2) = listener.lookupTransform('/hololens_p', footprintFrame, rospy.Time(0)) + except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException,tf.Exception): + print "tf error hololens_p footprintframe" + continue + + #position jump check caused by switching spatial anchor + if not firstloop: + if (last_t == t): + continue + + d = (trans[0]-last_trans[0])*(trans[0]-last_trans[0])+(trans[1]-last_trans[1])*(trans[1]-last_trans[1]) + now = rospy.get_time() + + if d > 1.0 and now - last_update < 2.0: + print "dynamic_adjuster: position jumped (ddist , dtime) (" + str(d) + "," + str(now-last_update) + continue + + #compute transform matrix from map to hololens_p + map2holo = np.dot(tf.transformations.translation_matrix(trans), tf.transformations.quaternion_matrix(rot)) + holo2foot = np.dot(tf.transformations.translation_matrix(trans2), tf.transformations.quaternion_matrix(rot2)) + + map2foot=np.dot(map2holo,holo2foot) + map2foot_beforeAdjust=map2foot + + #dynamic footprint correction + if adjuster: + foot2holo=np.linalg.inv(holo2foot) + + #calc adjust rotation matrix + foot2map=np.linalg.inv(map2foot) + rz=np.array([foot2map[0][2],foot2map[1][2],foot2map[2][2]]) + + to_z=np.array([0,0,1.0]) + axis=np.cross(rz,to_z) + angle=math.acos(np.dot(rz,to_z)) + adjustMatrixA=tf.transformations.quaternion_matrix(tf.transformations.quaternion_about_axis(angle,axis)) + adjustMatrixA=np.linalg.inv(adjustMatrixA) + #debug + debugMat=np.dot(map2foot,adjustMatrixA) + + # + adjustMatrixB=np.dot(np.dot(holo2foot,adjustMatrixA),foot2holo) + adjustMatrixB[0][3]=0 + adjustMatrixB[1][3]=0 + adjustMatrixB[2][3]=0 + map2foot=np.dot(np.dot(map2holo,adjustMatrixB),holo2foot) + + brtrans = (map2foot[0][3], map2foot[1][3], map2foot[2][3]) + brrot = tf.transformations.quaternion_from_matrix(map2foot) + br.sendTransform(brtrans, brrot, rospy.Time.now(), "/localized_footprint", "/map") + + brtrans = (map2foot_beforeAdjust[0][3], map2foot_beforeAdjust[1][3], map2foot_beforeAdjust[2][3]) + brrot = tf.transformations.quaternion_from_matrix(map2foot_beforeAdjust) + br.sendTransform(brtrans, brrot, rospy.Time.now(), "/localized_footprint_nAdj", "/map") + + cmd = 
PoseWithCovarianceStamped() + cmd.pose.pose.position.x=map2foot[0][3] + cmd.pose.pose.position.y=map2foot[1][3] + cmd.pose.pose.position.z=0 + q=tf.transformations.quaternion_from_matrix(map2foot) + invq=tf.transformations.quaternion_inverse(q) + + dirq= np.zeros((4, ), dtype=np.float64) + dirq[0]=1 + q1=tf.transformations.quaternion_multiply(dirq,invq) + q2=tf.transformations.quaternion_multiply(q,q1) + + qz=q2[0] + qw=q2[1] + rad=math.sqrt(qz*qz+qw*qw) + qz=qz/rad + qw=qw/rad + + theta = math.acos(qz) + if qw < 0: + theta=-theta + + cmd.pose.pose.orientation.x = 0 + cmd.pose.pose.orientation.y = 0 + cmd.pose.pose.orientation.z = math.sin(theta/2) + cmd.pose.pose.orientation.w = math.cos(theta/2) + hololensPos.publish(cmd) + last_trans = trans + last_update = rospy.get_time() + firstloop = False + last_t = t + rate.sleep() diff --git a/linux/HoloLens_Localization/scripts/localizer.py b/linux/HoloLens_Localization/scripts/localizer.py new file mode 100644 index 0000000..30d993f --- /dev/null +++ b/linux/HoloLens_Localization/scripts/localizer.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +# -------------------------------------------------------------------------------------------- + +import roslib +import rospy +import socket +import geometry_msgs.msg +import math +import tf +import struct +import numpy as np +from geometry_msgs.msg import PoseWithCovarianceStamped + +global trans +global rot + +global brtrans +global brrot + +# ----------------------------------------------------------------------------- +# +def initialposeCB(msg): + #robot odom-base (input) + global trans + global rot + + #robot map-odom (output) + global brtrans + global brrot + + #massage to translation, rotation + inittrans=(msg.pose.pose.position.x,msg.pose.pose.position.y,msg.pose.pose.position.z) + initposequot=(msg.pose.pose.orientation.x, + msg.pose.pose.orientation.y, + msg.pose.pose.orientation.z, + msg.pose.pose.orientation.w) + initrot=tf.transformations.quaternion_matrix(initposequot) + map2foot= np.dot(tf.transformations.translation_matrix(inittrans),initrot) + odom2foot = np.dot(tf.transformations.translation_matrix(trans),tf.transformations.quaternion_matrix(rot)) + + + foot2odom=np.linalg.inv(odom2foot) + + map2odom=np.dot(map2foot,foot2odom) + br = tf.TransformBroadcaster() + #map2foot=np.dot(map2holo,holo2foot) + brtrans = (map2odom[0][3], map2odom[1][3], map2odom[2][3]) + brrot = tf.transformations.quaternion_from_matrix(map2odom) + +# ----------------------------------------------------------------------------- +# +if __name__ == '__main__': + rospy.init_node('localizer') + + listener = tf.TransformListener() + + # from ros + sub = rospy.Subscriber('/initialpose', PoseWithCovarianceStamped, initialposeCB) + + # from dynamic_adjuster.py + sub2 = rospy.Subscriber('/initialpose_h', PoseWithCovarianceStamped, initialposeCB) + + br = tf.TransformBroadcaster() + brtrans=(0,0, 0) + brrot=(0,0,0,1) + + rate = rospy.Rate(10) + while not rospy.is_shutdown(): + rospy.loginfo("Getting transform for '/base_footprint'!") + try: + # obtain robot odometry to base_footprint (for pepper) + (trans, rot) = listener.lookupTransform('/odom', '/base_footprint', rospy.Time(0)) + rospy.loginfo("Got transform for '/base_footprint'!") + except (tf.LookupException, tf.ConnectivityException,tf.ExtrapolationException): + rospy.logwarn("tf 
error. Unable to get transform for '/base_footprint'!") + continue + + br.sendTransform(brtrans, brrot, rospy.Time.now(), "/odom", "/map") + + rate.sleep() + + rospy.loginfo("localizer.py exit...") diff --git a/linux/HoloLens_Localization/src/anchor_localizer/ICP_module.cpp b/linux/HoloLens_Localization/src/anchor_localizer/ICP_module.cpp new file mode 100644 index 0000000..e9eb649 --- /dev/null +++ b/linux/HoloLens_Localization/src/anchor_localizer/ICP_module.cpp @@ -0,0 +1,420 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "ICP_module.h" + +const char * pszAppName = "hololens_anchor_localizer"; + +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +ICP2D_Module::ICP2D_Module(ros::NodeHandle n) +{ + sub = n.subscribe("/hololens/pc",1000,&ICP2D_Module::callbackPC,this); + submap = n.subscribe("/map", 10, &ICP2D_Module::callbackMap, this); + + subinitpose = n.subscribe("/initialpose", 10, &ICP2D_Module::callbackInitPose, this); + + pub = n.advertise("/hololens/pc2d",1000); + pubpre = n.advertise("/hololens/pc2d_pre",1000); + pubimg = n.advertise("/hololens/image",1000); + pubreq = n.advertise("/hololens/ack",1000); + pubarr = n.advertise("hololens/localized",1000); + + //plane surface normal direction for cross section computation + nv << 0,1,0; + h = 0; + + t1 << 1,0,0; + t2 << 0,0,1; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void ICP2D_Module::callbackInitPose(const geometry_msgs::PoseWithCovarianceStamped::ConstPtr& initPose) +{ + // + // subscribe initial pose on Rviz + // + + // convert initial pose to 2D position and direction angle + initx = initPose->pose.pose.position.x; + inity = initPose->pose.pose.position.y; + + tf::Quaternion initq( + initPose->pose.pose.orientation.x, + initPose->pose.pose.orientation.y, + initPose->pose.pose.orientation.z, + initPose->pose.pose.orientation.w); + + tf::Matrix3x3 mat(initq); + + yaw = acos(mat[0][0]); + + if (mat[0][1] > 0) + yaw=-yaw; + + ROS_INFO("2D Pose Estimate request received. Requesting to initialize HoloLens bridge..."); + + // request initialize to bridge + std_msgs::Bool req; + + req.data = true; + + pubreq.publish(req); + + bInit = true; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void ICP2D_Module::callbackPC(const sensor_msgs::PointCloud::ConstPtr& pointcloud) +{ + ROS_INFO("HoloLens point cloud data received. 
Calculating optimal rotation and transform..."); + + // point cloud: SA coordinates + sensor_msgs::PointCloud pc2d,pc2d_prev; + double err; + geometry_msgs::Point32 pt2; + std::vector querypoints; + + // 3D to 2D :SA coordinates to ros coordinates + double range[4]={100,-100,100,-100};//minx,maxx,miny,maxy to determine map size + for (int i = 0; i < pointcloud->points.size(); i++) { + geometry_msgs::Point32 pt = pointcloud->points.at(i); + + distFromPlane(pt,pt2,err);//compute point2plane distance + + if (err < 0.1) { + Eigen::Vector2d qp;qp<<-pt2.z,-pt2.x; + querypoints.push_back(qp); + if (range[0] > qp(0)) + range[0] = qp(0); + if (range[1] < qp(0)) + range[1] = qp(0); + if (range[2] > qp(1)) + range[2] = qp(1); + if (range[3] < qp(1)) + range[3] = qp(1); + } + } + + //std::cout << range[0] <<"," << range[1] << "," << range[2] <<"," << range[3] <(img.width*img.height, 255); + + //initial pose for alignment + //given from Rviz(first time) or tf + if (!bInit) { + // obtain tf between map-SA(ROS) + try { + tf::StampedTransform transform; + listener.waitForTransform("/map", "/spatialAnchor_ros", pointcloud->header.stamp, ros::Duration(5.0)); + listener.lookupTransform(std::string("map"),std::string("/spatialAnchor_ros"), + pointcloud->header.stamp,transform); + initx = transform.getOrigin().getX(); + inity = transform.getOrigin().getY(); + tf::Matrix3x3 mat = transform.getBasis(); + yaw = acos(mat[0][0]); + if (mat[0][1] > 0) + yaw = -yaw; + } catch (tf::TransformException ex) { + ROS_ERROR("listener error!!"); + } + } + + bInit=false; + // 2D(x,y,yaw)-->2D R,t + tf::Quaternion initq; + Eigen::Matrix2d R; + Eigen::Vector2d T; + + R << cos(yaw),-sin(yaw),sin(yaw),cos(yaw); + T << initx,inity; + + std::cout << pszAppName << ": rotation matrix" << std::endl; + std::cout << /* std::setw(16) << */ R << std::endl; + std::cout << pszAppName << ": transform" << std::endl; + std::cout << /* std::setw(16) << */ T << std::endl; + std::cout << pszAppName << ": query point size: " << querypoints.size() << std::endl; + + //build pre-map + point cloud before alignment(for displaying) + for (int i = 0; i < querypoints.size(); i++) { + Eigen::Vector2d pt; + + pt << querypoints.at(i)(0),querypoints.at(i)(1); + + //compute correspond pixel coordinates + int plotx = img.width -((pt(0)-range[0])/pixresolution+margen); + int ploty = (pt(1)-range[2])/pixresolution+margen; + int idx = ploty * img.step + plotx; + img.data[idx] = 0; // fill pixel as black + + pt = R*pt + T; + + geometry_msgs::Point32 gp; + + gp.x = pt(0); + gp.y = pt(1); + gp.z = 0; + + pc2d_prev.points.push_back(gp); + } + + pc2d_prev.header.frame_id = "map"; + + // publish image and point cloud + pubpre.publish(pc2d_prev); + pubimg.publish(img); + + if (rtree.size() == 0) + return; // return if pre-map is not published + + { //cvt initial 2D rotation to 3D quaternion + tf::Matrix3x3 mat(R(0,0),R(0,1),0, + R(1,0),R(1,1),0, + 0,0,1); + + mat.getRotation(initq); + } + + // registration (simple 2d ICP alignment) + bool positionLost = false; + for (int itr = 0; itr < 10; itr++) { + std::vector idx; + std::vector dstpoints,querypoints_opt; + + if (querypoints.size() < 50) { + ROS_ERROR("data shortage!!"); + positionLost = true; + break; + } + + // searching nearest points + // printf("%s: searching nearest points...\n", pszAppName); + for (int i = 0; i < querypoints.size();) { + std::vector result_n; + Eigen::Vector2d qp; + + qp << querypoints[i](0),querypoints[i](1); + + //transform query point (HoloLens) + qp = R*qp + T; + + // search nearest point + 
rtree.query(bgi::nearest(point(qp(0),qp(1)),1),std::back_inserter(result_n)); + + // nearest point in pre-map + float x = result_n[0].first.get<0>(); + float y = result_n[0].first.get<1>(); + + Eigen::Vector2d dp; + + dp << x,y; + + if ((qp - dp).norm() > 1.0) { + i++; + continue; //outlier rejection (constant threshold: 1m) + } + + querypoints_opt.push_back(querypoints[i]); + dstpoints.push_back(dp); + i++; + } + + // printf("%s: finding optimal rotation and transform...\n", pszAppName); + + // obtain optimal R,t (solve ICP) + linearCompute(querypoints_opt,dstpoints,R,T); + + yaw = acos(R(0,0)); + if (R(0,1) > 0) + yaw = -yaw; + + // cvt 2D rotation to 3D quaternion + tf::Matrix3x3 mat(R(0,0),R(0,1),0, + R(1,0),R(1,1),0, + 0,0,1 + ); + + mat.getRotation(initq); + + initx = T(0); + inity = T(1); + + // std::cout << pszAppName << ": " << R << std::endl << initx << "," << inity << "," << yaw << std::endl; + } + + ROS_INFO("Calculating optimal rotation and transform completed: x=%.4f, y=%.4f, yaw=%.4f", (float)initx, (float)inity, (float)yaw); + + // publish optimized position and direction + geometry_msgs::Quaternion msgqt; + + tf::quaternionTFToMsg(initq, msgqt); + + geometry_msgs::PoseStamped ps; + + ps.pose.position.x = initx; + ps.pose.position.y = inity; + ps.pose.position.z = 0; + ps.pose.orientation = msgqt; + ps.header.frame_id = "map"; + + pubarr.publish(ps); + + // publish aligned 2D point cloud (for displaying) + for (int i = 0; i < querypoints.size();) { + std::vector result_n; + Eigen::Vector2d qp; + + qp << querypoints[i](0),querypoints[i](1); + qp = R*qp + T; + + geometry_msgs::Point32 gp; + + gp.x = qp(0); + gp.y = qp(1); + gp.z = 0; + + pc2d.points.push_back(gp); + i++; + } + + pc2d.header.frame_id = "map"; + + pub.publish(pc2d); +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void ICP2D_Module::callbackMap(const nav_msgs::OccupancyGrid::ConstPtr& subscribedmap) +{ + // + // get floormap and create rtree + // + + if (rtree.size() > 0) + return; // process only once + + int width = subscribedmap->info.width; + int height = subscribedmap->info.height; + double scale = subscribedmap->info.resolution; + unsigned int cnt = 0; + + // cvt pixel to point cloud + for (int i = 0; i < width; i++) { + for (int j = 0; j < height; j++) { + if (subscribedmap->data[i+j*width] > 50) { + point p = point(i*scale+subscribedmap->info.origin.position.x,j*scale+subscribedmap->info.origin.position.y); + rtree.insert(std::make_pair(p, ++cnt)); + } + } + } + + ROS_INFO("map_server node initialized. Map size is %d x %d pixels. 
Tree size is %u points.", width, height, cnt); +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void ICP2D_Module::distFromPlane(geometry_msgs::Point32 pt,geometry_msgs::Point32& ptret,double& dist) +{ + // + // compute point to plane distance + // + dist = pt.x*nv(0)+pt.y*nv(1)+pt.z*nv(2)-h; + ptret.x = pt.x-dist*nv(0); + ptret.y = pt.y-dist*nv(1); + ptret.z = pt.z-dist*nv(2); + dist = std::fabs(dist); +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void ICP2D_Module::start() +{ + ros::Rate loop_rate(10); + + while(ros::ok()) { + loop_rate.sleep(); + ros::spinOnce(); + } +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void ICP2D_Module::linearCompute(std::vector a,std::vector b,Eigen::Matrix2d& R,Eigen::Vector2d& T) +{ + // + // solve ICP problem + // + + // get centroid + Eigen::Vector2d ga,gb; + + ga << 0,0; + gb << 0,0; + for (int i = 0; i < a.size(); i++) { + ga += a.at(i); + gb += b.at(i); + } + + ga = ga*(1.0/a.size()); + gb = gb*(1.0/a.size()); + + // + Eigen::Vector2d vga,vgb; + + vga << ga(0),ga(1); + vgb << gb(0),gb(1); + + Eigen::MatrixXd H(2,2); + H.setZero(); + + // std::cout << pszAppName << ": " << ga.transpose() << ":" << gb.transpose() << std::endl; + + for (int i = 0; i < a.size(); i++) { + Eigen::Vector2d pa,pb; + + pa << a.at(i)(0),a.at(i)(1); + pb << b.at(i)(0),b.at(i)(1); + + Eigen::MatrixXd h_=(pa-vga)*(pb-vgb).transpose(); + + H=H+h_; + } + + Eigen::JacobiSVD svd(H, Eigen::ComputeThinU | Eigen::ComputeThinV); + Eigen::Matrix2d Hm; + Eigen::Matrix2d uvt = svd.matrixU()*svd.matrixV().transpose(); + + Hm << 1,0,0,uvt(0,0)*uvt(1,1)-uvt(0,1)*uvt(1,0); + + R = svd.matrixV()*Hm*svd.matrixU().transpose(); + +/* if(R.determinant()<0){ + + R.col(2)=-1*R.col(2); + + }*/ + + T = -R*vga+vgb; + + return; +} diff --git a/linux/HoloLens_Localization/src/anchor_localizer/main.cpp b/linux/HoloLens_Localization/src/anchor_localizer/main.cpp new file mode 100644 index 0000000..ef60ab9 --- /dev/null +++ b/linux/HoloLens_Localization/src/anchor_localizer/main.cpp @@ -0,0 +1,28 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. 
+// +//********************************************************* + +#include "ros/ros.h" +#include "ICP_module.h" + +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +int main(int argc, char* argv[]) +{ + ros::init(argc,argv, "anchor_localizer"); + ros::NodeHandle n; + + ICP2D_Module icp_module(n); + + icp_module.start(); + + return 0; +} diff --git a/linux/HoloLens_Localization/src/static_calibration/calibration.cpp b/linux/HoloLens_Localization/src/static_calibration/calibration.cpp new file mode 100644 index 0000000..53a9845 --- /dev/null +++ b/linux/HoloLens_Localization/src/static_calibration/calibration.cpp @@ -0,0 +1,789 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "calibration.h" + +void printHelp(); + +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +calibrator::calibrator(std::string holoLinkedFrame, std::string odomFrame, std::string robotFootFrame, std::string fpath) +{ + // + // constructor with file path + + m_holoLinkedFrame = holoLinkedFrame; + m_odomFrame = odomFrame; + m_robotFootFrame = robotFootFrame; + m_fpath = fpath; + + // + // initialize transform + R << 1, 0, 0, 0, 1, 0, 0, 0, 1; + T << 0, 0, 0; + + if (!m_fpath.empty()) { + std::ifstream ifs(m_fpath, std::ios::binary); + if (ifs) + { + // + // file exists, read data from it + float dat[12]; + ifs.read((char *)dat, sizeof(float) * 12); + R << dat[0], dat[1], dat[2], dat[3], dat[4], dat[5], dat[6], dat[7], dat[8]; + T << dat[9], dat[10], dat[11]; + ifs.close(); + + std::cout << "Calibration file '" << m_fpath << "' loaded." 
<< std::endl; + } + else + { + // + // file does not exist, write initial data + writeCalibrationData(); + } + } + + // set transformation + setTransform(); + + m_horizontalCalibMode = false; + boost::thread *thr = new boost::thread(boost::bind(&calibrator::publish_thread, this)); +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::setTransform() +{ + m_tf_map_to_odom.frame_id_ = std::string(m_holoLinkedFrame); + m_tf_map_to_odom.child_frame_id_ = std::string("hololens_p"); + m_tf_map_to_odom.setOrigin(tf::Vector3(T(0), T(1), T(2))); + + tf::Matrix3x3 rot(R(0, 0), R(0, 1), R(0, 2), + R(1, 0), R(1, 1), R(1, 2), + R(2, 0), R(2, 1), R(2, 2)); + tf::Quaternion q; + rot.getRotation(q); + m_tf_map_to_odom.setRotation(q); +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::poseStampedCB(const geometry_msgs::PoseStamped::ConstPtr &msg) +{ + m_latestPoseStamped = *msg; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::run() +{ + m_holo_floor_sub = m_nh.subscribe < geometry_msgs::PoseStamped>("/holo/floor", 1, &calibrator::poseStampedCB, this); + + std::cout << "Calibration: ready" << std::endl; + + while (ros::ok()) + { + int c; + bool fSuccess; + + //get keyboard input + if (kbhit()) + { + std::cout << "\r \r"; + c = getch(); + } + else + { + ros::spinOnce(); + continue; + } + + fSuccess = lookupTransform(); + if (!fSuccess) + continue; + + if (c == ' ') // space key: record current position + { + recordCurrentPosition(); + } + else if (c == 'c') // calibration + { + calibrate(); + } + else if (c == 'a') // auto-calibration + { + autocalibrate(); + } + else if (c == 'q') // quit + { + break; + } + else if (c == 'w') // save calibration result + { + writeCalibrationData(); + } + else if (c == 'd') // delete recorded position + { + if (m_pep_pos.size() >= 1) + { + m_pep_pos.pop_back(); + m_hol_pos.pop_back(); + } + } + else if (c == 'z') // save to log + { + std::ofstream ofs(m_fpath + ".log"); + std::cout << "Param R" << std::endl; + std::cout << R << std::endl; + std::cout << "Param t" << std::endl; + std::cout << T << std::endl; + ofs << "Param R" << std::endl; + ofs << R << std::endl; + ofs << "Param t" << std::endl; + ofs << T << std::endl; + } + else if (c == 't') // toggle calibration mode (General hand-eye calibration <-> Calibration with limited movements (Using Horizontal movement)) + { + m_horizontalCalibMode = !m_horizontalCalibMode; + + if (m_horizontalCalibMode) + std::cout << "horizontal calibration mode: Horizontal" << std::endl; + else + std::cout << "horizontal calibration mode: Bi-directional rotation" << std::endl; + + //reset + m_pep_pos.clear(); + m_hol_pos.clear(); + m_floor2holo.clear(); + m_head2foot.clear(); + } + else if ((c == '?') || (c == 'h')) + { + printHelp(); + } + else // tele-operation + { + m_tele.operation(c); + } + } +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::publish_thread() +{ + ros::Rate loop_rate(100); + while (ros::ok()) + { + // broadcast transform + m_tf_map_to_odom.stamp_ = ros::Time::now(); + 
m_tf_br.sendTransform(m_tf_map_to_odom); + + loop_rate.sleep(); + } +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +bool calibrator::lookupTransform() +{ + try + { + // + // get Odom-holoLinkedFrame, SA-HoloLens transformation + m_listener.lookupTransform(std::string(m_odomFrame), std::string(m_holoLinkedFrame), ros::Time(0), m_transform1); + m_listener.lookupTransform(std::string("spatialAnchor"), std::string("hololens"), ros::Time(0), m_transform2); + } + catch (tf::TransformException ex) + { + std::cout << "Calibration: tf listener error!! " << ex.what() << std::endl; + return false; + } + + return true; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::recordCurrentPosition() +{ + if (m_horizontalCalibMode) + { + try + { // obtain map-hololens, Head-base + m_listener.waitForTransform("/map", "/hololens", m_latestPoseStamped.header.stamp, ros::Duration(5.0)); + m_listener.lookupTransform(std::string("map"), std::string("hololens"), m_latestPoseStamped.header.stamp, m_transform3); + m_listener.lookupTransform(std::string(m_holoLinkedFrame), std::string(m_robotFootFrame), m_latestPoseStamped.header.stamp, m_transform4); + } + catch (tf::TransformException ex) + { + std::cout << "Calibration: tf listener error!! " << ex.what() << std::endl; + return; + } + + Eigen::Vector3d floor2holoVec, head2footVec; + // obtain floor to HoloLens vector (map to HoloLens coordinate) + floor2holoVec << m_transform3.getOrigin().getX() - m_latestPoseStamped.pose.position.x, m_transform3.getOrigin().getY() - m_latestPoseStamped.pose.position.y, m_transform3.getOrigin().getZ() - m_latestPoseStamped.pose.position.z; + Matrix4d map2holo = btTrans2EigMat4d(m_transform3); + floor2holoVec = map2holo.block(0, 0, 3, 3).inverse() * floor2holoVec; + m_floor2holo.push_back(floor2holoVec); + + head2footVec << m_transform4.getOrigin().getX(), m_transform4.getOrigin().getY(), m_transform4.getOrigin().getZ(); + m_head2foot.push_back(head2footVec); + } + + // Odom-holoLinkedFrame (Robot) + m_pep_pos.push_back(m_transform1); + + // SA-HoloLens + m_hol_pos.push_back(m_transform2); + + // display position + tf::Quaternion q = m_transform1.getRotation(); + std::cout << "Calibration: count " << m_pep_pos.size() << std::endl; + std::cout << "Calibration: " << m_holoLinkedFrame << ":(" << m_transform1.getOrigin().getX() << "," << m_transform1.getOrigin().getY() << "," << m_transform1.getOrigin().getZ() << "),(" << q.getX() << "," << q.getY() << "," << q.getZ() << "," << q.getW() << ")" << std::endl; + q = m_transform2.getRotation(); + std::cout << "Calibration: HoloLens:(" << m_transform2.getOrigin().getX() << "," << m_transform2.getOrigin().getY() << "," << m_transform2.getOrigin().getZ() << "),(" << q.getX() << "," << q.getY() << "," << q.getZ() << "," << q.getW() << ")" << std::endl; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::calibrate() +{ + if (m_pep_pos.size() < 3) + { + std::cout << "Calibration Error!! The number of recorded positions must be >=3 !!" 
<< std::endl; + return; + } + + std::cout << "======= calibration start ========" << std::endl; + if (!m_horizontalCalibMode) + { + calibration(m_pep_pos, m_hol_pos, R, T); + } + else + { + horizontalCalibration(m_pep_pos, m_hol_pos, m_floor2holo, m_head2foot, R, T); + } + + // display calibration result + std::cout << "======= calibration done =========" << std::endl; + std::cout << "Calibration results:\n translation" << std::endl + << T << std::endl + << "rotation" << std::endl + << R << std::endl; + m_tf_map_to_odom.setOrigin(tf::Vector3(T(0), T(1), T(2))); + tf::Matrix3x3 rot(R(0, 0), R(0, 1), R(0, 2), + R(1, 0), R(1, 1), R(1, 2), + R(2, 0), R(2, 1), R(2, 2)); + tf::Quaternion q; + rot.getRotation(q); + m_tf_map_to_odom.setRotation(q); +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +bool calibrator::setHeadPositionAndRecord(float fPitch, float fYaw) +{ + bool fSuccess; + double duration = 3.0; + + m_tele.setPepperHeadPitchYaw(fPitch, fYaw); + + ros::Duration(duration).sleep(); + ros::spinOnce(); + + fSuccess = lookupTransform(); + if (!fSuccess) + return false; + + recordCurrentPosition(); + + return true; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +bool calibrator::autocalibrate() +{ + bool fSuccess; + + std::cout << "auto-calibrate: step 1 of 5 - resetting pepper head and recording position..." << std::endl; + if (!setHeadPositionAndRecord(0.0, 0.0)) + return false; + + std::cout << "auto-calibrate: step 2 of 5 - setting pepper head position A and recording position..." << std::endl; + if (!setHeadPositionAndRecord(-0.35, 0.0)) + return false; + + std::cout << "auto-calibrate: step 3 of 5 - setting pepper head position B and recording position..." << std::endl; + if (!setHeadPositionAndRecord(0.00, 0.7)) + return false; + + std::cout << "auto-calibrate: step 4 of 5 - setting pepper head position C and recording position..." << std::endl; + if (!setHeadPositionAndRecord(0.00, -0.7)) + return false; + + std::cout << "auto-calibrate: step 5 of 5 - resetting pepper head and calibrating..." << std::endl; + m_tele.setPepperHeadPitchYaw(0.0, 0.0); + + ros::spinOnce(); + + fSuccess = lookupTransform(); + if (!fSuccess) + return false; + + calibrate(); + + return true; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::writeCalibrationData() +{ + if (m_fpath.empty()) + return; + + std::ofstream ofs(m_fpath); + float dat[12]; + + dat[0] = R(0, 0); + dat[1] = R(0, 1); + dat[2] = R(0, 2); + dat[3] = R(1, 0); + dat[4] = R(1, 1); + dat[5] = R(1, 2); + dat[6] = R(2, 0); + dat[7] = R(2, 1); + dat[8] = R(2, 2); + dat[9] = T(0); + dat[10] = T(1); + dat[11] = T(2); + + ofs.write((char *)dat, sizeof(float) * 12); + ofs.close(); + + std::cout << "Calibration file '" << m_fpath << "' saved." 
<< std::endl; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::calibration(std::vector pep_pos, std::vector hol_pos, Matrix3d &R, Vector3d &T) +{ + linear_calibration(pep_pos, hol_pos, R, T); + std::cout << "======== finished initial parameter computation ========" << std::endl; + nonlinear_calibration(pep_pos, hol_pos, R, T); +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::horizontalCalibration(std::vector pep_pos, std::vector hol_pos, std::vector floor_holo, std::vector head_foot, Matrix3d &R, Vector3d &T) +{ + double bestScores[2]; + horizontal_initialization(pep_pos, hol_pos, floor_holo, head_foot, R, T, bestScores); + std::cout << "======== finished initial parameter computation ========" << std::endl; + nonlinear_horizontal_calibration(pep_pos, hol_pos, floor_holo, head_foot, R, T, bestScores); +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::linear_calibration(std::vector pep_pos, std::vector hol_pos, Matrix3d &R, Vector3d &T) +{ + std::vector pepMat, pepdMat; + std::vector holMat, holdMat; + + // obtain motion + for (int i = 0; i < pep_pos.size(); i++) + { + Matrix4d mp = btTrans2EigMat4d(pep_pos.at(i)); + Matrix4d mh = btTrans2EigMat4d(hol_pos.at(i)); + pepMat.push_back(mp); + holMat.push_back(mh); + if (i > 0) + { + pepdMat.push_back(pepMat.at(i - 1).inverse() * pepMat.at(i)); + holdMat.push_back(holMat.at(i - 1).inverse() * holMat.at(i)); + } + } + + // Rotation calibration + // obtain axis from motion + std::vector pepAxis, holAxis; + MatrixXd KA(3, pepdMat.size()), KB(3, pepdMat.size()); + for (int i = 0; i < pepdMat.size(); i++) + { + Vector3d pepax = mat2axis(pepdMat.at(i)); + Vector3d holax = mat2axis(holdMat.at(i)); + KA.col(i) = pepax; + KB.col(i) = holax; + } + + MatrixXd KBKA = KB * KA.transpose(); + Eigen::JacobiSVD svd(KBKA, Eigen::ComputeThinU | Eigen::ComputeThinV); + Eigen::Matrix3d Hm; + Eigen::Matrix3d uvt = svd.matrixU() * svd.matrixV().transpose(); + + Hm << 1, 0, 0, + 0, 1, 0, + 0, 0, uvt.determinant(); + R = svd.matrixV() * Hm * svd.matrixU().transpose(); + + // Translation calibration + // solve least square problem for t + MatrixXd A(pepdMat.size() * 2, 3); + VectorXd B(pepdMat.size() * 2); + + for (int i = 0; i < pepdMat.size(); i++) + { + Vector3d tpep, thol; + tpep << pepdMat.at(i)(0, 3), pepdMat.at(i)(1, 3), pepdMat.at(i)(2, 3); + thol << holdMat.at(i)(0, 3), holdMat.at(i)(1, 3), holdMat.at(i)(2, 3); + Vector3d rightt = tpep - R * thol; + Matrix3d leftm = Matrix3d::Identity() - pepdMat.at(i).block(0, 0, 3, 3); + A.block(i * 2, 0, 2, 3) = leftm.block(0, 0, 2, 3); + B(i * 2) = rightt(0); + B(i * 2 + 1) = rightt(1); + } + + T = A.jacobiSvd(ComputeThinU | ComputeThinV).solve(B); + std::cout << "Rotation (Initial)" << std::endl + << R << std::endl; + std::cout << "Translation (Initial)" << std::endl + << T << std::endl; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::nonlinear_calibration(std::vector pep_pos, std::vector hol_pos, Matrix3d &R, Vector3d &T) +{ + double opt[6]; + double optrot[3]; + double 
opttrans[3]; + + R2axisRot(R, opt[3], opt[4], opt[5]); + R2axisRot(R, optrot[0], optrot[1], optrot[2]); + + opt[0] = T(0); + opt[1] = T(1); + opt[2] = T(2); + + opttrans[0] = T(0); + opttrans[1] = T(1); + opttrans[2] = T(2); + + Problem problem, problem_a, problem_b; + + std::vector pepMat, pepdMat; + std::vector holMat, holdMat; + + // Obtain motion + for (int i = 0; i < pep_pos.size(); i++) + { + Matrix4d mp = btTrans2EigMat4d(pep_pos.at(i)); + Matrix4d mh = btTrans2EigMat4d(hol_pos.at(i)); + pepMat.push_back(mp); + holMat.push_back(mh); + if (i > 0) + { + pepdMat.push_back(pepMat.at(i - 1).inverse() * pepMat.at(i)); + holdMat.push_back(holMat.at(i - 1).inverse() * holMat.at(i)); + } + } + + // Construct cost function + for (int i = 0; i < pepdMat.size(); i++) + { + Matrix4d A = pepdMat.at(i); + Matrix4d B = holdMat.at(i); + double anglea, angleb; + Vector3d a_ax; + mat2axis_angle(A, a_ax, anglea); + Vector3d b_ax; + mat2axis_angle(B, b_ax, angleb); + //Function 1: Optimize rotation + CostFunction *cost_function1d = new NumericDiffCostFunction(new F1_(a_ax, b_ax)); + problem_a.AddResidualBlock(cost_function1d, NULL, optrot); + //Function 2: Optimize translation + CostFunction *cost_function2d = new NumericDiffCostFunction(new F2_(A, B, optrot)); + problem_b.AddResidualBlock(cost_function2d, NULL, opttrans); + } + + Solver::Options options; + options.minimizer_progress_to_stdout = true; + Solver::Summary summary; + + // Rotation optimization + ceres::Solve(options, &problem_a, &summary); + + // Translation optimization + ceres::Solve(options, &problem_b, &summary); + + std::cout << "finished optimization\n"; + std::cout << summary.BriefReport() << "\n"; + + R = axisRot2R(optrot[0], optrot[1], optrot[2]); + T << opttrans[0], opttrans[1], opttrans[2]; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::horizontal_initialization(std::vector pep_pos, std::vector hol_pos, std::vector verticalVecs, std::vector normVecs, Matrix3d &R, Vector3d &T, double *bestScores) +{ + // Compute motion + std::vector RobotMat, A_Mat; + std::vector holMat, B_Mat; + + for (int i = 0; i < pep_pos.size(); i++) + { + Matrix4d mp = btTrans2EigMat4d(pep_pos.at(i)); + Matrix4d mh = btTrans2EigMat4d(hol_pos.at(i)); + RobotMat.push_back(mp); + holMat.push_back(mh); + if (i > 0) + { + A_Mat.push_back(RobotMat.at(i - 1).inverse() * RobotMat.at(i)); + B_Mat.push_back(holMat.at(i - 1).inverse() * holMat.at(i)); + } + } + + // Rotation calibration + std::vector RobotAxis, holAxis; + std::cout << "axis of rotation matrix" << std::endl; + + // Obtain rotational axis + for (int i = 0; i < A_Mat.size(); i++) + { + double anglea, angleb; + Vector3d pepax; + mat2axis_angle(A_Mat.at(i), pepax, anglea); + Vector3d holax; + mat2axis_angle(A_Mat.at(i), holax, angleb); + if (anglea < 0.1 || angleb < 0.1) + continue; // reject small rotational movement + RobotAxis.push_back(pepax); + holAxis.push_back(holax); + } + + if (RobotAxis.size() == 0) + { + std::cout << "initialization failed!! (need large rotaion)\nPlease check how to move..." 
<< std::endl; + return; + } + + // obtain forward movement + double offset_trans = 0.1; + double offset = 0.01; + for (int i = 0; i < A_Mat.size(); i++) + { + double anglea, angleb; + Vector3d pepax; + mat2axis_angle(A_Mat.at(i), pepax, anglea); + Vector3d holax; + mat2axis_angle(B_Mat.at(i), holax, angleb); + Vector3d ta = A_Mat.at(i).block(0, 3, 3, 1); + double score = ta.norm() / (fabs(anglea) + offset); // (movement length)/(rotational angle) + if (2 < score) // 2: threshold + { + RobotAxis.push_back(ta.normalized()); + Vector3d tb = B_Mat.at(i).block(0, 3, 3, 1); + holAxis.push_back(tb.normalized()); + } + } + + // Rotation calibration (SVD) + MatrixXd KA(3, RobotAxis.size()), KB(3, RobotAxis.size()); + for (int i = 0; i < RobotAxis.size(); i++) + { + KA.col(i) = RobotAxis.at(i); + KB.col(i) = holAxis.at(i); + } + + MatrixXd KBKA = KB * KA.transpose(); + Eigen::JacobiSVD svd(KBKA, Eigen::ComputeThinU | Eigen::ComputeThinV); + Eigen::Matrix3d Hm; + Eigen::Matrix3d uvt = svd.matrixU() * svd.matrixV().transpose(); + + Hm << 1, 0, 0, + 0, 1, 0, + 0, 0, uvt.determinant(); + R = svd.matrixV() * Hm * svd.matrixU().transpose(); + + // Translation calibration + std::vector left_vectors; + std::vector right_values; + + // vertical trans component + for (int i = 0; i < verticalVecs.size(); i++) + { + Eigen::Vector3d floor2holo = verticalVecs.at(i); + Eigen::Vector3d head2foot = normVecs.at(i); + double right_value = floor2holo.norm() + floor2holo.dot(R.transpose() * head2foot); //in hololens frame + Eigen::Vector3d left_vector = (floor2holo.transpose() * R.transpose()).transpose(); + left_vectors.push_back(left_vector); + right_values.push_back(right_value); + } + + // horizontal trans component + for (int i = 0; i < A_Mat.size(); i++) + { + double anglea, angleb; + Vector3d pepax; + mat2axis_angle(A_Mat.at(i), pepax, anglea); + Vector3d holax; + mat2axis_angle(B_Mat.at(i), holax, angleb); + Vector3d ta = A_Mat.at(i).block(0, 3, 3, 1); + Vector3d tb = B_Mat.at(i).block(0, 3, 3, 1); + if (anglea < 0.1 || angleb < 0.1) + continue; + Vector3d rightt = ta - R * tb; + Matrix3d leftM = Matrix3d::Identity() - A_Mat.at(i).block(0, 0, 3, 3); + for (int j = 0; j < 3; j++) + { + right_values.push_back(rightt(j)); + left_vectors.push_back(leftM.row(j).transpose()); + } + } + + // solve t by SVD + MatrixXd A(right_values.size(), 3); + VectorXd B(right_values.size()); + + for (int i = 0; i < right_values.size(); i++) + { + A.block(i, 0, 1, 3) = left_vectors.at(i).transpose(); + B(i) = right_values.at(i); + } + + T = A.jacobiSvd(ComputeThinU | ComputeThinV).solve(B); + + std::cout << "Rotation (Initial)\n" + << R << std::endl; + std::cout << "Translation (Initial)" << std::endl + << T << std::endl; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void calibrator::nonlinear_horizontal_calibration(std::vector pep_pos, std::vector hol_pos, std::vector floor_holo, std::vector head_foot, Matrix3d &R, Vector3d &T, double *bestScores) +{ + //decompose + double opt[6]; + double optrot[3]; + double opttrans[3]; + + R2axisRot(R, opt[3], opt[4], opt[5]); + R2axisRot(R, optrot[0], optrot[1], optrot[2]); + + opt[0] = T(0); + opt[1] = T(1); + opt[2] = T(2); + + opttrans[0] = T(0); + opttrans[1] = T(1); + opttrans[2] = T(2); + + // optimize (2 patterns for comparison) + // problem: simultaneous 6 parameters optimization + // problem a, b: optimize sepalately (3 rotation, 3 translation) + Problem 
problem, problem_a, problem_b; + + std::vector pepMat, pepdMat; + std::vector holMat, holdMat; + double offset_trans = 0.1; + double offset_angle = 0.2; + double offset = 0.01; + + for (int i = 1; i < pep_pos.size(); i++) + { // rotation estimation by horizontal rotation + // obtain motion (mh3,mp3) + Matrix4d mp = btTrans2EigMat4d(pep_pos.at(i - 1)); + Matrix4d mh = btTrans2EigMat4d(hol_pos.at(i - 1)); + Matrix4d mp2 = btTrans2EigMat4d(pep_pos.at(i)); + Matrix4d mh2 = btTrans2EigMat4d(hol_pos.at(i)); + Matrix4d mh3, mp3; + mp3 = mp2.inverse() * mp; + mh3 = mh2.inverse() * mh; + double anglea, angleb; + Vector3d pepax; + + mat2axis_angle(mp3, pepax, anglea); + + Vector3d holax; + + mat2axis_angle(mh3, holax, angleb); + + Vector3d ta = mp3.block(0, 3, 3, 1); + Vector3d tb = mh3.block(0, 3, 3, 1); + double scorea = (sqrt(ta.norm()) - sqrt(offset_trans)) / (fabs(anglea) + offset); // translation score + double scoreb = (sqrt(fabs(anglea)) - sqrt(offset_angle)) / (ta.norm() + offset); // rotation score + + if (scoreb >= bestScores[1] * 0.95) // straight movement + { + CostFunction *cost_function1 = new NumericDiffCostFunction(new F1(pepax, holax)); + problem.AddResidualBlock(cost_function1, NULL, opt); + CostFunction *cost_function2 = new NumericDiffCostFunction(new F2(mp3, mh3)); + problem.AddResidualBlock(cost_function2, NULL, opt); + + CostFunction *cost_function1d = new NumericDiffCostFunction(new F1_(pepax, holax)); + problem_a.AddResidualBlock(cost_function1d, NULL, optrot); + CostFunction *cost_function2d = new NumericDiffCostFunction(new F2_(mp3, mh3, optrot)); + problem_b.AddResidualBlock(cost_function2d, NULL, opttrans); + } + + if (scorea >= bestScores[0] * 0.95) // rotational movement + { + Vector3d ta_n = ta.normalized(); + Vector3d tb_n = tb.normalized(); + CostFunction *cost_function1 = new NumericDiffCostFunction(new F1(ta_n, tb_n)); + problem.AddResidualBlock(cost_function1, NULL, opt); + CostFunction *cost_function1d = new NumericDiffCostFunction(new F1_(ta_n, tb_n)); + problem_a.AddResidualBlock(cost_function1d, NULL, optrot); + } + } + + for (int i = 0; i < floor_holo.size(); i++) + { // height information + Eigen::Vector3d fh = floor_holo.at(i); + Eigen::Vector3d hf = head_foot.at(i); + CostFunction *cost_function3 = new NumericDiffCostFunction(new F3(fh, hf)); + problem.AddResidualBlock(cost_function3, NULL, opt); + CostFunction *cost_function3d = new NumericDiffCostFunction(new F3_(fh, hf, optrot)); + problem_b.AddResidualBlock(cost_function3d, NULL, opttrans); + } + + Solver::Options options; + options.minimizer_progress_to_stdout = true; + Solver::Summary summary; + + ceres::Solve(options, &problem, &summary); // pattern 1 + ceres::Solve(options, &problem_a, &summary); // pattern 2 a (rotation) + ceres::Solve(options, &problem_b, &summary); // pattern 2 b (translation) + + std::cout << "finished optimization\n"; + std::cout << summary.BriefReport() << "\n"; + + R = axisRot2R(opt[3], opt[4], opt[5]); + T << opt[0], opt[1], opt[2]; + + // pattern 1 result (just for comparison) + std::cout << R << std::endl; + std::cout << T << std::endl; + + R = axisRot2R(optrot[0], optrot[1], optrot[2]); + T << opttrans[0], opttrans[1], opttrans[2]; + + // pattern 2 result (used as a calibration result) + std::cout << R << std::endl; + std::cout << T << std::endl; +} diff --git a/linux/HoloLens_Localization/src/static_calibration/cost_function.cpp b/linux/HoloLens_Localization/src/static_calibration/cost_function.cpp new file mode 100644 index 0000000..a0cc9c2 --- /dev/null +++ 
b/linux/HoloLens_Localization/src/static_calibration/cost_function.cpp @@ -0,0 +1,53 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "cost_function.h" + +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +Eigen::Matrix3d axisRot2R(double rx, double ry, double rz) +{ + // + // rotation conversion: Eular angle to matrix + Eigen::Matrix4d R,rotx,roty,rotz; + double sinv,cosv; + + sinv = sin(rx); + cosv = cos(rx); + rotx << 1,0,0,0,0,cosv,-sinv,0,0,sinv,cosv,0,0,0,0,1; + + sinv = sin(ry); + cosv = cos(ry); + roty << cosv,0,sinv,0,0,1,0,0,-sinv,0,cosv,0,0,0,0,1; + + sinv = sin(rz); + cosv = cos(rz); + rotz << cosv,-sinv,0,0,sinv,cosv,0,0,0,0,1,0,0,0,0,1; + + R = rotx * roty * rotz; + + Eigen::Matrix3d retMat = R.block(0,0,3,3); + + return retMat; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void R2axisRot(Eigen::Matrix3d R,double& rx,double& ry,double& rz) +{ + // + // rotation conversion: Matrix to Eular angle + ry = asin(R(0,2)); + rx = -atan2(R(1,2),R(2,2)); + rz = -atan2(R(0,1),R(0,0)); +} + diff --git a/linux/HoloLens_Localization/src/static_calibration/main.cpp b/linux/HoloLens_Localization/src/static_calibration/main.cpp new file mode 100644 index 0000000..8dc4ed7 --- /dev/null +++ b/linux/HoloLens_Localization/src/static_calibration/main.cpp @@ -0,0 +1,86 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "ros/ros.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "calibration.h" + +using namespace Eigen; + +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void printHelp() +{ + std::cout<<"---------------------------------------------------------"< "<= 5) { + fpath = std::string(argv[4]); + } + + printHelp(); + + // set up publish rate + calibrator calib(holoLinkedFrame, odomFrame, robotFootFrame, fpath); + + calib.run(); + + return 0; +} diff --git a/linux/HoloLens_Localization/src/static_calibration/teleop.cpp b/linux/HoloLens_Localization/src/static_calibration/teleop.cpp new file mode 100644 index 0000000..8570ac9 --- /dev/null +++ b/linux/HoloLens_Localization/src/static_calibration/teleop.cpp @@ -0,0 +1,221 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). 
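
For readers of `linear_calibration()` and `nonlinear_calibration()` above: the recorded robot-head poses (`pep_pos`, in the odom frame) and HoloLens poses (`hol_pos`, in the spatial-anchor frame) appear to be related by a standard hand-eye formulation, AX = XB. The notation below (X for the unknown holoLinkedFrame-to-HoloLens transform, A_i and B_i for consecutive-pose motions) is introduced for this note only and does not appear in the source.

```latex
% Sketch of the relations the calibration above appears to solve
% (notation introduced here, not taken from the source).
A_i X = X B_i, \qquad
A_i = P_{i-1}^{-1} P_i \ \text{(robot-head motion)}, \qquad
B_i = H_{i-1}^{-1} H_i \ \text{(HoloLens motion)}

% Rotation: each motion pair shares a rotation axis up to R, i.e. a_i \approx R\, b_i.
% Stacking the axes as columns of K_A, K_B gives the SVD (Kabsch) step, with the
% diagonal factor guarding against an improper rotation:
K_B K_A^{\top} = U \Sigma V^{\top}, \qquad
R = V \,\mathrm{diag}\!\left(1,\,1,\,\det(U V^{\top})\right) U^{\top}

% Translation: the translation rows of A_i X = X B_i are linear in t and are
% solved in a least-squares sense (horizontal mode adds a height constraint built
% from the floor-to-HoloLens and head-to-foot vectors recorded earlier):
\left(I - R_{A_i}\right) t = t_{A_i} - R\, t_{B_i}
```
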
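
`axisRot2R()` and `R2axisRot()` in `cost_function.cpp` above pin down the Euler convention used by the optimizer: R = Rx(rx) * Ry(ry) * Rz(rz), inverted via ry = asin(R(0,2)), which is only unique away from cos(ry) = 0. A quick round-trip self-check of that convention, written as a hypothetical test snippet (it assumes the two functions are declared in `cost_function.h`), not as project code:

```cpp
#include <cassert>
#include <cmath>
#include <Eigen/Dense>
#include "cost_function.h"   // axisRot2R / R2axisRot assumed to be declared here

// Round-trip check of the Euler convention: build R from (rx, ry, rz),
// convert back, and compare.
int main()
{
    const double rx = 0.2, ry = -0.4, rz = 1.1;   // |ry| < pi/2, away from gimbal lock
    Eigen::Matrix3d R = axisRot2R(rx, ry, rz);

    double rx2, ry2, rz2;
    R2axisRot(R, rx2, ry2, rz2);

    assert(std::fabs(rx - rx2) < 1e-9);
    assert(std::fabs(ry - ry2) < 1e-9);
    assert(std::fabs(rz - rz2) < 1e-9);
    return 0;
}
```
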
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "teleop.h" +#include +#include + +// +// see https://github.com/ros-naoqi/nao_extras/blob/master/nao_teleop/src/teleop_nao_joy.cpp + +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +teleop::teleop() : + m_fStiffness(false), + m_maxHeadYaw(2.0943), m_maxHeadPitch(0.7853), + m_bodyPoseTimeOut(5.0), + m_bodyPoseClient("body_pose", true) +{ + ros::NodeHandle nh; + + m_movePub = nh.advertise("cmd_vel", 10); + m_headPub = nh.advertise("joint_angles", 1); + + m_stiffnessDisableClient = nh.serviceClient("body_stiffness/disable"); + m_stiffnessEnableClient = nh.serviceClient("body_stiffness/enable"); + m_wakeup = nh.serviceClient("wakeup"); + m_rest = nh.serviceClient("rest"); +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +bool teleop::callBodyPoseClient(const std::string& poseName) +{ + if (!m_bodyPoseClient.isServerConnected()) { + return false; + } + + naoqi_bridge_msgs::BodyPoseGoal goal; + + goal.pose_name = poseName; + + m_bodyPoseClient.sendGoalAndWait(goal, m_bodyPoseTimeOut); + + actionlib::SimpleClientGoalState state = m_bodyPoseClient.getState(); + if (state != actionlib::SimpleClientGoalState::SUCCEEDED) { + ROS_ERROR("Pose action \"%s\" did not succeed (%s): %s", goal.pose_name.c_str(), state.toString().c_str(), state.text_.c_str()); + return false; + } else { + ROS_INFO("Pose action \"%s\" succeeded", goal.pose_name.c_str()); + return true; + } +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void teleop::operation(int c) +{ + if (c == 'i') { // move forward + geometry_msgs::Twist msgTwist; + msgTwist.linear.x = 0.1; + msgTwist.linear.y = 0; + msgTwist.linear.z = 0; + msgTwist.angular.x = 0; + msgTwist.angular.y = 0; + msgTwist.angular.z = 0; + m_movePub.publish(msgTwist); + } else if (c == 'j') { // move left + geometry_msgs::Twist msgTwist; + msgTwist.linear.x = 0; + msgTwist.linear.y = 0.1; + msgTwist.linear.z = 0; + msgTwist.angular.x = 0; + msgTwist.angular.y = 0; + msgTwist.angular.z = 0; + m_movePub.publish(msgTwist); + } else if (c == 'l') { // move right + geometry_msgs::Twist msgTwist; + msgTwist.linear.x = 0; + msgTwist.linear.y = -0.1; + msgTwist.linear.z = 0; + msgTwist.angular.x = 0; + msgTwist.angular.y = 0; + msgTwist.angular.z = 0; + m_movePub.publish(msgTwist); + } else if (c == 'u') { // turn left + geometry_msgs::Twist msgTwist; + msgTwist.linear.x = 0; + msgTwist.linear.y = 0; + msgTwist.linear.z = 0; + msgTwist.angular.x = 0; + msgTwist.angular.y = 0; + msgTwist.angular.z = 0.2; + m_movePub.publish(msgTwist); + } else if (c == 'o') { // turn right + geometry_msgs::Twist msgTwist; + msgTwist.linear.x = 0; + msgTwist.linear.y = 0; + msgTwist.linear.z = 0; + msgTwist.angular.x = 0; + msgTwist.angular.y = 0; + msgTwist.angular.z = -0.2; + m_movePub.publish(msgTwist); + } else if (c == 'k') { // move backward + geometry_msgs::Twist msgTwist; + msgTwist.linear.x = -0.1; + msgTwist.linear.y = 0; + msgTwist.linear.z = 0; + 
msgTwist.angular.x = 0; + msgTwist.angular.y = 0; + msgTwist.angular.z = 0; + m_movePub.publish(msgTwist); + } else if (c == 's') { // stop + geometry_msgs::Twist msgTwist; + msgTwist.linear.x = 0; + msgTwist.linear.y = 0; + msgTwist.linear.z = 0; + msgTwist.angular.x = 0; + msgTwist.angular.y = 0; + msgTwist.angular.z = 0; + m_movePub.publish(msgTwist); + } + else if (c == 'p') + { + std_srvs::Empty e; + + m_fStiffness = !m_fStiffness; + + if (m_fStiffness) { + std::cout << "enabling stiffness\r\n" << std::endl; + m_stiffnessEnableClient.call(e); + } else { + std::cout << "disabling stiffness\r\n" << std::endl; + m_stiffnessDisableClient.call(e); + } + } + else if (c == 'e') + { + std_srvs::Empty e; + m_wakeup.call(e); + } + else if (c == 'r') + { + std_srvs::Empty e; + m_rest.call(e); + } + else if (c == '0') + { + setPepperHeadPitchYaw(0.0, 0.0); + } + else if (c == '1') + { + setPepperHeadPitchYaw(-0.35, 0.0); + } + else if (c == '2') + { + setPepperHeadPitchYaw(0.0, 0.7); + } + else if (c == '3') + { + setPepperHeadPitchYaw(0.0, -0.7); + } +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void teleop::setPepperHeadPitch(float angle) +{ + naoqi_bridge_msgs::JointAnglesWithSpeed headpose; + + headpose.joint_names.clear(); + headpose.joint_names.push_back("HeadPitch"); + headpose.joint_angles.clear(); + headpose.joint_angles.push_back(angle); + headpose.speed = 0.1; + headpose.relative = 0; // ABSOLUTE MODE + + m_headPub.publish(headpose); +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void teleop::setPepperHeadYaw(float angle) +{ + naoqi_bridge_msgs::JointAnglesWithSpeed headpose; + + headpose.joint_names.clear(); + headpose.joint_names.push_back("HeadYaw"); + headpose.joint_angles.clear(); + headpose.joint_angles.push_back(angle); + headpose.speed = 0.1; + headpose.relative = 0; // ABSOLUTE MODE + + m_headPub.publish(headpose); +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void teleop::setPepperHeadPitchYaw(float pitch, float yaw) +{ + naoqi_bridge_msgs::JointAnglesWithSpeed headpose; + + headpose.joint_names.clear(); + headpose.joint_names.push_back("HeadPitch"); + headpose.joint_names.push_back("HeadYaw"); + headpose.joint_angles.clear(); + headpose.joint_angles.push_back(pitch); + headpose.joint_angles.push_back(yaw); + headpose.speed = 0.1; + headpose.relative = 0; // ABSOLUTE MODE + + m_headPub.publish(headpose); +} diff --git a/linux/HoloROSBridge/CMakeLists.txt b/linux/HoloROSBridge/CMakeLists.txt new file mode 100644 index 0000000..4d02c94 --- /dev/null +++ b/linux/HoloROSBridge/CMakeLists.txt @@ -0,0 +1,202 @@ +cmake_minimum_required(VERSION 2.8.3) +project(hololens_ros_bridge) + +## Compile as C++11, supported in ROS Kinetic and newer +add_compile_options(-std=c++11) + +## Find catkin macros and libraries +## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) +## is used, also find other catkin packages +find_package(catkin REQUIRED COMPONENTS + roscpp + std_msgs + message_generation + tf +) + +find_package(Eigen3 REQUIRED NO_MODULE) + +## System dependencies are found with CMake's conventions +# find_package(Boost REQUIRED COMPONENTS system) + + +## Uncomment this if the package has a 
setup.py. This macro ensures +## modules and global scripts declared therein get installed +## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html +# catkin_python_setup() + +################################################ +## Declare ROS messages, services and actions ## +################################################ + +## To declare and build messages, services or actions from within this +## package, follow these steps: +## * Let MSG_DEP_SET be the set of packages whose message types you use in +## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...). +## * In the file package.xml: +## * add a build_depend tag for "message_generation" +## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET +## * If MSG_DEP_SET isn't empty the following dependency has been pulled in +## but can be declared for certainty nonetheless: +## * add a exec_depend tag for "message_runtime" +## * In this file (CMakeLists.txt): +## * add "message_generation" and every package in MSG_DEP_SET to +## find_package(catkin REQUIRED COMPONENTS ...) +## * add "message_runtime" and every package in MSG_DEP_SET to +## catkin_package(CATKIN_DEPENDS ...) +## * uncomment the add_*_files sections below as needed +## and list every .msg/.srv/.action file to be processed +## * uncomment the generate_messages entry below +## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...) + +## Generate messages in the 'msg' folder +# add_message_files( +# FILES +# Message1.msg +# Message2.msg +# ) + +## Generate services in the 'srv' folder +# add_service_files( +# FILES +# Service1.srv +# Service2.srv +# ) + +## Generate actions in the 'action' folder +# add_action_files( +# FILES +# Action1.action +# Action2.action +# ) + +## Generate added messages and services with any dependencies listed here +# generate_messages( +# DEPENDENCIES +# std_msgs # Or other packages containing msgs +# ) + +################################################ +## Declare ROS dynamic reconfigure parameters ## +################################################ + +## To declare and build dynamic reconfigure parameters within this +## package, follow these steps: +## * In the file package.xml: +## * add a build_depend and a exec_depend tag for "dynamic_reconfigure" +## * In this file (CMakeLists.txt): +## * add "dynamic_reconfigure" to +## find_package(catkin REQUIRED COMPONENTS ...) 
+## * uncomment the "generate_dynamic_reconfigure_options" section below +## and list every .cfg file to be processed + +## Generate dynamic reconfigure parameters in the 'cfg' folder +# generate_dynamic_reconfigure_options( +# cfg/DynReconf1.cfg +# cfg/DynReconf2.cfg +# ) + +################################### +## catkin specific configuration ## +################################### +## The catkin_package macro generates cmake config files for your package +## Declare things to be passed to dependent projects +## INCLUDE_DIRS: uncomment this if your package contains header files +## LIBRARIES: libraries you create in this project that dependent projects also need +## CATKIN_DEPENDS: catkin_packages dependent projects also need +## DEPENDS: system dependencies of this project that dependent projects also need +catkin_package( +# INCLUDE_DIRS include +# LIBRARIES hololens_ros_bridge +# CATKIN_DEPENDS other_catkin_pkg +# DEPENDS system_lib +) + +########### +## Build ## +########### + +## Specify additional locations of header files +## Your package locations should be listed before other locations +include_directories( +include +${catkin_INCLUDE_DIRS} +${Eigen3_INCLUDE_DIRS} +) + +## Declare a C++ library +#add_library(${PROJECT_NAME} +# src/data_connection.cpp +# src/funcs.cpp +#) + +## Add cmake target dependencies of the library +## as an example, code may need to be generated before libraries +## either from message generation or dynamic reconfigure +# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) + +## Declare a C++ executable +## With catkin_make all packages are built within a single CMake context +## The recommended prefix ensures that target names across packages don't collide +add_executable(${PROJECT_NAME}_node + src/hololens_ros_bridge_node.cpp + src/data_connection.cpp + src/funcs.cpp + ) + +## Rename C++ executable without prefix +## The above recommended prefix causes long target names, the following renames the +## target back to the shorter version for ease of user use +## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node" +# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "") + +## Add cmake target dependencies of the executable +## same as for the library above +# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) + +## Specify libraries to link a library or executable target against + target_link_libraries(${PROJECT_NAME}_node + ${catkin_LIBRARIES} + ) + +############# +## Install ## +############# + +# all install targets should use catkin DESTINATION variables +# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html + +## Mark executable scripts (Python etc.) 
for installation +## in contrast to setup.py, you can choose the destination +# install(PROGRAMS +# scripts/my_python_script +# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +# ) + +## Mark executables for installation +## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html +# install(TARGETS ${PROJECT_NAME}_node +# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +# ) + +## Mark libraries for installation +## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html +# install(TARGETS ${PROJECT_NAME} +# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} +# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} +# RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +# ) + +## Mark cpp header files for installation +# install(DIRECTORY include/${PROJECT_NAME}/ +# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} +# FILES_MATCHING PATTERN "*.h" +# PATTERN ".svn" EXCLUDE +# ) + +## Mark other files for installation (e.g. launch and bag files, etc.) +# install(FILES +# # myfile1 +# # myfile2 +# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +# ) diff --git a/linux/HoloROSBridge/include/data_connection.h b/linux/HoloROSBridge/include/data_connection.h new file mode 100644 index 0000000..93298fb --- /dev/null +++ b/linux/HoloROSBridge/include/data_connection.h @@ -0,0 +1,49 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include +#include + +// +// needs to match SurfaceMeshStreamHeader in the HoloLensSpatialMapping Windows application +#define SMSH_SIGNATURE "SMSHSIG_" + +struct SurfaceMeshStreamHeader +{ + unsigned char signature[8]; + float scale; + float center_x; + float center_y; + float center_z; + float orientation_x; + float orientation_y; + float orientation_z; + float orientation_w; + unsigned int uVertexBufferSize; + unsigned int uNumVertices; +}; + +enum EnumAskState +{ + Default_Ask_Pose, + Refresh_Anchor_And_Render_Map, + Update_Anchor +}; + +enum EnumRepliedState +{ + Recv_Pose, + Recv_Anchor_Pose, + Recv_Anchor_Pose_Map +}; + +void recvMessage(int sock,unsigned int size,char* pointer); +int reverseEndian(int n); diff --git a/linux/HoloROSBridge/include/funcs.h b/linux/HoloROSBridge/include/funcs.h new file mode 100644 index 0000000..245df31 --- /dev/null +++ b/linux/HoloROSBridge/include/funcs.h @@ -0,0 +1,26 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. 
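
`SurfaceMeshStreamHeader`, `recvMessage()` and `reverseEndian()` declared above make up the raw TCP protocol between the HoloLens UWP app and this bridge: the receiver reads a byte count, then walks a buffer of headers followed by packed 16-bit vertex records. Two assumptions stay implicit, namely that both ends agree on the struct packing and that `recv()` never fails mid-stream. The sketch below spells them out; `recvAll()` is a hypothetical variant for illustration, not code used by the bridge.

```cpp
#include <sys/types.h>
#include <sys/socket.h>

#include "data_connection.h"   // SurfaceMeshStreamHeader (assumed include path)

// The receiver casts raw bytes to SurfaceMeshStreamHeader, so both ends must
// agree on its packing: 8-byte signature + 8 floats + 2 unsigned ints = 48 bytes.
static_assert(sizeof(SurfaceMeshStreamHeader) == 48,
              "unexpected SurfaceMeshStreamHeader packing");

// Hypothetical variant of recvMessage() that also reports a closed peer or a
// socket error instead of spinning forever on a dead connection.
bool recvAll(int sock, unsigned int size, char *pointer)
{
    unsigned int total = 0;
    while (total < size) {
        ssize_t n = recv(sock, pointer + total, size - total, 0);
        if (n <= 0)     // 0: peer closed the connection, <0: socket error
            return false;
        total += static_cast<unsigned int>(n);
    }
    return true;
}
```
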
+// +//********************************************************* + +#include + +#include +#include +#include +#include +#include +#include +#include + +typedef std::map MTF; + +tf::StampedTransform positionMat2tf(float* posdata); +tf::StampedTransform EigenMat2tf(Eigen::Matrix4d posdataMat); +Eigen::Matrix4d tf2EigenMat(tf::StampedTransform tfdata); diff --git a/linux/HoloROSBridge/package.xml b/linux/HoloROSBridge/package.xml new file mode 100644 index 0000000..4c4dc7f --- /dev/null +++ b/linux/HoloROSBridge/package.xml @@ -0,0 +1,59 @@ + + + hololens_ros_bridge + 0.0.0 + The hololens_ros_bridge package + + + + + t-ryish + + + + + + TODO + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + catkin + + + + + + + + diff --git a/linux/HoloROSBridge/src/data_connection.cpp b/linux/HoloROSBridge/src/data_connection.cpp new file mode 100644 index 0000000..d3187f2 --- /dev/null +++ b/linux/HoloROSBridge/src/data_connection.cpp @@ -0,0 +1,43 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "data_connection.h" + +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void recvMessage(int sock, unsigned int size, char* pointer) +{ + int total=0; + while (total < size) { + int n = recv(sock, pointer, (size-total), 0); + total += n; + pointer += n; + } +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +int reverseEndian(int n) +{ + int ret; + char* n_buf,*ret_buf; + + n_buf = (char*)&n; + ret_buf = (char*)&ret; + + ret_buf[0] = n_buf[3]; + ret_buf[1] = n_buf[2]; + ret_buf[2] = n_buf[1]; + ret_buf[3] = n_buf[0]; + + return ret; +} diff --git a/linux/HoloROSBridge/src/funcs.cpp b/linux/HoloROSBridge/src/funcs.cpp new file mode 100644 index 0000000..fd2f519 --- /dev/null +++ b/linux/HoloROSBridge/src/funcs.cpp @@ -0,0 +1,56 @@ +#include "funcs.h" + +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +tf::StampedTransform positionMat2tf(float* posdata) +{ + tf::StampedTransform tf_ret; + + tf_ret.setOrigin(tf::Vector3(posdata[12], posdata[13], posdata[14])); + + tf::Matrix3x3 rot(posdata[0],posdata[4],posdata[8], + posdata[1],posdata[5],posdata[9], + posdata[2],posdata[6],posdata[10]); + tf::Quaternion q; + rot.getRotation(q); + tf_ret.setRotation(q); + tf_ret.stamp_=ros::Time::now(); + + return tf_ret; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +tf::StampedTransform EigenMat2tf(Eigen::Matrix4d posdataMat) +{ + double* posdata = posdataMat.data(); + tf::StampedTransform tf_ret; + + tf_ret.setOrigin(tf::Vector3(posdata[12], posdata[13], posdata[14])); + + tf::Matrix3x3 rot(posdata[0],posdata[4],posdata[8], + posdata[1],posdata[5],posdata[9], + posdata[2],posdata[6],posdata[10]); + tf::Quaternion q; + 
rot.getRotation(q); + tf_ret.setRotation(q); + tf_ret.stamp_=ros::Time::now(); + + return tf_ret; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +Eigen::Matrix4d tf2EigenMat(tf::StampedTransform tfdata) +{ + tf::Vector3 origin = tfdata.getOrigin(); + tf::Matrix3x3 rot = tfdata.getBasis(); + Eigen::Matrix4d ret; + ret << rot[0][0],rot[0][1],rot[0][2],origin[0], + rot[1][0],rot[1][1],rot[1][2],origin[1], + rot[2][0],rot[2][1],rot[2][2],origin[2], + 0,0,0,1; + + return ret; +} diff --git a/linux/HoloROSBridge/src/hololens_ros_bridge_node.cpp b/linux/HoloROSBridge/src/hololens_ros_bridge_node.cpp new file mode 100644 index 0000000..cb1f031 --- /dev/null +++ b/linux/HoloROSBridge/src/hololens_ros_bridge_node.cpp @@ -0,0 +1,421 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "ros/ros.h" +#include "std_msgs/String.h" +#include "std_msgs/Bool.h" +#include "sensor_msgs/PointCloud.h" +#include "data_connection.h" +#include "funcs.h" + +#include +#include +#include +#include + +const char * pszAppName = "hololens_ros_bridge"; +bool sendMsg = false; +bool recvTF = false; +float keptHeight; +tf::StampedTransform alignedTF; +tf::StampedTransform tf_hololens2ros; + +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void callbackTest(const std_msgs::Bool::ConstPtr &subscr_ping) +{ + ROS_INFO("Request to initialize HoloLens bridge received."); + sendMsg = true; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +void callback_aligned(const geometry_msgs::PoseStamped::ConstPtr &ps) +{ + // + // callback for receiving alignment result + + ROS_INFO("Request to align localized spatial anchor received."); + + // convert PoseStamped to tf + alignedTF.setOrigin(tf::Vector3(ps->pose.position.x, ps->pose.position.y, keptHeight)); + + tf::Quaternion initq( + ps->pose.orientation.x, + ps->pose.orientation.y, + ps->pose.orientation.z, + ps->pose.orientation.w); + + alignedTF.setRotation(initq); + + tf::Transform tf_map2sa = alignedTF; + + alignedTF.setData(tf_map2sa); + alignedTF.stamp_=ros::Time::now(); + alignedTF.frame_id_ = std::string("map"); + alignedTF.child_frame_id_ = std::string("spatialAnchor_ros"); + + recvTF = true; +} +/*****************************************************************************/ +/* */ +/*****************************************************************************/ +int main(int argc, char **argv) +{ + if (argc < 3) + { + ROS_ERROR("missing parameters: ("/hololens/pc", 1000); + + // floor surface normal + ros::Publisher pub2 = nh.advertise("/hololens/floor_normal", 1000); + + // port, ip_address of HoloLens (port should be 1234) + const int port = std::stoi(argv[2]); + const char * ip_address = argv[1]; + + ROS_INFO("Socket trying to connect to %s:%i...", ip_address, port); + + int sock = socket(PF_INET, SOCK_STREAM, 0); + 
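
`positionMat2tf()` and `EigenMat2tf()` in `funcs.cpp` above both read the incoming pose as a column-major 4x4 array: element (r, c) sits at index c*4 + r, the translation at indices 12 to 14, and the `tf::Matrix3x3` constructor is therefore fed what looks like a transposed index pattern. The hypothetical helper below (illustration only, not part of the bridge) states the same layout with an `Eigen::Map`, which is column-major by default, in case the assumption ever needs checking against the sender.

```cpp
#include <Eigen/Geometry>

// Hypothetical restatement of the layout positionMat2tf() assumes:
// posdata is a column-major 4x4 matrix, so (row r, col c) lives at c*4 + r.
Eigen::Isometry3f poseFromColumnMajor(const float *posdata)
{
    Eigen::Map<const Eigen::Matrix4f> M(posdata);    // Eigen maps are column-major by default
    Eigen::Isometry3f pose = Eigen::Isometry3f::Identity();
    pose.linear()      = M.block<3, 3>(0, 0);        // rotation block
    pose.translation() = M.block<3, 1>(0, 3);        // posdata[12], [13], [14]
    return pose;
}
```
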
+ // connection + struct sockaddr_in addr; + + addr.sin_family = AF_INET; + addr.sin_port = htons(port); + addr.sin_addr.s_addr = inet_addr(ip_address); + + if (connect(sock, (struct sockaddr *)&addr, sizeof(addr)) != 0) + { + ROS_FATAL("Socket connection error."); + return -1; + } + + ROS_INFO("Socket successfully connected."); + + EnumAskState askState; + EnumRepliedState repState; + char recvBuf[2048]; + float cameraPosition[20]; + float holoLensHeight; + Eigen::Vector3d toFloor=Eigen::Vector3d::Zero(); + + // Spatial Anchor (SA) in ROS - SA in HoloLens (Rotation alignment) + Eigen::Matrix4d hololens2ros; + + hololens2ros << 0, 0, -1, 0, + -1, 0, 0, 0, + 0, 1, 0, 0, + 0, 0, 0, 1; + + // transformation chain: map - SA (ROS) - SA (HoloLens) - HoloLens camera + tf::StampedTransform tf_anchor2camera; + tf::StampedTransform tf_map2anchor; + + tf_hololens2ros = EigenMat2tf(hololens2ros); + tf_hololens2ros.frame_id_ = std::string("spatialAnchor_ros"); + tf_hololens2ros.child_frame_id_ = std::string("spatialAnchor"); + + tf_map2anchor = EigenMat2tf(Eigen::Matrix4d::Identity()); + tf_map2anchor.frame_id_ = std::string("map"); + tf_map2anchor.child_frame_id_ = std::string("spatialAnchor_ros"); + + // tf broadcaster + tf::TransformBroadcaster tf_br_; + + // string id - tf lookup table + MTF transLookUp; + + // string id of current anchor + std::string CurrentAnchor; + bool transBool = false; + sensor_msgs::PointCloud pointcloud; + + while (ros::ok()) + { + askState = Default_Ask_Pose; + if (sendMsg) + { // if receiving localization initialization from anchor localizer + askState = Refresh_Anchor_And_Render_Map; + transLookUp.clear(); + ROS_INFO("Refreshing point cloud data and anchors..."); + sendMsg = false; + } + + if (recvTF) + { // if receiving alignment result from anchor localizer + // update tf of map-SA (ROS) + transLookUp.at(CurrentAnchor) = alignedTF; + tf_map2anchor = alignedTF; + recvTF=false; + } + + // send message to HoloLens + int nRevAskState = reverseEndian(askState); + int nWrite = write(sock, &nRevAskState, sizeof(EnumAskState)); + if (nWrite <= 0) + { + ROS_FATAL("Socket write failed (%d bytes).", nWrite); + break; + } + + // receive message from HoloLens + int nRead = read(sock, (char *)&repState, sizeof(EnumRepliedState)); + if (nRead <= 0) + { + ROS_FATAL("Socket read failed (%d bytes).", nRead); + break; + } + + // received message + repState = EnumRepliedState(reverseEndian(repState)); + + switch (repState) + { + case Recv_Anchor_Pose_Map: + { // receive point cloud + // receive transformation of (old SA-new SA) in HoloLens + recvMessage(sock, sizeof(float) * 16, (char *)cameraPosition); + + Eigen::Matrix4d anc2anc; + anc2anc << cameraPosition[0], cameraPosition[4], cameraPosition[8], cameraPosition[12], + cameraPosition[1], cameraPosition[5], cameraPosition[9], cameraPosition[13], + cameraPosition[2], cameraPosition[6], cameraPosition[10], cameraPosition[14], + cameraPosition[3], cameraPosition[7], cameraPosition[11], cameraPosition[15]; + + // obtain new SA position (temporal) + Eigen::Matrix4d map2anc = tf2EigenMat(tf_map2anchor); + std::cout << pszAppName << ": old anchor to new anchor " << std::endl; + std::cout << /* std::setw(16) << */ anc2anc << std::endl; + + // tf:[map-new SA (ROS)] = (map - old SA) * (Ros-HoloLens) * (old SA-new SA) * (HoloLens - ROS) + map2anc = map2anc * tf2EigenMat(tf_hololens2ros) * anc2anc * tf2EigenMat(tf_hololens2ros).inverse(); + std::cout << pszAppName << ": map to new anchor " << std::endl; + std::cout << /* std::setw(16) << */ 
map2anc << std::endl; + + // update tf:[map-SA (ROS)] + tf_map2anchor = EigenMat2tf(map2anc); + tf_map2anchor.frame_id_ = std::string("map"); + tf_map2anchor.child_frame_id_ = std::string("spatialAnchor_ros"); + + // receive point cloud + char buffer[4], bufferswap[4]; + + // size of point cloud buffer + recvMessage(sock, 4, buffer); + + bufferswap[0] = buffer[3]; + bufferswap[1] = buffer[2]; + bufferswap[2] = buffer[1]; + bufferswap[3] = buffer[0]; + + unsigned int uTotalBufferSize = ((unsigned int *)bufferswap)[0]; + char * vertexCollectionDataBuffer = (char *)malloc(sizeof(char) * uTotalBufferSize); + + recvMessage(sock, uTotalBufferSize, vertexCollectionDataBuffer); + + char * ptr = vertexCollectionDataBuffer; + unsigned int currentBufferPosition = 0; + + pointcloud.points.clear(); + + // read buffer + while (true) + { + // SurfaceMeshStreamHeader contains center, orientation, and vertex position length of point cloud + SurfaceMeshStreamHeader * hdr = (SurfaceMeshStreamHeader *)ptr; + + if (memcmp(hdr->signature, (void *)SMSH_SIGNATURE, sizeof(hdr->signature)) != 0) { + ROS_ERROR("vertex collection buffer misalignement at offset %u.", currentBufferPosition); + break; + } + + Eigen::Vector3d center; + + center << hdr->center_x, hdr->center_y, hdr->center_z; + + Eigen::Quaterniond orientation = Eigen::Quaterniond(hdr->orientation_w, hdr->orientation_x, hdr->orientation_y, hdr->orientation_z); + + unsigned int uNumVertices = hdr->uNumVertices; + + // move to vertex buffer + ptr += sizeof(SurfaceMeshStreamHeader); + + short * vertexBuffer = (short *)ptr; + + // read point cloud + for (int i = 0; i < uNumVertices; i++) + { + Eigen::Vector3d p; + + // point position + p << vertexBuffer[i * 4] * 0.0025f, vertexBuffer[i * 4 + 1] * 0.0025f, vertexBuffer[i * 4 + 2] * 0.0025f; + + // point transformation + p = orientation * p + center; + + // store in point cloud array + geometry_msgs::Point32 pt; + + pt.x = p(0); + pt.y = p(1); + pt.z = p(2); + + pointcloud.points.push_back(pt); + } + + //move to next header + ptr += hdr->uVertexBufferSize; + currentBufferPosition += sizeof(SurfaceMeshStreamHeader) + hdr->uVertexBufferSize; + if (currentBufferPosition >= uTotalBufferSize) + break; + } + + // + // done with buffer, delete it + // free(vertexCollectionDataBuffer); + + // ack for point cloud publish + transBool = true; + // continue to next process in (case Recv_Anchor_Pose:) + } + // fall through... + + case Recv_Anchor_Pose: + { // receive anchor id + char buffer[4], bufferswap[4]; + + // anchor string id length + recvMessage(sock, 4, buffer); + bufferswap[0] = buffer[3]; + bufferswap[1] = buffer[2]; + bufferswap[2] = buffer[1]; + bufferswap[3] = buffer[0]; + + unsigned int bufferLength = ((unsigned int *)bufferswap)[0]; + char *strBuf = (char *)malloc(sizeof(char) * bufferLength + 1); + + // Anchor string id + recvMessage(sock, sizeof(char) * bufferLength, strBuf); + strBuf[sizeof(char) * bufferLength] = 0; + std::string str(strBuf); + std::cout << pszAppName << ": anchor string is '" << str << "'" << std::endl; + MTF::iterator it = transLookUp.find(str); + if (it != transLookUp.end()) + {//obtain existing tf from look up table + tf_map2anchor = transLookUp.at(str); + } + else + {//create new tf + CurrentAnchor = str; + transLookUp.insert(MTF::value_type(str, tf_map2anchor)); + } + free(strBuf); + //continue to next process in (case Recv_Pose:) + } + // TODO: FALL THROUGH? 
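
In the `Recv_Anchor_Pose_Map` case above, the map-to-anchor transform is refreshed by conjugating the HoloLens-side anchor-to-anchor motion with the fixed HoloLens/ROS axis alignment (`tf_hololens2ros`), and execution then deliberately falls through so the anchor-id and pose cases reuse the refreshed anchor; the `// TODO: FALL THROUGH?` question appears to be answered 'yes' by the explicit fall-through comment on the previous case. As a frame equation, writing T^a_b for the pose of frame b expressed in frame a (notation introduced here, not in the source):

```latex
T^{\mathrm{map}}_{\mathrm{SA}'_{\mathrm{ros}}}
  = T^{\mathrm{map}}_{\mathrm{SA}_{\mathrm{ros}}}
    \; T^{\mathrm{SA}_{\mathrm{ros}}}_{\mathrm{SA}}
    \; T^{\mathrm{SA}}_{\mathrm{SA}'}
    \; \bigl(T^{\mathrm{SA}'_{\mathrm{ros}}}_{\mathrm{SA}'}\bigr)^{-1}
% where SA / SA' are the old and new spatial anchors and
% T^{SA_ros}_{SA} = T^{SA'_ros}_{SA'} = tf_hololens2ros, the constant axis
% alignment between the ROS and HoloLens anchor conventions.
```
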
+ + case Recv_Pose: + { // receive camera position as 4x4 float matrix + floor information + // receive 4x4 matrix and update tf + recvMessage(sock, sizeof(float) * 20, (char *)cameraPosition); + Eigen::Matrix4d anc2cam; + anc2cam << cameraPosition[0], cameraPosition[4], cameraPosition[8], cameraPosition[12], + cameraPosition[1], cameraPosition[5], cameraPosition[9], cameraPosition[13], + cameraPosition[2], cameraPosition[6], cameraPosition[10], cameraPosition[14], + cameraPosition[3], cameraPosition[7], cameraPosition[11], cameraPosition[15]; + + tf_anchor2camera = EigenMat2tf(anc2cam); + tf_anchor2camera.frame_id_ = std::string("spatialAnchor"); + tf_anchor2camera.child_frame_id_ = std::string("hololens"); + tf_anchor2camera.stamp_ = ros::Time::now(); + // broadcast + tf_br_.sendTransform(tf_anchor2camera); + + // obtain HoloLens height and floor position + holoLensHeight = cameraPosition[19]; + toFloor< + + holo_nav_dash + 1.0.0 + HoloLens Navigation Dashboard. + + David Baumert + + BSD + + https://dev.azure.com/msresearch/Robotics/_git/hololens_navigation2 + + David Baumert + + catkin + geometry_msgs + rospy + diff --git a/linux/holo_nav_dash/requirements.txt b/linux/holo_nav_dash/requirements.txt new file mode 100644 index 0000000..1e4d396 --- /dev/null +++ b/linux/holo_nav_dash/requirements.txt @@ -0,0 +1,3 @@ +flask==1.1.2 +gevents==21.1.2 +gevent-websocket==0.10.1 diff --git a/linux/holo_nav_dash/settings.py b/linux/holo_nav_dash/settings.py new file mode 100644 index 0000000..c50988b --- /dev/null +++ b/linux/holo_nav_dash/settings.py @@ -0,0 +1,34 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +# -------------------------------------------------------------------------------------------- + +import os,sys + +# ----------------------------------------------------------------------------- +# +def initialize(): + global appVersion # application version + global fVerbose # verbose console output + global application # application object + global cwd # current working directory + global numHttpServers + global threadIOLoop + + if (os.name == 'nt'): + os.system('color') # needed on windows platforms to support terminal colors + + appVersion = 'v1.00.0102' + fVerbose = False + application = None + + cwd = os.getcwd() + + # If we're in the "src" directory, use the parent directory as the base path + # for files to make specifying paths a little easier for the user + if (cwd[-4:] == '\\src') or (cwd[-4:] == '/src'): + cwd = cwd[0:-4] + + numHttpServers = 0 + threadIOLoop = None + diff --git a/linux/holo_nav_dash/static/images/Logo_RobotHandshake.png b/linux/holo_nav_dash/static/images/Logo_RobotHandshake.png new file mode 100644 index 0000000..c407ecf Binary files /dev/null and b/linux/holo_nav_dash/static/images/Logo_RobotHandshake.png differ diff --git a/linux/holo_nav_dash/static/js/app.js b/linux/holo_nav_dash/static/js/app.js new file mode 100644 index 0000000..c6d712b --- /dev/null +++ b/linux/holo_nav_dash/static/js/app.js @@ -0,0 +1,194 @@ +//-------------------------------------------------------------------------------------------- +// Copyright(c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+// -------------------------------------------------------------------------------------------- + +var _clrControlEnabled = "#000000"; +var _clrControlDisabled = "#a0a0a0"; + +var _state_unknown = "unknown"; +var _state_not_started = "not started"; +var _state_running = "running"; +var _state_starting = "starting"; + +var _nodeMatrix = +[ + { context: "pepper_robot", lblID: "lblPepperRobot", btnID: "btnStartPepper", btnIcon: "toggleBtnPepper" }, + { context: "hololens_ros_bridge", lblID: "lblHololensROSBridge", btnID: "btnStartBridge", btnIcon: "toggleBtnBridge" }, + { context: "anchor_localizer", lblID: "lblAnchorLocalizer", btnID: "btnStartAnchor", btnIcon: "toggleBtnAnchor" }, + { context: "static_calibration", lblID: "lblStaticCalibration", btnID: "btnStartCalibration", btnIcon: "toggleBtnCalibration" }, + { context: "dynamic_adjuster", lblID: "lblDynamicAdjuster", btnID: "btnStartAdjuster", btnIcon: "toggleBtnAdjuster" }, + { context: "localizer", lblID: "lblLocalizer", btnID: "btnStartLocalizer", btnIcon: "toggleBtnLocalizer" } +]; + +var _lblHololensSequence = null; +var _btnHoloLensCalibration = null; + +//-------------------------------------------------------------------------------------------- +function onBodyLoad() +{ + // + // no scrollbars + document.body.style.overflow = 'hidden'; + + setupMiscellaneous(); + + // + // last... + setupWebSocket(); +} +//-------------------------------------------------------------------------------------------- +function setupMiscellaneous() +{ + var objBtn; + + for (var i = 0; i < _nodeMatrix.length; i++) { + // + // add additional keys + _nodeMatrix[i]['state'] = _state_unknown; + + objBtn = document.getElementById(_nodeMatrix[i]['btnID']); + if (objBtn) { + // + // disable and hide play/stop buttons. + if (false) { + // + // set up click event + var idx = i; + + objBtn.addEventListener("click", StartStopNode.bind(this, idx), false); + } else { + objBtn.style.display = "none"; + } + } + } + + _btnHoloLensCalibration = document.getElementById("btnHoloLensCalibration"); + if (_btnHoloLensCalibration) { + _btnHoloLensCalibration.disabled = true; + + _btnHoloLensCalibration.addEventListener("click", function () { + var msg = JSON.stringify({ msgType: "calibrateHoloLens" }); + + sendMessage(msg); + }); + } + + objBtn = document.getElementById("btnExitApp"); + if (objBtn) { + objBtn.addEventListener("click", function () { + var msg = JSON.stringify({ msgType: "quitApplication" }); + + sendMessage(msg); + }); + } + + _lblHololensSequence = document.getElementById("lblHololensSequence"); +} +//-------------------------------------------------------------------------------------------- +function EnableDisableUIControl(id, enable, changeColor = false) +{ + var obj = document.getElementById(id); + + if (!obj) + return; + + if (obj.checked != undefined) + obj.checked = enable ? true : false; + + if (obj.disabled != undefined) + obj.disabled = enable ? false : true; + + if (changeColor) + obj.style.color = enable ? 
_clrControlEnabled : _clrControlDisabled; +} +//-------------------------------------------------------------------------------------------- +function StartStopNode(idx, event) +{ + if ((idx < 0) || (idx >= _nodeMatrix.length)) { + console.warn("StartStopNode(): idx out of range."); + return; + } + + var info = _nodeMatrix[idx]; + var context = info['context']; + var state = info['state']; + var objLbl = document.getElementById(info['lblID']); + + if ((state == _state_unknown) || (state == _state_not_started)) { + var msg = JSON.stringify({ msgType: "startNode", startNode: context }); + + sendMessage(msg); + + EnableDisableUIControl(info['btnID'], false); + + _nodeMatrix[idx]['state'] = _state_starting; + + if (objLbl) + objLbl.innerText = _state_starting; + } +} +//-------------------------------------------------------------------------------------------- +function setROSNodesStatus(status) +{ + var enableCalibrationBtn = true; + + // console.log(status); + + for (var i = 0; i < _nodeMatrix.length; i++) { + var info = _nodeMatrix[i]; + var context = info['context']; + var state = info['state']; + var isRunning = status[context]; + var obj; + var update = false; + + // + // all modules need to be running for calibration + if (!isRunning) + enableCalibrationBtn = false; + + if ((state != _state_running) && (isRunning)) { + _nodeMatrix[i]['state'] = state = _state_running; + update = true; + } else if (((state == _state_running) || (state == _state_unknown)) && (!isRunning)) { + _nodeMatrix[i]['state'] = state = _state_not_started; + update = true; + } + + if (update) { + // + // update + obj = document.getElementById(info['lblID']); + if (obj) { + console.log("changing label to '" + state + "'"); + obj.innerText = state; + } else { + console.log("no label object."); + } + + if (state != _state_starting) { + var btnID = info['btnID']; + var btnIcon = info['btnIcon']; + + EnableDisableUIControl(btnID, true); + + obj = document.getElementById(btnIcon); + if (obj) { + obj.className = isRunning ? "fa fa-stop" : "fa fa-play"; + } + } + } + } + + if (_lblHololensSequence != null) { + if (status.hololens_sequence) + _lblHololensSequence.innerText = status.hololens_sequence; + else + _lblHololensSequence.innerText = "-"; + } + + if (_btnHoloLensCalibration != null) { + _btnHoloLensCalibration.disabled = false; // enableCalibrationBtn ? false : true; + } +} diff --git a/linux/holo_nav_dash/static/js/websocket.js b/linux/holo_nav_dash/static/js/websocket.js new file mode 100644 index 0000000..b47db4c --- /dev/null +++ b/linux/holo_nav_dash/static/js/websocket.js @@ -0,0 +1,164 @@ +//-------------------------------------------------------------------------------------------- +// Copyright(c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+// -------------------------------------------------------------------------------------------- + + +var _socket = null; +var _wsWrapper = null; + +//-------------------------------------------------------------------------------------------- +function setupWebSocket() +{ + _wsWrapper = new WebSocketWrapper(); + if (_wsWrapper == null) { + reportStatus("Failed to create webSocket wrapper object."); + return; + } + + _wsWrapper.initialize();; +} +//-------------------------------------------------------------------------------------------- +function sendMessage(msg) +{ + if (_wsWrapper == null) + return; + + _wsWrapper.sendMessage(msg); +} +//-------------------------------------------------------------------------------------------- + +var messages = null; + +//-------------------------------------------------------------------------------------------- +function reportStatus(msg) +{ + if (messages == null) + messages = document.getElementById('lblStatus'); + + var shouldScroll = messages.scrollTop + messages.clientHeight === messages.scrollHeight; + + var lbl = document.createElement('div'); + + lbl.innerHTML = msg; + //lbl.style.padding = "4px"; + //lbl.style.color = "#000"; + lbl.style.fontSize = "75%"; + + messages.appendChild(lbl); + + if (!shouldScroll) { + // scroll To Bottom + messages.scrollTop = messages.scrollHeight; + } +} +//-------------------------------------------------------------------------------------------- +WebSocketWrapper = function () +{ + this.webSocket = null; + this.connected = false; +} + +WebSocketWrapper.prototype.constructor = WebSocketWrapper; + +//-------------------------------------------------------------------------------------------- +WebSocketWrapper.prototype.initialize = function () +{ + var host = "ws://" + location.hostname + ":" + location.port + "/ws"; + + try + { + reportStatus('Connecting to "' + host + '"...'); + + this.webSocket = new WebSocket(host); + + this.webSocket.onopen = this.onWebSocketOpen.bind(this); + this.webSocket.onclose = this.onWebSocketClose.bind(this); + this.webSocket.onmessage = this.onWebSocketMessage.bind(this); + this.webSocket.onerror = this.onWebSocketError.bind(this); + } + catch (exception) + { + reportStatus('WebSocket: exception'); + if (window.console) + console.log(exception); + } +} +//-------------------------------------------------------------------------------------------- +WebSocketWrapper.prototype.onWebSocketOpen = function (event) +{ + this.connected = true; + reportStatus('WebSocket: connection opened.'); +} +//-------------------------------------------------------------------------------------------- +WebSocketWrapper.prototype.onWebSocketClose = function (event) +{ + var reason = ""; + + // + // only error code 1006 is dispatched: https://www.w3.org/TR/websockets/#concept-websocket-close-fail + // + if (event.code == 1000) + reason = "Normal closure, meaning that the purpose for which the connection was established has been fulfilled."; + else if (event.code == 1001) + reason = "An endpoint is \"going away\", such as a server going down or a browser having navigated away from a page."; + else if (event.code == 1002) + reason = "An endpoint is terminating the connection due to a protocol error"; + else if (event.code == 1003) + reason = "An endpoint is terminating the connection because it has received a type of data it cannot accept (e.g., an endpoint that understands only text data MAY send this if it receives a binary message)."; + else if (event.code == 1004) + reason = "Reserved. 
The specific meaning might be defined in the future."; + else if (event.code == 1005) + reason = "No status code was actually present."; + else if (event.code == 1006) { + if (this.connected) + reason = "The connection was closed abnormally."; + else + reason = "The connection could not be established."; + } else if (event.code == 1007) + reason = "An endpoint is terminating the connection because it has received data within a message that was not consistent with the type of the message (e.g., non-UTF-8 [http://tools.ietf.org/html/rfc3629] data within a text message)."; + else if (event.code == 1008) + reason = "An endpoint is terminating the connection because it has received a message that \"violates its policy\". This reason is given either if there is no other sutible reason, or if there is a need to hide specific details about the policy."; + else if (event.code == 1009) + reason = "An endpoint is terminating the connection because it has received a message that is too big for it to process."; + else if (event.code == 1010) // Note that this status code is not used by the server, because it can fail the WebSocket handshake instead. + reason = "An endpoint (client) is terminating the connection because it has expected the server to negotiate one or more extension, but the server didn't return them in the response message of the WebSocket handshake.
Specifically, the extensions that are needed are: " + event.reason; + else if (event.code == 1011) + reason = "A server is terminating the connection because it encountered an unexpected condition that prevented it from fulfilling the request."; + else if (event.code == 1015) + reason = "The connection was closed due to a failure to perform a TLS handshake (e.g., the server certificate can't be verified)."; + else + reason = "Unknown reason"; + + reportStatus('WebSocket: ' + reason); + + this.connected = false; +} +//-------------------------------------------------------------------------------------------- +WebSocketWrapper.prototype.onWebSocketMessage = function (event) +{ + // reportStatus('WebSocket: message received: ' + event.data); + + if (typeof event === "undefined" || typeof event.data !== "string") + return; + + var data = JSON.parse(event.data); + + // console.log("socket msg: '" + data.msgType + "'"); + if (data.msgType == 'initialization') { + } else if (data.msgType == 'status') { + setROSNodesStatus(data.status); + } +} +//-------------------------------------------------------------------------------------------- +WebSocketWrapper.prototype.onWebSocketError = function (event) +{ + //reportStatus('WebSocket Error: ' + event.data + ''); + //console.log(event); +} +//-------------------------------------------------------------------------------------------- +WebSocketWrapper.prototype.sendMessage = function (msg) +{ + if (this.connected) + this.webSocket.send(msg); +} diff --git a/linux/holo_nav_dash/static/main.css b/linux/holo_nav_dash/static/main.css new file mode 100644 index 0000000..8569494 --- /dev/null +++ b/linux/holo_nav_dash/static/main.css @@ -0,0 +1,98 @@ + +@import url(https://fonts.googleapis.com/css?family=Lato:700italic,400,400italic,700); +@import url(https://fonts.googleapis.com/css?family=Open+Sans:400,800); +@import url(https://fonts.googleapis.com/css?family=Droid+Sans+Mono); + +html { + -webkit-font-smoothing: antialiased; +} + +/* + +*/ +input.customBtn { + font-size: .825em; + font-weight: 700; + padding: .35em 1em; + background-color: #eaeef1; + background-image: linear-gradient(rgba(0,0,0,0), rgba(0,0,0,.1)); + border: #c0c0c0; + border-style: solid; + border-width: 1px; + border-radius: 3px; + outline: none; + color: rgba(0,0,0,.5); + text-decoration: none; + text-shadow: 0 1px 1px rgba(255,255,255,.7); +} + +input.customBtn:hover, input.customBtn.hover { + background-color: #fff; +} + +input.customBtn:active, input.customBtn.active { + background-color: #d0d3d6; + background-image: linear-gradient(rgba(0,0,0,.1), rgba(0,0,0,0)); + box-shadow: inset 0 0 2px rgba(0,0,0,.2), inset 0 2px 5px rgba(0,0,0,.2), 0 1px rgba(255,255,255,.2); +} + +/* + +*/ +.butt { + font-size: .825em; + text-decoration: none; + font-weight: 700; + padding: .35em 1em; + background-color: #eaeef1; + background-image: linear-gradient(rgba(0,0,0,0), rgba(0,0,0,.1)); + border-radius: 3px; + color: rgba(0,0,0,.6); + text-shadow: 0 1px 1px rgba(255,255,255,.7); + box-shadow: 0 0 0 1px rgba(0,0,0,.2), 0 1px 2px rgba(0,0,0,.2), inset 0 1px 2px rgba(255,255,255,.7); +} + +.butt:hover, .butt.hover { + background-color: #fff; +} + +.butt:active, .butt.active { + background-color: #d0d3d6; + background-image: linear-gradient(rgba(0,0,0,.1), rgba(0,0,0,0)); + box-shadow: inset 0 0 2px rgba(0,0,0,.2), inset 0 2px 5px rgba(0,0,0,.2), 0 1px rgba(255,255,255,.2); +} + +:root { + --background:#f9f9f9; + --foreground:#2c3e50; + + --foreground-light:#34495e; + + --size:50px; + --ratio:1.2; + 
+ --transition-time:0.3s; +} + +body { + background: var(--background); + font-family: Droid Sans Mono; + font-size: 10pt; + text-align: center; +} + +h1 { + text-transform: uppercase; + color: var(--foreground-light); + letter-spacing: 2px; + font-size: 2em; + margin-bottom: 0; +} + +.headline { + display: block; + color: var(--foreground); + font-size: 1.5em; + margin-bottom: 1.5em; +} + diff --git a/linux/holo_nav_dash/templates/favicon.ico b/linux/holo_nav_dash/templates/favicon.ico new file mode 100644 index 0000000..87f75e8 Binary files /dev/null and b/linux/holo_nav_dash/templates/favicon.ico differ diff --git a/linux/holo_nav_dash/templates/index.html b/linux/holo_nav_dash/templates/index.html new file mode 100644 index 0000000..e2fad9c --- /dev/null +++ b/linux/holo_nav_dash/templates/index.html @@ -0,0 +1,116 @@ + + + + Hololens Navigation Dashboard + + + + + + + + + + + + + + + + + + +
+
+
Microsoft Applied Robotics Research Library

+
HoloLens Navigation Dashboard
+
+
+
+
+ + + +
+ Required ROS Nodes +
+ + + + + + + + +
Pepper ROS Stack
unknown
HoloLens ROS Bridge
unknown
Sequence
-
Anchor Localizer
unknown
Static Calibration
unknown
Dynamic Adjuster
unknown
Localizer
unknown
+
+
+ + + + + + + +
+ Application +
+ + + + +
+
+
+ + + +
+ Status +
+
+
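For orientation, the node table above is driven by the dashboard scripts in `static/js/`: each row corresponds to an entry in `_nodeMatrix`, and `setROSNodesStatus()` updates the row from the `status` messages pushed by `webSocket.py` over the `/ws` socket. A minimal sketch of the expected shapes follows; `_nodeMatrix` is defined elsewhere in the dashboard sources, so the context keys and element ids shown here are illustrative assumptions, not the actual values.

```javascript
// Illustrative only: the real _nodeMatrix lives in the dashboard script and its
// context strings / element ids may differ. This sketch shows the shape that
// StartStopNode() and setROSNodesStatus() rely on.
var _nodeMatrixExample = [
    {
        context: "hololens_ros_bridge",  // hypothetical key; must match a key in the status dict sent by webSocket.py
        state:   "not started",          // one of the _state_* values
        lblID:   "lblHoloLensBridge",    // hypothetical id of the element showing the state text
        btnID:   "btnHoloLensBridge",    // hypothetical id of the start/stop button
        btnIcon: "icoHoloLensBridge"     // hypothetical id of the icon toggled between "fa fa-play" and "fa fa-stop"
    }
];

// A "status" message received over the /ws socket is expected to look like this;
// onWebSocketMessage() hands data.status straight to setROSNodesStatus().
var exampleStatusMessage = {
    msgType: "status",
    status: {
        hololens_ros_bridge: true,       // one boolean per _nodeMatrix context (key names are assumptions)
        hololens_sequence: 1234          // shown in the "Sequence" row, or "-" when absent
    }
};
```

Under these assumptions, a truthy value flips the row's label to the running state and its icon to "fa fa-stop"; a falsy value marks it not started and re-enables the start button.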
+ + diff --git a/linux/holo_nav_dash/webSocket.py b/linux/holo_nav_dash/webSocket.py new file mode 100644 index 0000000..9f40f4e --- /dev/null +++ b/linux/holo_nav_dash/webSocket.py @@ -0,0 +1,221 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +# -------------------------------------------------------------------------------------------- + +import sys +import os +import time +import numpy as np +import enum +import copy +import json +import gevent +from gevent.queue import Queue, Empty +from gevent.event import AsyncResult +from flask import Flask, render_template +from geventwebsocket import WebSocketServer, WebSocketApplication, Resource, WebSocketError +from collections import OrderedDict + +# ----------------------------------------------------------------------------- +# +class webSocket(WebSocketApplication): + msgType = "msgType" + eMsgType = enum.Enum(msgType, "addClient removeClient status") + + EMsgKey = enum.Enum("EMsgKey", "msgType client") + + application = None + initialized = False + queue = Queue() + clients = set() + + # ----------------------------------------------------------------------------- + # + def on_open(self): + if (self.initialized is not True): + if (self.application is not None): + self.application.setCallbackSendMessage(self.sendMessage) + self.application.setCallbackQueueMessage(self.queueMessage) + else: + print("websocket: application object expected.") + + gevent.spawn(self.handle_queue_messages) + self.initialized = True + + self.addClient(self) + self.sendInitialization(self.ws) + + # ----------------------------------------------------------------------------- + # + def on_close(self, reason): + self.removeClient(self) + + # ----------------------------------------------------------------------------- + # + def on_message(self, msg): + if msg is None: + return + + msg = json.loads(msg) + + # print ("on_message(): ", msg) + + if msg[self.msgType] == "calibrateHoloLens": + self.CalibrateHoloLens() + elif msg[self.msgType] == "startNode": + self.StartNode(msg['startNode']) + elif msg[self.msgType] == "quitApplication": + self.quitApplication() + else: + print("invalid message, msg = '%s'" + str(msg)) + + # ----------------------------------------------------------------------------- + # + def handle_queue_messages(self): + msg = None + while True: + msg = self.queue.get() + + #if msg is not None: + # print("msg: '" + str(msg[self.msgType]) + "'") + #else: + # print("msg: none") + + if msg is None: + pass + elif msg[self.msgType] is self.eMsgType.addClient: + self.clients.add(msg[self.EMsgKey.client]) + elif msg[self.msgType] is self.eMsgType.removeClient: + self.clients.discard(msg[self.EMsgKey.client]) + elif (msg[self.msgType] == "status"): + j = json.dumps(msg) + self.sendMessage(j) + elif (msg[self.msgType] == "pose"): + j = json.dumps(msg) + self.sendMessage(j) + elif (msg[self.msgType] == "exit_handle_queue_messages"): + break + else: + print("handle_queue_messages(): invalid message, msgType = %s", msg[self.msgType]) + + gevent.sleep(0.02) + + # ----------------------------------------------------------------------------- + # + @classmethod + def close(self): + if (self.queue is not None): + msg = { self.msgType: "exit_handle_queue_messages" } + self.queue.put(msg) + + # ----------------------------------------------------------------------------- + # + @classmethod + def setApplication(self, 
application): + self.application = application + + # ----------------------------------------------------------------------------- + # + @classmethod + def addClient(self, client): + msg = { + self.msgType: self.eMsgType.addClient, + self.EMsgKey.client: client} + + self.queue.put(msg) + + # ----------------------------------------------------------------------------- + # + @classmethod + def removeClient(self, client): + msg = { + self.msgType: self.eMsgType.removeClient, + self.EMsgKey.client: client} + + self.queue.put(msg) + + # ----------------------------------------------------------------------------- + # + @classmethod + def sendInitialization(self, ws): + if (self.application is None): + print("sendInitialization(): application object expected, client: " + str(id(ws))) + return + + status = { + self.msgType: "initialization" + } + + #status["gestureInfo"] = self.application.getGestureInfo() + #status["status"] = self.application.getStatus() + #status["pose"] = self.application.getPose() + + # + # serialize + j = json.dumps(status) + + try: + ws.send(j) + except: + print("sendInitialization() exception, client: " + str(id(ws))) + + # ----------------------------------------------------------------------------- + # + @classmethod + def queueMessage(self, msg): + self.queue.put(msg) + + # ----------------------------------------------------------------------------- + # + def handleException(self, e, msg): + if (e.message): + print(msg + " " + e.message) + elif ((e.errno) and (e.errno == 2)): + print(msg + " '" + e.filename + "' - " + e.strerror) + else: + print(msg + " " + e.strerror) + + # ----------------------------------------------------------------------------- + # + @classmethod + def sendMessage(self, msg): + j = json.dumps(msg) + ws = None + try: + for client in self.clients: + ws = client.ws + ws.send(j) + except Exception as e: + self.handleException(e, "application.sendMessage() exception, client: " + str(id(ws))) + + # ----------------------------------------------------------------------------- + # + @classmethod + def CalibrateHoloLens(self): + try: + if (self.application is not None): + self.application.CalibrateHoloLens() + except Exception as e: + self.handleException(e, "application.CalibrateHoloLens() exception, client: " + str(id(ws))) + + # ----------------------------------------------------------------------------- + # + @classmethod + def StartNode(self, name): + try: + if (self.application is not None): + self.application.StartNode(name) + except Exception as e: + self.handleException(e, "application.StartNode() exception, client: " + str(id(ws))) + + # ----------------------------------------------------------------------------- + # + @classmethod + def quitApplication(self): + try: + if (self.application is not None): + self.application.quitApplication() + except Exception as e: + self.handleException(e, "application.quitApplication() exception, client: " + str(id(ws))) + diff --git a/linux/navigation_launcher/CMakeLists.txt b/linux/navigation_launcher/CMakeLists.txt new file mode 100644 index 0000000..4403126 --- /dev/null +++ b/linux/navigation_launcher/CMakeLists.txt @@ -0,0 +1,195 @@ +cmake_minimum_required(VERSION 2.8.3) +project(navigation_launcher) + +## Compile as C++11, supported in ROS Kinetic and newer +# add_compile_options(-std=c++11) + +## Find catkin macros and libraries +## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) +## is used, also find other catkin packages +find_package(catkin REQUIRED) + +## System 
dependencies are found with CMake's conventions +# find_package(Boost REQUIRED COMPONENTS system) + + +## Uncomment this if the package has a setup.py. This macro ensures +## modules and global scripts declared therein get installed +## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html +# catkin_python_setup() + +################################################ +## Declare ROS messages, services and actions ## +################################################ + +## To declare and build messages, services or actions from within this +## package, follow these steps: +## * Let MSG_DEP_SET be the set of packages whose message types you use in +## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...). +## * In the file package.xml: +## * add a build_depend tag for "message_generation" +## * add a build_depend and a run_depend tag for each package in MSG_DEP_SET +## * If MSG_DEP_SET isn't empty the following dependency has been pulled in +## but can be declared for certainty nonetheless: +## * add a run_depend tag for "message_runtime" +## * In this file (CMakeLists.txt): +## * add "message_generation" and every package in MSG_DEP_SET to +## find_package(catkin REQUIRED COMPONENTS ...) +## * add "message_runtime" and every package in MSG_DEP_SET to +## catkin_package(CATKIN_DEPENDS ...) +## * uncomment the add_*_files sections below as needed +## and list every .msg/.srv/.action file to be processed +## * uncomment the generate_messages entry below +## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...) + +## Generate messages in the 'msg' folder +# add_message_files( +# FILES +# Message1.msg +# Message2.msg +# ) + +## Generate services in the 'srv' folder +# add_service_files( +# FILES +# Service1.srv +# Service2.srv +# ) + +## Generate actions in the 'action' folder +# add_action_files( +# FILES +# Action1.action +# Action2.action +# ) + +## Generate added messages and services with any dependencies listed here +# generate_messages( +# DEPENDENCIES +# std_msgs # Or other packages containing msgs +# ) + +################################################ +## Declare ROS dynamic reconfigure parameters ## +################################################ + +## To declare and build dynamic reconfigure parameters within this +## package, follow these steps: +## * In the file package.xml: +## * add a build_depend and a run_depend tag for "dynamic_reconfigure" +## * In this file (CMakeLists.txt): +## * add "dynamic_reconfigure" to +## find_package(catkin REQUIRED COMPONENTS ...) 
+## * uncomment the "generate_dynamic_reconfigure_options" section below +## and list every .cfg file to be processed + +## Generate dynamic reconfigure parameters in the 'cfg' folder +# generate_dynamic_reconfigure_options( +# cfg/DynReconf1.cfg +# cfg/DynReconf2.cfg +# ) + +################################### +## catkin specific configuration ## +################################### +## The catkin_package macro generates cmake config files for your package +## Declare things to be passed to dependent projects +## INCLUDE_DIRS: uncomment this if you package contains header files +## LIBRARIES: libraries you create in this project that dependent projects also need +## CATKIN_DEPENDS: catkin_packages dependent projects also need +## DEPENDS: system dependencies of this project that dependent projects also need +catkin_package( +# INCLUDE_DIRS include +# LIBRARIES navigation_launcher +# CATKIN_DEPENDS other_catkin_pkg +# DEPENDS system_lib +) + +########### +## Build ## +########### + +## Specify additional locations of header files +## Your package locations should be listed before other locations +include_directories( +# include +# ${catkin_INCLUDE_DIRS} +) + +## Declare a C++ library +# add_library(${PROJECT_NAME} +# src/${PROJECT_NAME}/navigation_launcher.cpp +# ) + +## Add cmake target dependencies of the library +## as an example, code may need to be generated before libraries +## either from message generation or dynamic reconfigure +# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) + +## Declare a C++ executable +## With catkin_make all packages are built within a single CMake context +## The recommended prefix ensures that target names across packages don't collide +# add_executable(${PROJECT_NAME}_node src/navigation_launcher_node.cpp) + +## Rename C++ executable without prefix +## The above recommended prefix causes long target names, the following renames the +## target back to the shorter version for ease of user use +## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node" +# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "") + +## Add cmake target dependencies of the executable +## same as for the library above +# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) + +## Specify libraries to link a library or executable target against +# target_link_libraries(${PROJECT_NAME}_node +# ${catkin_LIBRARIES} +# ) + +############# +## Install ## +############# + +# all install targets should use catkin DESTINATION variables +# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html + +## Mark executable scripts (Python etc.) for installation +## in contrast to setup.py, you can choose the destination +# install(PROGRAMS +# scripts/my_python_script +# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +# ) + +## Mark executables and/or libraries for installation +# install(TARGETS ${PROJECT_NAME} ${PROJECT_NAME}_node +# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} +# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} +# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +# ) + +## Mark cpp header files for installation +# install(DIRECTORY include/${PROJECT_NAME}/ +# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} +# FILES_MATCHING PATTERN "*.h" +# PATTERN ".svn" EXCLUDE +# ) + +## Mark other files for installation (e.g. launch and bag files, etc.) 
+# install(FILES +# # myfile1 +# # myfile2 +# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +# ) + +############# +## Testing ## +############# + +## Add gtest based cpp test target and link libraries +# catkin_add_gtest(${PROJECT_NAME}-test test/test_navigation_launcher.cpp) +# if(TARGET ${PROJECT_NAME}-test) +# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME}) +# endif() + +## Add folders to be run by python nosetests +# catkin_add_nosetests(test) diff --git a/linux/navigation_launcher/config/costmap_common.yaml b/linux/navigation_launcher/config/costmap_common.yaml new file mode 100644 index 0000000..cee5502 --- /dev/null +++ b/linux/navigation_launcher/config/costmap_common.yaml @@ -0,0 +1,24 @@ +footprint: [[-0.25, -0.05], [-0.25, 0.05], [0.12, 0.25], [0.20, 0.20], [0.20, -0.20], [0.12,-0.25]] +footprint_padding: 0.01 + +robot_base_frame: base_footprint +update_frequency: 4.0 +publish_frequency: 3.0 +transform_tolerance: 0.5 + +resolution: 0.05 + +obstacle_range: 5.5 +raytrace_range: 6.0 + +#layer definitions +static: + map_topic: /map + subscribe_to_updates: true + +#obstacles_laser: +# observation_sources: laser +# laser: {data_type: LaserScan, clearing: true, marking: true, topic: scan, inf_is_valid: true} + +inflation: + inflation_radius: 1.0 diff --git a/linux/navigation_launcher/launch/hololensstack.launch b/linux/navigation_launcher/launch/hololensstack.launch new file mode 100644 index 0000000..f4a2c8d --- /dev/null +++ b/linux/navigation_launcher/launch/hololensstack.launch @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/linux/navigation_launcher/launch/holonavstack.launch b/linux/navigation_launcher/launch/holonavstack.launch new file mode 100644 index 0000000..86e2b95 --- /dev/null +++ b/linux/navigation_launcher/launch/holonavstack.launch @@ -0,0 +1,42 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/linux/navigation_launcher/launch/navstack.launch b/linux/navigation_launcher/launch/navstack.launch new file mode 100644 index 0000000..c43ff88 --- /dev/null +++ b/linux/navigation_launcher/launch/navstack.launch @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/linux/navigation_launcher/package.xml b/linux/navigation_launcher/package.xml new file mode 100644 index 0000000..6dfb2d0 --- /dev/null +++ b/linux/navigation_launcher/package.xml @@ -0,0 +1,50 @@ + + + navigation_launcher + 0.0.0 + The navigation_launcher package + + + + + cvlros + + + + + + TODO + + + + + + + + + + + + + + + + + + + + + + + + + + catkin + + + + + + + + diff --git a/linux/navigation_launcher/params/map.yaml b/linux/navigation_launcher/params/map.yaml new file mode 100644 index 0000000..0280009 --- /dev/null +++ b/linux/navigation_launcher/params/map.yaml @@ -0,0 +1,6 @@ +image: left0000.jpg +resolution: 0.01 +origin: [0.0, 0.0, 0.0] +occupied_thresh: 0.65 +free_thresh: 0.196 +negate: 0 \ No newline at end of file diff --git a/linux/navigation_launcher/params/test.png b/linux/navigation_launcher/params/test.png new file mode 100644 index 0000000..67852f5 Binary files /dev/null and b/linux/navigation_launcher/params/test.png differ diff --git a/rviz/pepper.rviz b/rviz/pepper.rviz new file mode 100644 index 0000000..f991228 --- /dev/null +++ b/rviz/pepper.rviz @@ -0,0 +1,267 @@ +Panels: + - Class: rviz/Displays + Help Height: 78 + Name: Displays + Property Tree Widget: + Expanded: + - /Global Options1 + - /Status1 + - /RobotModel1/Status1 + - /DepthCloud1/Status1 + 
- /DepthCloud1/Auto Size1 + - /DepthCloud1/Occlusion Compensation1 + - /TF1/Frames1 + Splitter Ratio: 0.37704917788505554 + Tree Height: 424 + - Class: rviz/Selection + Name: Selection + - Class: rviz/Tool Properties + Expanded: + - /2D Pose Estimate1 + - /2D Nav Goal1 + - /Publish Point1 + Name: Tool Properties + Splitter Ratio: 0.5886790156364441 + - Class: rviz/Views + Expanded: + - /Current View1 + Name: Views + Splitter Ratio: 0.5 + - Class: rviz/Time + Experimental: false + Name: Time + SyncMode: 2 + SyncSource: "" +Preferences: + PromptSaveOnExit: true +Toolbars: + toolButtonStyle: 2 +Visualization Manager: + Class: "" + Displays: + - Alpha: 0.5 + Cell Size: 1 + Class: rviz/Grid + Color: 160; 160; 164 + Enabled: true + Line Style: + Line Width: 0.029999999329447746 + Value: Lines + Name: Grid + Normal Cell Count: 0 + Offset: + X: 0 + Y: 0 + Z: 0 + Plane: XY + Plane Cell Count: 10 + Reference Frame: + Value: true + - Alpha: 1 + Class: rviz/RobotModel + Collision Enabled: false + Enabled: true + Links: + All Links Enabled: true + Expand Joint Details: false + Expand Link Details: false + Expand Tree: false + Link Tree Style: Links in Alphabetic Order + Name: RobotModel + Robot Description: robot_description + TF Prefix: "" + Update Interval: 0 + Value: true + Visual Enabled: true + - Alpha: 1 + Auto Size: + Auto Size Factor: 1 + Value: true + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 10 + Min Value: -10 + Value: true + Axis: Z + Channel Name: z + Class: rviz/DepthCloud + Color: 255; 255; 255 + Color Image Topic: /pepper_robot/camera/front/image_raw + Color Transformer: RGB8 + Color Transport Hint: raw + Decay Time: 0 + Depth Map Topic: /pepper_robot/camera/depth/image_raw + Depth Map Transport Hint: raw + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Max Intensity: 7.51200008392334 + Min Color: 0; 0; 0 + Min Intensity: 1.5169999599456787 + Name: DepthCloud + Occlusion Compensation: + Occlusion Time-Out: 30 + Value: false + Position Transformer: XYZ + Queue Size: 5 + Selectable: true + Size (Pixels): 3 + Style: Squares + Topic Filter: false + Use Fixed Frame: true + Use rainbow: true + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /pepper_robot/camera/front/image_raw + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/TF + Enabled: false + Frame Timeout: 15 + Frames: + All Enabled: true + Marker Scale: 1 + Name: TF + Show Arrows: true + Show Axes: true + Show Names: true + Tree: + {} + Update Interval: 0 + Value: false + - Alpha: 0.5 + Buffer Length: 1 + Class: rviz/Range + Color: 255; 255; 255 + Enabled: true + Name: Sonar_Front + Queue Size: 100 + Topic: /pepper_robot/sonar/front + Unreliable: false + Value: true + - Alpha: 0.5 + Buffer Length: 1 + Class: rviz/Range + Color: 255; 255; 255 + Enabled: true + Name: Sonar_Back + Queue Size: 100 + Topic: /pepper_robot/sonar/back + Unreliable: false + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 10 + Min Value: -10 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/LaserScan + Color: 255; 0; 0 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Max Intensity: 4096 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: LaserScan + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 
0.05000000074505806 + Style: Flat Squares + Topic: /pepper_robot/laser + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: true + - Alpha: 0.699999988079071 + Class: rviz/Map + Color Scheme: map + Draw Behind: false + Enabled: true + Name: Map + Topic: /map + Unreliable: false + Use Timestamp: false + Value: true + Enabled: true + Global Options: + Background Color: 48; 48; 48 + Default Light: true + Fixed Frame: base_footprint + Frame Rate: 30 + Name: root + Tools: + - Class: rviz/Interact + Hide Inactive Objects: true + - Class: rviz/MoveCamera + - Class: rviz/Select + - Class: rviz/FocusCamera + - Class: rviz/Measure + - Class: rviz/SetInitialPose + Theta std deviation: 0.2617993950843811 + Topic: /initialpose + X std deviation: 0.5 + Y std deviation: 0.5 + - Class: rviz/SetGoal + Topic: /move_base_simple/goal + - Class: rviz/PublishPoint + Single click: true + Topic: /clicked_point + Value: true + Views: + Current: + Class: rviz/Orbit + Distance: 40.26953125 + Enable Stereo Rendering: + Stereo Eye Separation: 0.05999999865889549 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Focal Point: + X: 7.949053764343262 + Y: 6.601431846618652 + Z: 3.630021333694458 + Focal Shape Fixed Size: true + Focal Shape Size: 0.05000000074505806 + Invert Z Axis: false + Name: Current View + Near Clip Distance: 0.009999999776482582 + Pitch: 1.5697963237762451 + Target Frame: + Value: Orbit (rviz) + Yaw: 3.130409002304077 + Saved: ~ +Window Geometry: + Displays: + collapsed: false + Height: 1016 + Hide Left Dock: false + Hide Right Dock: false + Image: + collapsed: false + QMainWindow State: 000000ff00000000fd0000000400000000000002a10000035afc020000000afb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003d00000233000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d0061006700650100000276000001210000001600fffffffb0000000a0049006d00610067006501000002b2000001130000000000000000000000010000010000000385fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003d00000385000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000005a00000003efc0100000002fb0000000800540069006d00650100000000000005a0000002eb00fffffffb0000000800540069006d00650100000000000004500000000000000000000002f90000035a00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + Selection: + collapsed: false + Time: + collapsed: false + Tool Properties: + collapsed: false + Views: + collapsed: false + Width: 1440 + X: 85 + Y: 41 diff --git a/rviz/pepper_nav.rviz b/rviz/pepper_nav.rviz new file mode 100644 index 0000000..536d87e --- /dev/null +++ b/rviz/pepper_nav.rviz @@ -0,0 +1,661 @@ +Panels: + - Class: 
rviz/Displays + Help Height: 78 + Name: Displays + Property Tree Widget: + Expanded: + - /Global Options1 + - /Status1 + - /RobotModel1 + - /DepthCloud1/Status1 + - /DepthCloud1/Auto Size1 + - /DepthCloud1/Occlusion Compensation1 + - /TF1/Frames1 + - /Map1 + Splitter Ratio: 0.37704917788505554 + Tree Height: 719 + - Class: rviz/Selection + Name: Selection + - Class: rviz/Tool Properties + Expanded: + - /2D Pose Estimate1 + - /2D Nav Goal1 + - /Publish Point1 + Name: Tool Properties + Splitter Ratio: 0.5886790156364441 + - Class: rviz/Views + Expanded: + - /Current View1 + Name: Views + Splitter Ratio: 0.5 + - Class: rviz/Time + Experimental: false + Name: Time + SyncMode: 2 + SyncSource: LaserScan +Preferences: + PromptSaveOnExit: true +Toolbars: + toolButtonStyle: 2 +Visualization Manager: + Class: "" + Displays: + - Alpha: 0.5 + Cell Size: 1 + Class: rviz/Grid + Color: 160; 160; 164 + Enabled: true + Line Style: + Line Width: 0.029999999329447746 + Value: Lines + Name: Grid + Normal Cell Count: 0 + Offset: + X: 0 + Y: 0 + Z: 0 + Plane: XY + Plane Cell Count: 10 + Reference Frame: + Value: true + - Alpha: 1 + Class: rviz/RobotModel + Collision Enabled: false + Enabled: true + Links: + All Links Enabled: true + BumperB_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + BumperFL_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + BumperFR_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + CameraBottom_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + CameraBottom_optical_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + CameraDepth_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + CameraDepth_optical_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + CameraTop_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + CameraTop_optical_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + ChestButton_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + Expand Joint Details: false + Expand Link Details: false + Expand Tree: false + Head: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + HeadTouchFront_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + HeadTouchMiddle_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + HeadTouchRear_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + Hip: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + ImuBaseAccelerometer_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + ImuBaseGyrometer_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + ImuTorsoAccelerometer_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + ImuTorsoGyrometer_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + LBicep: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LElbow: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LFinger11_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LFinger12_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LFinger13_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LFinger21_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LFinger22_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LFinger23_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LFinger31_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LFinger32_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LFinger33_link: + Alpha: 1 + Show 
Axes: false + Show Trail: false + Value: true + LFinger41_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LFinger42_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LFinger43_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LForeArm: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LHandTouchBack_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + LShoulder: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LSpeaker_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + LThumb1_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LThumb2_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + Link Tree Style: Links in Alphabetic Order + Neck: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + Pelvis: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RBicep: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RElbow: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RFinger11_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RFinger12_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RFinger13_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RFinger21_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RFinger22_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RFinger23_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RFinger31_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RFinger32_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RFinger33_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RFinger41_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RFinger42_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RFinger43_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RForeArm: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RHandTouchBack_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + RShoulder: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RSpeaker_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + RThumb1_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RThumb2_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + ShovelLaser_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + SonarBack_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + SonarFront_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + SurroundingFrontLaser_device_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + SurroundingFrontLaser_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + SurroundingLeftLaser_device_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + SurroundingLeftLaser_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + SurroundingRightLaser_device_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + SurroundingRightLaser_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + Tablet_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + Tibia: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + VerticalLeftLaser_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + VerticalRightLaser_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + WheelB_link: + 
Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + WheelFL_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + WheelFR_link: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + base_footprint: + Alpha: 1 + Show Axes: false + Show Trail: false + base_link: + Alpha: 1 + Show Axes: false + Show Trail: false + l_gripper: + Alpha: 1 + Show Axes: false + Show Trail: false + l_wrist: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + r_gripper: + Alpha: 1 + Show Axes: false + Show Trail: false + r_wrist: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + torso: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + Name: RobotModel + Robot Description: robot_description + TF Prefix: "" + Update Interval: 0 + Value: true + Visual Enabled: true + - Alpha: 1 + Auto Size: + Auto Size Factor: 1 + Value: true + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 10 + Min Value: -10 + Value: true + Axis: Z + Channel Name: z + Class: rviz/DepthCloud + Color: 255; 255; 255 + Color Image Topic: /pepper_robot/camera/front/image_raw + Color Transformer: RGB8 + Color Transport Hint: raw + Decay Time: 0 + Depth Map Topic: /pepper_robot/camera/depth/image_raw + Depth Map Transport Hint: raw + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Max Intensity: 7.51200008392334 + Min Color: 0; 0; 0 + Min Intensity: 1.5169999599456787 + Name: DepthCloud + Occlusion Compensation: + Occlusion Time-Out: 30 + Value: false + Position Transformer: XYZ + Queue Size: 5 + Selectable: true + Size (Pixels): 3 + Style: Squares + Topic Filter: false + Use Fixed Frame: true + Use rainbow: true + Value: true + - Class: rviz/Image + Enabled: false + Image Topic: /pepper_robot/camera/front/image_raw + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/TF + Enabled: false + Frame Timeout: 15 + Frames: + All Enabled: true + Marker Scale: 1 + Name: TF + Show Arrows: true + Show Axes: true + Show Names: true + Tree: + {} + Update Interval: 0 + Value: false + - Alpha: 0.5 + Buffer Length: 1 + Class: rviz/Range + Color: 255; 255; 255 + Enabled: true + Name: Sonar_Front + Queue Size: 100 + Topic: /pepper_robot/sonar/front + Unreliable: false + Value: true + - Alpha: 0.5 + Buffer Length: 1 + Class: rviz/Range + Color: 255; 255; 255 + Enabled: true + Name: Sonar_Back + Queue Size: 100 + Topic: /pepper_robot/sonar/back + Unreliable: false + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 10 + Min Value: -10 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/LaserScan + Color: 255; 0; 0 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Max Intensity: 4096 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: LaserScan + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.05000000074505806 + Style: Flat Squares + Topic: /pepper_robot/laser + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: true + - Alpha: 0.699999988079071 + Class: rviz/Map + Color Scheme: map + Draw Behind: false + Enabled: true + Name: Map + Topic: /map + Unreliable: false + Use Timestamp: false + Value: true + - Alpha: 0.699999988079071 + Class: rviz/Map + Color Scheme: map + Draw Behind: false + Enabled: true + Name: Map + Topic: 
/move_base/global_costmap/costmap + Unreliable: false + Use Timestamp: false + Value: true + Enabled: true + Global Options: + Background Color: 48; 48; 48 + Default Light: true + Fixed Frame: map + Frame Rate: 30 + Name: root + Tools: + - Class: rviz/Interact + Hide Inactive Objects: true + - Class: rviz/MoveCamera + - Class: rviz/Select + - Class: rviz/FocusCamera + - Class: rviz/Measure + - Class: rviz/SetInitialPose + Theta std deviation: 0.2617993950843811 + Topic: /initialpose + X std deviation: 0.5 + Y std deviation: 0.5 + - Class: rviz/SetGoal + Topic: /move_base_simple/goal + - Class: rviz/PublishPoint + Single click: true + Topic: /clicked_point + Value: true + Views: + Current: + Class: rviz/Orbit + Distance: 9.423486709594727 + Enable Stereo Rendering: + Stereo Eye Separation: 0.05999999865889549 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Focal Point: + X: 4.725549221038818 + Y: 3.7505757808685303 + Z: 3.6268298625946045 + Focal Shape Fixed Size: true + Focal Shape Size: 0.05000000074505806 + Invert Z Axis: false + Name: Current View + Near Clip Distance: 0.009999999776482582 + Pitch: 1.5697963237762451 + Target Frame: + Value: Orbit (rviz) + Yaw: 3.130409002304077 + Saved: ~ +Window Geometry: + Displays: + collapsed: false + Height: 1016 + Hide Left Dock: false + Hide Right Dock: false + Image: + collapsed: false + QMainWindow State: 000000ff00000000fd0000000400000000000001910000035afc020000000afb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003d0000035a000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d0061006700650000000276000001210000001600fffffffb0000000a0049006d00610067006501000002b2000001130000000000000000000000010000010000000385fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003d00000385000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000005e70000003efc0100000002fb0000000800540069006d00650100000000000005e7000002eb00fffffffb0000000800540069006d00650100000000000004500000000000000000000004500000035a00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + Selection: + collapsed: false + Time: + collapsed: false + Tool Properties: + collapsed: false + Views: + collapsed: false + Width: 1511 + X: 67 + Y: 64 diff --git a/windows/MSRHoloLensSpatialMapping.sln b/windows/MSRHoloLensSpatialMapping.sln new file mode 100644 index 0000000..f941570 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping.sln @@ -0,0 +1,51 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.26228.4 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = 
"MSRHoloLensSpatialMapping", "MSRHoloLensSpatialMapping\MSRHoloLensSpatialMapping.vcxproj", "{B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|ARM = Debug|ARM + Debug|ARM64 = Debug|ARM64 + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|ARM = Release|ARM + Release|ARM64 = Release|ARM64 + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Debug|ARM.ActiveCfg = Debug|ARM + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Debug|ARM.Build.0 = Debug|ARM + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Debug|ARM.Deploy.0 = Debug|ARM + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Debug|ARM64.ActiveCfg = Debug|ARM64 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Debug|ARM64.Build.0 = Debug|ARM64 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Debug|ARM64.Deploy.0 = Debug|ARM64 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Debug|x64.ActiveCfg = Debug|x64 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Debug|x64.Build.0 = Debug|x64 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Debug|x64.Deploy.0 = Debug|x64 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Debug|x86.ActiveCfg = Debug|Win32 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Debug|x86.Build.0 = Debug|Win32 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Debug|x86.Deploy.0 = Debug|Win32 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Release|ARM.ActiveCfg = Release|ARM + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Release|ARM.Build.0 = Release|ARM + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Release|ARM.Deploy.0 = Release|ARM + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Release|ARM64.ActiveCfg = Release|ARM64 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Release|ARM64.Build.0 = Release|ARM64 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Release|ARM64.Deploy.0 = Release|ARM64 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Release|x64.ActiveCfg = Release|x64 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Release|x64.Build.0 = Release|x64 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Release|x64.Deploy.0 = Release|x64 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Release|x86.ActiveCfg = Release|Win32 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Release|x86.Build.0 = Release|Win32 + {B65D7EF6-4502-4FF4-9C74-942A42E2DB5E}.Release|x86.Deploy.0 = Release|Win32 + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {0FE83196-7E59-41FD-A4EA-E38014624DBA} + EndGlobalSection +EndGlobal diff --git a/windows/MSRHoloLensSpatialMapping/AppView.cpp b/windows/MSRHoloLensSpatialMapping/AppView.cpp new file mode 100644 index 0000000..d39f3c5 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/AppView.cpp @@ -0,0 +1,231 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. 
+// +//********************************************************* + +#include "pch.h" +#include "AppView.h" + +#include + +using namespace HoloLensNavigation; + +using namespace concurrency; +using namespace Windows::ApplicationModel; +using namespace Windows::ApplicationModel::Activation; +using namespace Windows::ApplicationModel::Core; +using namespace Windows::Foundation; +using namespace Windows::Graphics::Holographic; +using namespace Windows::UI::Core; + +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +[Platform::MTAThread] +int main(Platform::Array^) +{ + // + // The main function is only used to initialize our IFrameworkView class. + + AppViewSource^ appViewSource = ref new ::AppViewSource(); + + CoreApplication::Run(appViewSource); + + return 0; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +IFrameworkView^ AppViewSource::CreateView() +{ + return ref new AppView(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +AppView::AppView() +{ +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void AppView::Initialize(CoreApplicationView^ applicationView) +{ + // + // The first method called when the IFrameworkView is being created. + + applicationView->Activated += ref new TypedEventHandler(this, &AppView::OnViewActivated); + + // Register event handlers for app lifecycle. + CoreApplication::Suspending += ref new EventHandler(this, &AppView::OnSuspending); + CoreApplication::Resuming += ref new EventHandler(this, &AppView::OnResuming); + + // + // At this point we have access to the device and we can create device-dependent + // resources. + m_deviceResources = std::make_shared(); + + m_main = std::make_unique(m_deviceResources); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void AppView::SetWindow(CoreWindow^ window) +{ + // + // Called when the CoreWindow object is created (or re-created). + + // Register for keypress notifications. + window->KeyDown += ref new TypedEventHandler(this, &AppView::OnKeyPressed); + + // Register for notification that the app window is being closed. + window->Closed += ref new TypedEventHandler(this, &AppView::OnWindowClosed); + + // Register for notifications that the app window is losing focus. + window->VisibilityChanged += ref new TypedEventHandler(this, &AppView::OnVisibilityChanged); + + // Create a holographic space for the core window for the current view. + // Presenting holographic frames that are created by this holographic space will put + // the app into exclusive mode. 
+ m_holographicSpace = HolographicSpace::CreateForCoreWindow(window); + + // The DeviceResources class uses the preferred DXGI adapter ID from the holographic + // space (when available) to create a Direct3D device. The HolographicSpace + // uses this ID3D11Device to create and manage device-based resources such as + // swap chains. + m_deviceResources->SetHolographicSpace(m_holographicSpace); + + // The main class uses the holographic space for updates and rendering. + m_main->SetHolographicSpace(m_holographicSpace); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void AppView::Load(Platform::String^ entryPoint) +{ + // + // The Load method can be used to initialize scene resources or to load a + // previously saved app state. +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void AppView::Run() +{ + // + // This method is called after the window becomes active. It oversees the + // update, draw, and present loop, and it also oversees window message processing. + + while (!m_windowClosed) + { + if (m_windowVisible && (m_holographicSpace != nullptr)) + { + CoreWindow::GetForCurrentThread()->Dispatcher->ProcessEvents(CoreProcessEventsOption::ProcessAllIfPresent); + + m_main->StateReceiver(); + + HolographicFrame^ holographicFrame = m_main->Update(); + + if (m_main->Render(holographicFrame)) + { + // The holographic frame has an API that presents the swap chain for each + // holographic camera. + m_deviceResources->Present(holographicFrame); + } + + m_main->StateSender(); + } + else + { + CoreWindow::GetForCurrentThread()->Dispatcher->ProcessEvents(CoreProcessEventsOption::ProcessOneAndAllPending); + } + } +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void AppView::Uninitialize() +{ + // + // Terminate events do not cause Uninitialize to be called. It will be called if your IFrameworkView + // class is torn down while the app is in the foreground. + // This method is not often used, but IFrameworkView requires it and it will be called for + // holographic apps. +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void AppView::OnViewActivated(CoreApplicationView^ sender, IActivatedEventArgs^ args) +{ + // + // Called when the app view is activated. Activates the app's CoreWindow. + + // Run() won't start until the CoreWindow is activated. + sender->CoreWindow->Activate(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void AppView::OnSuspending(Platform::Object^ sender, SuspendingEventArgs^ args) +{ + // + // Save app state asynchronously after requesting a deferral. Holding a deferral + // indicates that the application is busy performing suspending operations. 
Be + // aware that a deferral may not be held indefinitely. After about five seconds, + // the app will be forced to exit. + SuspendingDeferral^ deferral = args->SuspendingOperation->GetDeferral(); + + create_task([this, deferral]() + { + m_deviceResources->Trim(); + + if (m_main != nullptr) + { + m_main->SaveAppState(); + } + + deferral->Complete(); + }); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void AppView::OnResuming(Platform::Object^ sender, Platform::Object^ args) +{ + // + // Restore any data or state that was unloaded on suspend. By default, data + // and state are persisted when resuming from suspend. Note that this event + // does not occur if the app was previously terminated. + + if (m_main != nullptr) + { + m_main->LoadAppState(); + } +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void AppView::OnVisibilityChanged(CoreWindow^ sender, VisibilityChangedEventArgs^ args) +{ + m_windowVisible = args->Visible; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void AppView::OnWindowClosed(CoreWindow^ sender, CoreWindowEventArgs^ args) +{ + m_windowClosed = true; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void AppView::OnKeyPressed(CoreWindow^ sender, KeyEventArgs^ args) +{ + // This code does not use keyboard input. +} diff --git a/windows/MSRHoloLensSpatialMapping/AppView.h b/windows/MSRHoloLensSpatialMapping/AppView.h new file mode 100644 index 0000000..499dd21 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/AppView.h @@ -0,0 +1,66 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#pragma once + +#include "Common\DeviceResources.h" +#include "MSRHololensSpatialMappingMain.h" + +namespace HoloLensNavigation +{ + // IFrameworkView class. Connects the app with the Windows shell and handles application lifecycle events. + ref class AppView sealed : public Windows::ApplicationModel::Core::IFrameworkView + { + public: + AppView(); + + // IFrameworkView Methods. + virtual void Initialize(Windows::ApplicationModel::Core::CoreApplicationView^ applicationView); + virtual void SetWindow(Windows::UI::Core::CoreWindow^ window); + virtual void Load(Platform::String^ entryPoint); + virtual void Run(); + virtual void Uninitialize(); + + protected: + // Application lifecycle event handlers. 
+ void OnViewActivated(Windows::ApplicationModel::Core::CoreApplicationView^ sender, Windows::ApplicationModel::Activation::IActivatedEventArgs^ args); + void OnSuspending(Platform::Object^ sender, Windows::ApplicationModel::SuspendingEventArgs^ args); + void OnResuming(Platform::Object^ sender, Platform::Object^ args); + + // Window event handlers. + void OnVisibilityChanged(Windows::UI::Core::CoreWindow^ sender, Windows::UI::Core::VisibilityChangedEventArgs^ args); + void OnWindowClosed(Windows::UI::Core::CoreWindow^ sender, Windows::UI::Core::CoreWindowEventArgs^ args); + + // CoreWindow input event handlers. + void OnKeyPressed(Windows::UI::Core::CoreWindow^ sender, Windows::UI::Core::KeyEventArgs^ args); + + // DisplayInformation event handlers. + void OnDisplayContentsInvalidated(Windows::Graphics::Display::DisplayInformation^ sender, Platform::Object^ args); + + private: + std::unique_ptr m_main; + + std::shared_ptr m_deviceResources; + bool m_windowClosed = false; + bool m_windowVisible = true; + + // The holographic space the app will use for rendering. + Windows::Graphics::Holographic::HolographicSpace^ m_holographicSpace = nullptr; + }; + + // The entry point for the app. + ref class AppViewSource sealed : Windows::ApplicationModel::Core::IFrameworkViewSource + { + public: + virtual Windows::ApplicationModel::Core::IFrameworkView^ CreateView(); + }; + +} // namespace HoloLensNavigation diff --git a/windows/MSRHoloLensSpatialMapping/Assets/LockScreenLogo.scale-200.png b/windows/MSRHoloLensSpatialMapping/Assets/LockScreenLogo.scale-200.png new file mode 100644 index 0000000..0ce8b8c Binary files /dev/null and b/windows/MSRHoloLensSpatialMapping/Assets/LockScreenLogo.scale-200.png differ diff --git a/windows/MSRHoloLensSpatialMapping/Assets/Logo_RobotHandshake_transparent.png b/windows/MSRHoloLensSpatialMapping/Assets/Logo_RobotHandshake_transparent.png new file mode 100644 index 0000000..1e1f316 Binary files /dev/null and b/windows/MSRHoloLensSpatialMapping/Assets/Logo_RobotHandshake_transparent.png differ diff --git a/windows/MSRHoloLensSpatialMapping/Assets/SplashScreen.scale-200.png b/windows/MSRHoloLensSpatialMapping/Assets/SplashScreen.scale-200.png new file mode 100644 index 0000000..76d0a81 Binary files /dev/null and b/windows/MSRHoloLensSpatialMapping/Assets/SplashScreen.scale-200.png differ diff --git a/windows/MSRHoloLensSpatialMapping/Assets/Square150x150Logo.scale-200.png b/windows/MSRHoloLensSpatialMapping/Assets/Square150x150Logo.scale-200.png new file mode 100644 index 0000000..dde3207 Binary files /dev/null and b/windows/MSRHoloLensSpatialMapping/Assets/Square150x150Logo.scale-200.png differ diff --git a/windows/MSRHoloLensSpatialMapping/Assets/Square44x44Logo.scale-200.png b/windows/MSRHoloLensSpatialMapping/Assets/Square44x44Logo.scale-200.png new file mode 100644 index 0000000..a6bbd84 Binary files /dev/null and b/windows/MSRHoloLensSpatialMapping/Assets/Square44x44Logo.scale-200.png differ diff --git a/windows/MSRHoloLensSpatialMapping/Assets/Square44x44Logo.targetsize-24_altform-unplated.png b/windows/MSRHoloLensSpatialMapping/Assets/Square44x44Logo.targetsize-24_altform-unplated.png new file mode 100644 index 0000000..a18fcdf Binary files /dev/null and b/windows/MSRHoloLensSpatialMapping/Assets/Square44x44Logo.targetsize-24_altform-unplated.png differ diff --git a/windows/MSRHoloLensSpatialMapping/Assets/StoreLogo.png b/windows/MSRHoloLensSpatialMapping/Assets/StoreLogo.png new file mode 100644 index 0000000..13d17a5 Binary files /dev/null and 
b/windows/MSRHoloLensSpatialMapping/Assets/StoreLogo.png differ diff --git a/windows/MSRHoloLensSpatialMapping/Assets/Wide310x150Logo.scale-200.png b/windows/MSRHoloLensSpatialMapping/Assets/Wide310x150Logo.scale-200.png new file mode 100644 index 0000000..bc160e1 Binary files /dev/null and b/windows/MSRHoloLensSpatialMapping/Assets/Wide310x150Logo.scale-200.png differ diff --git a/windows/MSRHoloLensSpatialMapping/Common/CameraResources.cpp b/windows/MSRHoloLensSpatialMapping/Common/CameraResources.cpp new file mode 100644 index 0000000..8fa0c3b --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Common/CameraResources.cpp @@ -0,0 +1,295 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "pch.h" + +#include "CameraResources.h" +#include "Common\DirectXHelper.h" +#include "DeviceResources.h" +#include + +using namespace HoloLensNavigation; + +using namespace DirectX; +using namespace Microsoft::WRL; +using namespace Windows::Foundation::Numerics; +using namespace Windows::Graphics::DirectX::Direct3D11; +using namespace Windows::Graphics::Holographic; +using namespace Windows::Perception::Spatial; + +DX::CameraResources::CameraResources(HolographicCamera^ camera) : + m_holographicCamera(camera), + m_isStereo(camera->IsStereo), + m_d3dRenderTargetSize(camera->RenderTargetSize) +{ + m_d3dViewport = CD3D11_VIEWPORT( + 0.f, 0.f, + m_d3dRenderTargetSize.Width, + m_d3dRenderTargetSize.Height + ); +}; + +// Updates resources associated with a holographic camera's swap chain. +// The app does not access the swap chain directly, but it does create +// resource views for the back buffer. +void DX::CameraResources::CreateResourcesForBackBuffer( + DX::DeviceResources* pDeviceResources, + HolographicCameraRenderingParameters^ cameraParameters + ) +{ + const auto device = pDeviceResources->GetD3DDevice(); + + // Get the WinRT object representing the holographic camera's back buffer. + IDirect3DSurface^ surface = cameraParameters->Direct3D11BackBuffer; + + // Get a DXGI interface for the holographic camera's back buffer. + // Holographic cameras do not provide the DXGI swap chain, which is owned + // by the system. The Direct3D back buffer resource is provided using WinRT + // interop APIs. + ComPtr resource; + ThrowIfFailed( + GetDXGIInterfaceFromObject(surface, IID_PPV_ARGS(&resource)) + ); + + // Get a Direct3D interface for the holographic camera's back buffer. + ComPtr cameraBackBuffer; + ThrowIfFailed( + resource.As(&cameraBackBuffer) + ); + + // Determine if the back buffer has changed. If so, ensure that the render target view + // is for the current back buffer. + if (m_d3dBackBuffer.Get() != cameraBackBuffer.Get()) + { + // This can change every frame as the system moves to the next buffer in the + // swap chain. This mode of operation will occur when certain rendering modes + // are activated. + m_d3dBackBuffer = cameraBackBuffer; + + // Create a render target view of the back buffer. + // Creating this resource is inexpensive, and is better than keeping track of + // the back buffers in order to pre-allocate render target views for each one. 
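            // Once created, this render target view (and the matching depth stencil view below) is what the
            // app's per-frame render path binds and clears for each holographic camera. A rough sketch of the
            // assumed caller side (the actual call site lives in the Main class, not in this file):
            //
            //   ID3D11RenderTargetView* const targets[1] = { pCameraResources->GetBackBufferRenderTargetView() };
            //   context->OMSetRenderTargets(1, targets, pCameraResources->GetDepthStencilView());
            //   context->ClearRenderTargetView(targets[0], DirectX::Colors::Transparent);
            //   context->ClearDepthStencilView(pCameraResources->GetDepthStencilView(),
            //                                  D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0);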
+ DX::ThrowIfFailed( + device->CreateRenderTargetView( + m_d3dBackBuffer.Get(), + nullptr, + &m_d3dRenderTargetView + ) + ); + + // Get the DXGI format for the back buffer. + // This information can be accessed by the app using CameraResources::GetBackBufferDXGIFormat(). + D3D11_TEXTURE2D_DESC backBufferDesc; + m_d3dBackBuffer->GetDesc(&backBufferDesc); + m_dxgiFormat = backBufferDesc.Format; + + // Check for render target size changes. + Windows::Foundation::Size currentSize = m_holographicCamera->RenderTargetSize; + if (m_d3dRenderTargetSize != currentSize) + { + // Set render target size. + m_d3dRenderTargetSize = currentSize; + + // A new depth stencil view is also needed. + m_d3dDepthStencilView.Reset(); + } + } + + // Refresh depth stencil resources, if needed. + if (m_d3dDepthStencilView == nullptr) + { + // Create a depth stencil view for use with 3D rendering if needed. + CD3D11_TEXTURE2D_DESC depthStencilDesc( + DXGI_FORMAT_R16_TYPELESS, + static_cast(m_d3dRenderTargetSize.Width), + static_cast(m_d3dRenderTargetSize.Height), + m_isStereo ? 2 : 1, // Create two textures when rendering in stereo. + 1, // Use a single mipmap level. + D3D11_BIND_DEPTH_STENCIL | D3D11_BIND_SHADER_RESOURCE + ); + + DX::ThrowIfFailed( + device->CreateTexture2D( + &depthStencilDesc, + nullptr, + &m_d3dDepthStencil + ) + ); + + CD3D11_DEPTH_STENCIL_VIEW_DESC depthStencilViewDesc( + m_isStereo ? D3D11_DSV_DIMENSION_TEXTURE2DARRAY : D3D11_DSV_DIMENSION_TEXTURE2D, + DXGI_FORMAT_D16_UNORM + ); + DX::ThrowIfFailed( + device->CreateDepthStencilView( + m_d3dDepthStencil.Get(), + &depthStencilViewDesc, + &m_d3dDepthStencilView + ) + ); + } + + // Create the constant buffer, if needed. + if (m_viewProjectionConstantBuffer == nullptr) + { + // Create a constant buffer to store view and projection matrices for the camera. + CD3D11_BUFFER_DESC constantBufferDesc(sizeof(ViewProjectionConstantBuffer), D3D11_BIND_CONSTANT_BUFFER); + DX::ThrowIfFailed( + device->CreateBuffer( + &constantBufferDesc, + nullptr, + &m_viewProjectionConstantBuffer + ) + ); + } +} + +// Releases resources associated with a back buffer. +void DX::CameraResources::ReleaseResourcesForBackBuffer(DX::DeviceResources* pDeviceResources) +{ + const auto context = pDeviceResources->GetD3DDeviceContext(); + + // Release camera-specific resources. + m_d3dBackBuffer.Reset(); + m_d3dDepthStencil.Reset(); + m_d3dRenderTargetView.Reset(); + m_d3dDepthStencilView.Reset(); + m_viewProjectionConstantBuffer.Reset(); + + // Ensure system references to the back buffer are released by clearing the render + // target from the graphics pipeline state, and then flushing the Direct3D context. + ID3D11RenderTargetView* nullViews[D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT] = { nullptr }; + context->OMSetRenderTargets(ARRAYSIZE(nullViews), nullViews, nullptr); + context->Flush(); +} + +// Updates the view/projection constant buffer for a holographic camera. +void DX::CameraResources::UpdateViewProjectionBuffer( + std::shared_ptr deviceResources, + HolographicCameraPose^ cameraPose, + SpatialCoordinateSystem^ coordinateSystem + ) +{ + // The system changes the viewport on a per-frame basis for system optimizations. + m_d3dViewport = CD3D11_VIEWPORT( + cameraPose->Viewport.Left, + cameraPose->Viewport.Top, + cameraPose->Viewport.Width, + cameraPose->Viewport.Height + ); + + // The projection transform for each frame is provided by the HolographicCameraPose. 
+ HolographicStereoTransform cameraProjectionTransform = cameraPose->ProjectionTransform; + + // Get a container object with the view and projection matrices for the given + // pose in the given coordinate system. + Platform::IBox^ viewTransformContainer = cameraPose->TryGetViewTransform(coordinateSystem); + + // If TryGetViewTransform returns a null pointer, that means the pose and coordinate + // system cannot be understood relative to one another; content cannot be rendered + // in this coordinate system for the duration of the current frame. + // This usually means that positional tracking is not active for the current frame, in + // which case it is possible to use a SpatialLocatorAttachedFrameOfReference to render + // content that is not world-locked instead. + ViewProjectionConstantBuffer viewProjectionConstantBufferData; + bool viewTransformAcquired = viewTransformContainer != nullptr; + if (viewTransformAcquired) + { + // Otherwise, the set of view transforms can be retrieved. + HolographicStereoTransform viewCoordinateSystemTransform = viewTransformContainer->Value; + + // Update the view matrices. Holographic cameras (such as Microsoft HoloLens) are + // constantly moving relative to the world. The view matrices need to be updated + // every frame. + XMStoreFloat4x4( + &viewProjectionConstantBufferData.viewProjection[0], + XMMatrixTranspose(XMLoadFloat4x4(&viewCoordinateSystemTransform.Left) * XMLoadFloat4x4(&cameraProjectionTransform.Left)) + ); + XMStoreFloat4x4( + &viewProjectionConstantBufferData.viewProjection[1], + XMMatrixTranspose(XMLoadFloat4x4(&viewCoordinateSystemTransform.Right) * XMLoadFloat4x4(&cameraProjectionTransform.Right)) + ); + + float4x4 viewInverse; + bool invertible = Windows::Foundation::Numerics::invert(viewCoordinateSystemTransform.Left, &viewInverse); + if (invertible) + { + // For the purposes of this sample, use the camera position as a light source. + float4 cameraPosition = float4(viewInverse.m41, viewInverse.m42, viewInverse.m43, 0.f); + float4 lightPosition = cameraPosition + float4(0.f, 0.25f, 0.f, 0.f); + + XMStoreFloat4(&viewProjectionConstantBufferData.cameraPosition, DirectX::XMLoadFloat4(&cameraPosition)); + XMStoreFloat4(&viewProjectionConstantBufferData.lightPosition, DirectX::XMLoadFloat4(&lightPosition)); + } + } + + // Use the D3D device context to update Direct3D device-based resources. + const auto context = deviceResources->GetD3DDeviceContext(); + + // Loading is asynchronous. Resources must be created before they can be updated. + if (context == nullptr || m_viewProjectionConstantBuffer == nullptr || !viewTransformAcquired) + { + m_framePending = false; + } + else + { + // Update the view and projection matrices. + context->UpdateSubresource( + m_viewProjectionConstantBuffer.Get(), + 0, + nullptr, + &viewProjectionConstantBufferData, + 0, + 0 + ); + + m_framePending = true; + } +} + +// Gets the view-projection constant buffer for the HolographicCamera and attaches it +// to the shader pipeline. +bool DX::CameraResources::AttachViewProjectionBuffer( + std::shared_ptr deviceResources + ) +{ + // This method uses Direct3D device-based resources. + const auto context = deviceResources->GetD3DDeviceContext(); + + // Loading is asynchronous. Resources must be created before they can be updated. + // Cameras can also be added asynchronously, in which case they must be initialized + // before they can be used. 
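    // Per frame and per camera, this call is typically paired with UpdateViewProjectionBuffer() by the app's
    // render path. A sketch of the assumed caller (member names such as m_meshRenderer and m_drawWireframe
    // are placeholders; the real call site is in the Main class, not in this file):
    //
    //   pCameraResources->UpdateViewProjectionBuffer(m_deviceResources, cameraPose, currentCoordinateSystem);
    //   const bool cameraActive = pCameraResources->AttachViewProjectionBuffer(m_deviceResources);
    //   if (cameraActive)
    //   {
    //       m_meshRenderer->Render(pCameraResources->IsRenderingStereoscopic(), m_drawWireframe);
    //   }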
+ if (context == nullptr || m_viewProjectionConstantBuffer == nullptr || m_framePending == false) + { + return false; + } + + // Set the viewport for this camera. + context->RSSetViewports(1, &m_d3dViewport); + + // Send the constant buffer to the vertex shader. + context->VSSetConstantBuffers( + 1, + 1, + m_viewProjectionConstantBuffer.GetAddressOf() + ); + + // Send the constant buffer to the pixel shader. + context->PSSetConstantBuffers( + 1, + 1, + m_viewProjectionConstantBuffer.GetAddressOf() + ); + + m_framePending = false; + + return true; +} diff --git a/windows/MSRHoloLensSpatialMapping/Common/CameraResources.h b/windows/MSRHoloLensSpatialMapping/Common/CameraResources.h new file mode 100644 index 0000000..d9f9659 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Common/CameraResources.h @@ -0,0 +1,82 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#pragma once + +#include "Content\ShaderStructures.h" + +namespace DX +{ + class DeviceResources; + + // Manages DirectX device resources that are specific to a holographic camera, such as the + // back buffer, ViewProjection constant buffer, and viewport. + class CameraResources + { + public: + CameraResources(Windows::Graphics::Holographic::HolographicCamera^ holographicCamera); + + void CreateResourcesForBackBuffer( + DX::DeviceResources* pDeviceResources, + Windows::Graphics::Holographic::HolographicCameraRenderingParameters^ cameraParameters + ); + void ReleaseResourcesForBackBuffer( + DX::DeviceResources* pDeviceResources + ); + + void UpdateViewProjectionBuffer( + std::shared_ptr deviceResources, + Windows::Graphics::Holographic::HolographicCameraPose^ cameraPose, + Windows::Perception::Spatial::SpatialCoordinateSystem^ coordinateSystem); + + bool AttachViewProjectionBuffer( + std::shared_ptr deviceResources); + + // Direct3D device resources. + ID3D11RenderTargetView* GetBackBufferRenderTargetView() const { return m_d3dRenderTargetView.Get(); } + ID3D11DepthStencilView* GetDepthStencilView() const { return m_d3dDepthStencilView.Get(); } + ID3D11Texture2D* GetBackBufferTexture2D() const { return m_d3dBackBuffer.Get(); } + ID3D11Texture2D* GetDepthStencilTexture2D() const { return m_d3dDepthStencil.Get(); } + D3D11_VIEWPORT GetViewport() const { return m_d3dViewport; } + DXGI_FORMAT GetBackBufferDXGIFormat() const { return m_dxgiFormat; } + + // Render target properties. + Windows::Foundation::Size GetRenderTargetSize() const { return m_d3dRenderTargetSize; } + bool IsRenderingStereoscopic() const { return m_isStereo; } + + // The holographic camera these resources are for. + Windows::Graphics::Holographic::HolographicCamera^ GetHolographicCamera() const { return m_holographicCamera; } + + private: + // Direct3D rendering objects. Required for 3D. + Microsoft::WRL::ComPtr m_d3dRenderTargetView; + Microsoft::WRL::ComPtr m_d3dDepthStencilView; + Microsoft::WRL::ComPtr m_d3dBackBuffer; + Microsoft::WRL::ComPtr m_d3dDepthStencil; + + // Device resource to store view and projection matrices. + Microsoft::WRL::ComPtr m_viewProjectionConstantBuffer; + + // Direct3D rendering properties. 
+ DXGI_FORMAT m_dxgiFormat; + Windows::Foundation::Size m_d3dRenderTargetSize; + D3D11_VIEWPORT m_d3dViewport; + + // Indicates whether the camera supports stereoscopic rendering. + bool m_isStereo = false; + + // Indicates whether this camera has a pending frame. + bool m_framePending = false; + + // Pointer to the holographic camera these resources are for. + Windows::Graphics::Holographic::HolographicCamera^ m_holographicCamera = nullptr; + }; +} diff --git a/windows/MSRHoloLensSpatialMapping/Common/DeviceResources.cpp b/windows/MSRHoloLensSpatialMapping/Common/DeviceResources.cpp new file mode 100644 index 0000000..e34345f --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Common/DeviceResources.cpp @@ -0,0 +1,361 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "pch.h" +#include "DeviceResources.h" +#include "DirectXHelper.h" + +#include +#include + +using namespace D2D1; +using namespace Microsoft::WRL; +using namespace Windows::Graphics::DirectX::Direct3D11; +using namespace Windows::Graphics::Display; +using namespace Windows::Graphics::Holographic; + +// Constructor for DeviceResources. +DX::DeviceResources::DeviceResources() +{ + CreateDeviceIndependentResources(); +} + +// Configures resources that don't depend on the Direct3D device. +void DX::DeviceResources::CreateDeviceIndependentResources() +{ + // Initialize Direct2D resources. + D2D1_FACTORY_OPTIONS options {}; + +#if defined(_DEBUG) + // If the project is in a debug build, enable Direct2D debugging via SDK Layers. + options.debugLevel = D2D1_DEBUG_LEVEL_INFORMATION; +#endif + + // Initialize the Direct2D Factory. + DX::ThrowIfFailed( + D2D1CreateFactory( + D2D1_FACTORY_TYPE_SINGLE_THREADED, + __uuidof(ID2D1Factory2), + &options, + &m_d2dFactory + ) + ); + + // Initialize the DirectWrite Factory. + DX::ThrowIfFailed( + DWriteCreateFactory( + DWRITE_FACTORY_TYPE_SHARED, + __uuidof(IDWriteFactory2), + &m_dwriteFactory + ) + ); + + // Initialize the Windows Imaging Component (WIC) Factory. + DX::ThrowIfFailed( + CoCreateInstance( + CLSID_WICImagingFactory2, + nullptr, + CLSCTX_INPROC_SERVER, + IID_PPV_ARGS(&m_wicFactory) + ) + ); +} + +void DX::DeviceResources::SetHolographicSpace(HolographicSpace^ holographicSpace) +{ + // Cache the holographic space. Used to re-initalize during device-lost scenarios. + m_holographicSpace = holographicSpace; + + InitializeUsingHolographicSpace(); +} + +void DX::DeviceResources::InitializeUsingHolographicSpace() +{ + // The holographic space might need to determine which adapter supports + // holograms, in which case it will specify a non-zero PrimaryAdapterId. + LUID id = + { + m_holographicSpace->PrimaryAdapterId.LowPart, + m_holographicSpace->PrimaryAdapterId.HighPart + }; + + // When a primary adapter ID is given to the app, the app should find + // the corresponding DXGI adapter and use it to create Direct3D devices + // and device contexts. Otherwise, there is no restriction on the DXGI + // adapter the app can use. 
+ if ((id.HighPart != 0) && (id.LowPart != 0)) + { + UINT createFlags = 0; +#ifdef DEBUG + if (DX::SdkLayersAvailable()) + { + createFlags |= DXGI_CREATE_FACTORY_DEBUG; + } +#endif + // Create the DXGI factory. + ComPtr dxgiFactory; + DX::ThrowIfFailed( + CreateDXGIFactory2( + createFlags, + IID_PPV_ARGS(&dxgiFactory) + ) + ); + ComPtr dxgiFactory4; + DX::ThrowIfFailed(dxgiFactory.As(&dxgiFactory4)); + + // Retrieve the adapter specified by the holographic space. + DX::ThrowIfFailed( + dxgiFactory4->EnumAdapterByLuid( + id, + IID_PPV_ARGS(&m_dxgiAdapter) + ) + ); + } + else + { + m_dxgiAdapter.Reset(); + } + + CreateDeviceResources(); + + m_holographicSpace->SetDirect3D11Device(m_d3dInteropDevice); +} + +// Configures the Direct3D device, and stores handles to it and the device context. +void DX::DeviceResources::CreateDeviceResources() +{ + // This flag adds support for surfaces with a different color channel ordering + // than the API default. It is required for compatibility with Direct2D. + UINT creationFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT; + +#if defined(_DEBUG) + if (DX::SdkLayersAvailable()) + { + // If the project is in a debug build, enable debugging via SDK Layers with this flag. + creationFlags |= D3D11_CREATE_DEVICE_DEBUG; + } +#endif + + // This array defines the set of DirectX hardware feature levels this app will support. + // Note the ordering should be preserved. + // Note that HoloLens supports feature level 11.1. The HoloLens emulator is also capable + // of running on graphics cards starting with feature level 10.0. + D3D_FEATURE_LEVEL featureLevels [] = + { + D3D_FEATURE_LEVEL_12_1, + D3D_FEATURE_LEVEL_12_0, + D3D_FEATURE_LEVEL_11_1, + D3D_FEATURE_LEVEL_11_0, + D3D_FEATURE_LEVEL_10_1, + D3D_FEATURE_LEVEL_10_0 + }; + + // Create the Direct3D 11 API device object and a corresponding context. + ComPtr device; + ComPtr context; + + const HRESULT hr = D3D11CreateDevice( + m_dxgiAdapter.Get(), // Either nullptr, or the primary adapter determined by Windows Holographic. + D3D_DRIVER_TYPE_HARDWARE, // Create a device using the hardware graphics driver. + 0, // Should be 0 unless the driver is D3D_DRIVER_TYPE_SOFTWARE. + creationFlags, // Set debug and Direct2D compatibility flags. + featureLevels, // List of feature levels this app can support. + ARRAYSIZE(featureLevels), // Size of the list above. + D3D11_SDK_VERSION, // Always set this to D3D11_SDK_VERSION for Windows Runtime apps. + &device, // Returns the Direct3D device created. + &m_d3dFeatureLevel, // Returns feature level of device created. + &context // Returns the device immediate context. + ); + + if (FAILED(hr)) + { + // If the initialization fails, fall back to the WARP device. + // For more information on WARP, see: + // http://go.microsoft.com/fwlink/?LinkId=286690 + DX::ThrowIfFailed( + D3D11CreateDevice( + nullptr, // Use the default DXGI adapter for WARP. + D3D_DRIVER_TYPE_WARP, // Create a WARP device instead of a hardware device. + 0, + creationFlags, + featureLevels, + ARRAYSIZE(featureLevels), + D3D11_SDK_VERSION, + &device, + &m_d3dFeatureLevel, + &context + ) + ); + } + + // Store pointers to the Direct3D device and immediate context. + DX::ThrowIfFailed( + device.As(&m_d3dDevice) + ); + + DX::ThrowIfFailed( + context.As(&m_d3dContext) + ); + + // Acquire the DXGI interface for the Direct3D device. + ComPtr dxgiDevice; + DX::ThrowIfFailed( + m_d3dDevice.As(&dxgiDevice) + ); + + // Wrap the native device using a WinRT interop object. 
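    // CreateDirect3DDevice is the inline interop helper declared in <windows.graphics.directx.direct3d11.interop.h>.
    // As a rough sketch (simplified error handling), it does approximately the following:
    //
    //   Microsoft::WRL::ComPtr<IInspectable> inspectable;
    //   DX::ThrowIfFailed(CreateDirect3D11DeviceFromDXGIDevice(dxgiDevice.Get(), &inspectable));
    //   auto interopDevice = safe_cast<Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice^>(
    //       reinterpret_cast<Platform::Object^>(inspectable.Get()));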
+ m_d3dInteropDevice = CreateDirect3DDevice(dxgiDevice.Get()); + + // Cache the DXGI adapter. + // This is for the case of no preferred DXGI adapter, or fallback to WARP. + ComPtr dxgiAdapter; + DX::ThrowIfFailed( + dxgiDevice->GetAdapter(&dxgiAdapter) + ); + DX::ThrowIfFailed( + dxgiAdapter.As(&m_dxgiAdapter) + ); + + // Check for device support for the optional feature that allows setting the render target array index from the vertex shader stage. + D3D11_FEATURE_DATA_D3D11_OPTIONS3 options; + m_d3dDevice->CheckFeatureSupport(D3D11_FEATURE_D3D11_OPTIONS3, &options, sizeof(options)); + if (options.VPAndRTArrayIndexFromAnyShaderFeedingRasterizer) + { + m_supportsVprt = true; + } +} + +// Validates the back buffer for each HolographicCamera and recreates +// resources for back buffers that have changed. +// Locks the set of holographic camera resources until the function exits. +void DX::DeviceResources::EnsureCameraResources( + HolographicFrame^ frame, + HolographicFramePrediction^ prediction) +{ + UseHolographicCameraResources([this, frame, prediction](std::map>& cameraResourceMap) + { + for (const auto& pose : prediction->CameraPoses) + { + HolographicCameraRenderingParameters^ renderingParameters = frame->GetRenderingParameters(pose); + CameraResources* pCameraResources = cameraResourceMap[pose->HolographicCamera->Id].get(); + + pCameraResources->CreateResourcesForBackBuffer(this, renderingParameters); + } + }); +} + +// Prepares to allocate resources and adds resource views for a camera. +// Locks the set of holographic camera resources until the function exits. +void DX::DeviceResources::AddHolographicCamera(HolographicCamera^ camera) +{ + UseHolographicCameraResources([this, camera](std::map>& cameraResourceMap) + { + cameraResourceMap[camera->Id] = std::make_unique(camera); + }); +} + +// Deallocates resources for a camera and removes the camera from the set. +// Locks the set of holographic camera resources until the function exits. +void DX::DeviceResources::RemoveHolographicCamera(HolographicCamera^ camera) +{ + UseHolographicCameraResources([this, camera](std::map>& cameraResourceMap) + { + CameraResources* pCameraResources = cameraResourceMap[camera->Id].get(); + + if (pCameraResources != nullptr) + { + pCameraResources->ReleaseResourcesForBackBuffer(this); + cameraResourceMap.erase(camera->Id); + } + }); +} + +// Recreate all device resources and set them back to the current state. +// Locks the set of holographic camera resources until the function exits. +void DX::DeviceResources::HandleDeviceLost() +{ + if (m_deviceNotify != nullptr) + { + m_deviceNotify->OnDeviceLost(); + } + + UseHolographicCameraResources([this](std::map>& cameraResourceMap) + { + for (auto& pair : cameraResourceMap) + { + CameraResources* pCameraResources = pair.second.get(); + pCameraResources->ReleaseResourcesForBackBuffer(this); + } + }); + + InitializeUsingHolographicSpace(); + + if (m_deviceNotify != nullptr) + { + m_deviceNotify->OnDeviceRestored(); + } +} + +// Register our DeviceNotify to be informed on device lost and creation. +void DX::DeviceResources::RegisterDeviceNotify(DX::IDeviceNotify* deviceNotify) +{ + m_deviceNotify = deviceNotify; +} + +// Call this method when the app suspends. It provides a hint to the driver that the app +// is entering an idle state and that temporary buffers can be reclaimed for use by other apps. 
+void DX::DeviceResources::Trim() +{ + m_d3dContext->ClearState(); + + ComPtr dxgiDevice; + DX::ThrowIfFailed(m_d3dDevice.As(&dxgiDevice)); + dxgiDevice->Trim(); +} + +// Present the contents of the swap chain to the screen. +// Locks the set of holographic camera resources until the function exits. +void DX::DeviceResources::Present(HolographicFrame^ frame) +{ + // By default, this API waits for the frame to finish before it returns. + // Holographic apps should wait for the previous frame to finish before + // starting work on a new frame. This allows for better results from + // holographic frame predictions. + HolographicFramePresentResult presentResult = frame->PresentUsingCurrentPrediction(); + + HolographicFramePrediction^ prediction = frame->CurrentPrediction; + UseHolographicCameraResources([this, prediction](std::map>& cameraResourceMap) + { + for (auto cameraPose : prediction->CameraPoses) + { + // This represents the device-based resources for a HolographicCamera. + DX::CameraResources* pCameraResources = cameraResourceMap[cameraPose->HolographicCamera->Id].get(); + + // Discard the contents of the render target. + // This is a valid operation only when the existing contents will be + // entirely overwritten. If dirty or scroll rects are used, this call + // should be removed. + m_d3dContext->DiscardView(pCameraResources->GetBackBufferRenderTargetView()); + + // Discard the contents of the depth stencil. + m_d3dContext->DiscardView(pCameraResources->GetDepthStencilView()); + } + }); + + // The PresentUsingCurrentPrediction API will detect when the graphics device + // changes or becomes invalid. When this happens, it is considered a Direct3D + // device lost scenario. + if (presentResult == HolographicFramePresentResult::DeviceRemoved) + { + // The Direct3D device, context, and resources should be recreated. + HandleDeviceLost(); + } +} diff --git a/windows/MSRHoloLensSpatialMapping/Common/DeviceResources.h b/windows/MSRHoloLensSpatialMapping/Common/DeviceResources.h new file mode 100644 index 0000000..8fbb3db --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Common/DeviceResources.h @@ -0,0 +1,117 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#pragma once + +#include "CameraResources.h" + +namespace DX +{ + // Provides an interface for an application that owns DeviceResources to be notified of the device being lost or created. + interface IDeviceNotify + { + virtual void OnDeviceLost() = 0; + virtual void OnDeviceRestored() = 0; + }; + + // Creates and manages a Direct3D device and immediate context, Direct2D device and context (for debug), and the holographic swap chain. + class DeviceResources + { + public: + DeviceResources(); + + // Public methods related to Direct3D devices. + void HandleDeviceLost(); + void RegisterDeviceNotify(IDeviceNotify* deviceNotify); + void Trim(); + void Present(Windows::Graphics::Holographic::HolographicFrame^ frame); + + // Public methods related to holographic devices. 
+ void SetHolographicSpace(Windows::Graphics::Holographic::HolographicSpace^ space); + void EnsureCameraResources( + Windows::Graphics::Holographic::HolographicFrame^ frame, + Windows::Graphics::Holographic::HolographicFramePrediction^ prediction); + + void AddHolographicCamera(Windows::Graphics::Holographic::HolographicCamera^ camera); + void RemoveHolographicCamera(Windows::Graphics::Holographic::HolographicCamera^ camera); + + // Holographic accessors. + template + RetType UseHolographicCameraResources(const LCallback& callback); + + Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice^ + GetD3DInteropDevice() const { return m_d3dInteropDevice; } + + // D3D accessors. + ID3D11Device4* GetD3DDevice() const { return m_d3dDevice.Get(); } + ID3D11DeviceContext3* GetD3DDeviceContext() const { return m_d3dContext.Get(); } + D3D_FEATURE_LEVEL GetDeviceFeatureLevel() const { return m_d3dFeatureLevel; } + bool GetDeviceSupportsVprt() const { return m_supportsVprt; } + + // DXGI acessors. + IDXGIAdapter3* GetDXGIAdapter() const { return m_dxgiAdapter.Get(); } + + // D2D accessors. + ID2D1Factory2* GetD2DFactory() const { return m_d2dFactory.Get(); } + IDWriteFactory2* GetDWriteFactory() const { return m_dwriteFactory.Get(); } + IWICImagingFactory2* GetWicImagingFactory() const { return m_wicFactory.Get(); } + + private: + // Private methods related to the Direct3D device, and resources based on that device. + void CreateDeviceIndependentResources(); + void InitializeUsingHolographicSpace(); + void CreateDeviceResources(); + + // Direct3D objects. + Microsoft::WRL::ComPtr m_d3dDevice; + Microsoft::WRL::ComPtr m_d3dContext; + Microsoft::WRL::ComPtr m_dxgiAdapter; + + // Direct3D interop objects. + Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice^ m_d3dInteropDevice; + + // Direct2D factories. + Microsoft::WRL::ComPtr m_d2dFactory; + Microsoft::WRL::ComPtr m_dwriteFactory; + Microsoft::WRL::ComPtr m_wicFactory; + + // The holographic space provides a preferred DXGI adapter ID. + Windows::Graphics::Holographic::HolographicSpace^ m_holographicSpace = nullptr; + + // Properties of the Direct3D device currently in use. + D3D_FEATURE_LEVEL m_d3dFeatureLevel = D3D_FEATURE_LEVEL_10_0; + + // The IDeviceNotify can be held directly as it owns the DeviceResources. + IDeviceNotify* m_deviceNotify = nullptr; + + // Whether or not the current Direct3D device supports the optional feature + // for setting the render target array index from the vertex shader stage. + bool m_supportsVprt = false; + + // Back buffer resources, etc. for attached holographic cameras. + std::map> m_cameraResources; + std::mutex m_cameraResourcesLock; + }; +} + +// Device-based resources for holographic cameras are stored in a std::map. Access this list by providing a +// callback to this function, and the std::map will be guarded from add and remove +// events until the callback returns. The callback is processed immediately and must +// not contain any nested calls to UseHolographicCameraResources. +// The callback takes a parameter of type std::map>& +// through which the list of cameras will be accessed. 
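// Usage sketch (caller-side code, not part of this header). The explicit <void> supplies RetType; the map is
// keyed by HolographicCamera::Id (a UINT32), as in AddHolographicCamera above:
//
//   m_deviceResources->UseHolographicCameraResources<void>(
//       [this](std::map<UINT32, std::unique_ptr<DX::CameraResources>>& cameraResourceMap)
//       {
//           for (auto& pair : cameraResourceMap)
//           {
//               // Per-camera work here, while the map is guarded against add/remove events.
//           }
//       });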
+template <typename RetType, typename LCallback>
+RetType DX::DeviceResources::UseHolographicCameraResources(const LCallback& callback)
+{
+    std::lock_guard<std::mutex> guard(m_cameraResourcesLock);
+    return callback(m_cameraResources);
+}
+
diff --git a/windows/MSRHoloLensSpatialMapping/Common/DirectXHelper.h b/windows/MSRHoloLensSpatialMapping/Common/DirectXHelper.h
new file mode 100644
index 0000000..073e36d
--- /dev/null
+++ b/windows/MSRHoloLensSpatialMapping/Common/DirectXHelper.h
@@ -0,0 +1,70 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+#include <ppltasks.h>    // For create_task
+
+namespace DX
+{
+    inline void ThrowIfFailed(HRESULT hr)
+    {
+        if (FAILED(hr))
+        {
+            // Set a breakpoint on this line to catch Win32 API errors.
+            throw Platform::Exception::CreateException(hr);
+        }
+    }
+
+    // Function that reads from a binary file asynchronously.
+    inline Concurrency::task<std::vector<byte>> ReadDataAsync(const std::wstring& filename)
+    {
+        using namespace Windows::Storage;
+        using namespace Concurrency;
+
+        return create_task(PathIO::ReadBufferAsync(Platform::StringReference(filename.c_str()))).then(
+            [] (Streams::IBuffer^ fileBuffer) -> std::vector<byte>
+            {
+                std::vector<byte> returnBuffer;
+                returnBuffer.resize(fileBuffer->Length);
+                Streams::DataReader::FromBuffer(fileBuffer)->ReadBytes(Platform::ArrayReference<byte>(returnBuffer.data(), (unsigned int)returnBuffer.size()));
+                return returnBuffer;
+            });
+    }
+
+    // Converts a length in device-independent pixels (DIPs) to a length in physical pixels.
+    inline float ConvertDipsToPixels(float dips, float dpi)
+    {
+        static const float dipsPerInch = 96.0f;
+        return floorf(dips * dpi / dipsPerInch + 0.5f); // Round to nearest integer.
+    }
+
+#if defined(_DEBUG)
+    // Check for SDK Layer support.
+    inline bool SdkLayersAvailable()
+    {
+        HRESULT hr = D3D11CreateDevice(
+            nullptr,
+            D3D_DRIVER_TYPE_NULL,       // There is no need to create a real hardware device.
+            0,
+            D3D11_CREATE_DEVICE_DEBUG,  // Check for the SDK layers.
+            nullptr,                    // Any feature level will do.
+            0,
+            D3D11_SDK_VERSION,          // Always set this to D3D11_SDK_VERSION for Windows Runtime apps.
+            nullptr,                    // No need to keep the D3D device reference.
+            nullptr,                    // No need to know the feature level.
+            nullptr                     // No need to keep the D3D device context reference.
+            );
+
+        return SUCCEEDED(hr);
+    }
+#endif
+}
diff --git a/windows/MSRHoloLensSpatialMapping/Common/StepTimer.h b/windows/MSRHoloLensSpatialMapping/Common/StepTimer.h
new file mode 100644
index 0000000..6b983e3
--- /dev/null
+++ b/windows/MSRHoloLensSpatialMapping/Common/StepTimer.h
@@ -0,0 +1,200 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+namespace DX
+{
+    // Helper class for animation and simulation timing.
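    // Usage sketch (assumed caller-side code, e.g. in the Main class's per-frame update; m_timer,
    // m_meshRenderer and currentCoordinateSystem are placeholder names):
    //
    //   m_timer.Tick([&]()
    //   {
    //       // Run simulation/update work once per elapsed timestep.
    //       m_meshRenderer->Update(m_timer, currentCoordinateSystem);
    //   });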
+ class StepTimer + { + public: + StepTimer() : + m_elapsedTicks(0), + m_totalTicks(0), + m_leftOverTicks(0), + m_frameCount(0), + m_framesPerSecond(0), + m_framesThisSecond(0), + m_qpcSecondCounter(0), + m_isFixedTimeStep(false), + m_targetElapsedTicks(TicksPerSecond / 60) + { + m_qpcFrequency = GetPerformanceFrequency(); + + // Initialize max delta to 1/10 of a second. + m_qpcMaxDelta = m_qpcFrequency / 10; + } + + // Get elapsed time since the previous Update call. + uint64 GetElapsedTicks() const { return m_elapsedTicks; } + double GetElapsedSeconds() const { return TicksToSeconds(m_elapsedTicks); } + + // Get total time since the start of the program. + uint64 GetTotalTicks() const { return m_totalTicks; } + double GetTotalSeconds() const { return TicksToSeconds(m_totalTicks); } + + // Get total number of updates since start of the program. + uint32 GetFrameCount() const { return m_frameCount; } + + // Get the current framerate. + uint32 GetFramesPerSecond() const { return m_framesPerSecond; } + + // Set whether to use fixed or variable timestep mode. + void SetFixedTimeStep(bool isFixedTimestep) { m_isFixedTimeStep = isFixedTimestep; } + + // Set how often to call Update when in fixed timestep mode. + void SetTargetElapsedTicks(uint64 targetElapsed) { m_targetElapsedTicks = targetElapsed; } + void SetTargetElapsedSeconds(double targetElapsed) { m_targetElapsedTicks = SecondsToTicks(targetElapsed); } + + // Integer format represents time using 10,000,000 ticks per second. + static const uint64 TicksPerSecond = 10'000'000; + + static double TicksToSeconds(uint64 ticks) { return static_cast(ticks) / TicksPerSecond; } + static uint64 SecondsToTicks(double seconds) { return static_cast(seconds * TicksPerSecond); } + + // Convenient wrapper for QueryPerformanceFrequency. Throws an exception if + // the call to QueryPerformanceFrequency fails. + static inline uint64 GetPerformanceFrequency() + { + LARGE_INTEGER freq; + if (!QueryPerformanceFrequency(&freq)) + { + throw ref new Platform::FailureException(); + } + return freq.QuadPart; + } + + // Gets the current number of ticks from QueryPerformanceCounter. Throws an + // exception if the call to QueryPerformanceCounter fails. + static inline int64 GetTicks() + { + LARGE_INTEGER ticks; + if (!QueryPerformanceCounter(&ticks)) + { + throw ref new Platform::FailureException(); + } + return ticks.QuadPart; + } + + // After an intentional timing discontinuity (for instance a blocking IO operation) + // call this to avoid having the fixed timestep logic attempt a set of catch-up + // Update calls. + + void ResetElapsedTime() + { + m_qpcLastTime = GetTicks(); + + m_leftOverTicks = 0; + m_framesPerSecond = 0; + m_framesThisSecond = 0; + m_qpcSecondCounter = 0; + } + + // Update timer state, calling the specified Update function the appropriate number of times. + template + void Tick(const TUpdate& update) + { + // Query the current time. + uint64 currentTime = GetTicks(); + uint64 timeDelta = currentTime - m_qpcLastTime; + + m_qpcLastTime = currentTime; + m_qpcSecondCounter += timeDelta; + + // Clamp excessively large time deltas (e.g. after paused in the debugger). + if (timeDelta > m_qpcMaxDelta) + { + timeDelta = m_qpcMaxDelta; + } + + // Convert QPC units into a canonical tick format. This cannot overflow due to the previous clamp. 
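            // Why the multiply below cannot overflow: timeDelta was clamped to m_qpcMaxDelta = m_qpcFrequency / 10,
            // so timeDelta * TicksPerSecond <= (m_qpcFrequency / 10) * 1e7 = m_qpcFrequency * 1e6. With a QPC
            // frequency commonly on the order of 1e7 counts per second, that product is roughly 1e13, far below
            // the uint64 limit of about 1.8e19.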
+ timeDelta *= TicksPerSecond; + timeDelta /= m_qpcFrequency; + + uint32 lastFrameCount = m_frameCount; + + if (m_isFixedTimeStep) + { + // Fixed timestep update logic + + // If the app is running very close to the target elapsed time (within 1/4 of a millisecond) just clamp + // the clock to exactly match the target value. This prevents tiny and irrelevant errors + // from accumulating over time. Without this clamping, a game that requested a 60 fps + // fixed update, running with vsync enabled on a 59.94 NTSC display, would eventually + // accumulate enough tiny errors that it would drop a frame. It is better to just round + // small deviations down to zero to leave things running smoothly. + + if (abs(static_cast(timeDelta - m_targetElapsedTicks)) < TicksPerSecond / 4000) + { + timeDelta = m_targetElapsedTicks; + } + + m_leftOverTicks += timeDelta; + + while (m_leftOverTicks >= m_targetElapsedTicks) + { + m_elapsedTicks = m_targetElapsedTicks; + m_totalTicks += m_targetElapsedTicks; + m_leftOverTicks -= m_targetElapsedTicks; + m_frameCount++; + + update(); + } + } + else + { + // Variable timestep update logic. + m_elapsedTicks = timeDelta; + m_totalTicks += timeDelta; + m_leftOverTicks = 0; + m_frameCount++; + + update(); + } + + // Track the current framerate. + if (m_frameCount != lastFrameCount) + { + m_framesThisSecond++; + } + + if (m_qpcSecondCounter >= static_cast(m_qpcFrequency)) + { + m_framesPerSecond = m_framesThisSecond; + m_framesThisSecond = 0; + m_qpcSecondCounter %= m_qpcFrequency; + } + } + + private: + + // Source timing data uses QPC units. + uint64 m_qpcFrequency; + uint64 m_qpcLastTime; + uint64 m_qpcMaxDelta; + + // Derived timing data uses a canonical tick format. + uint64 m_elapsedTicks; + uint64 m_totalTicks; + uint64 m_leftOverTicks; + + // Members for tracking the framerate. + uint32 m_frameCount; + uint32 m_framesPerSecond; + uint32 m_framesThisSecond; + uint64 m_qpcSecondCounter; + + // Members for configuring fixed timestep mode. + bool m_isFixedTimeStep; + uint64 m_targetElapsedTicks; + }; +} diff --git a/windows/MSRHoloLensSpatialMapping/Content/GetDataFromIBuffer.h b/windows/MSRHoloLensSpatialMapping/Content/GetDataFromIBuffer.h new file mode 100644 index 0000000..bda5daa --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/GetDataFromIBuffer.h @@ -0,0 +1,57 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. 
+// +//********************************************************* + +#pragma once + +#include +#include +#include + +using namespace Microsoft::WRL; +using namespace Windows::Storage::Streams; + +namespace HoloLensNavigation +{ + template + t* GetDataFromIBuffer(Windows::Storage::Streams::IBuffer^ container) + { + if (container == nullptr) + { + return nullptr; + } + + unsigned int bufferLength = container->Length; + + if (!(bufferLength > 0)) + { + return nullptr; + } + + HRESULT hr = S_OK; + + ComPtr pUnknown = reinterpret_cast(container); + ComPtr spByteAccess; + hr = pUnknown.As(&spByteAccess); + if (FAILED(hr)) + { + return nullptr; + } + + byte* pRawData = nullptr; + hr = spByteAccess->Buffer(&pRawData); + if (FAILED(hr)) + { + return nullptr; + } + + return reinterpret_cast(pRawData); + } +} // namespace HoloLensNavigation diff --git a/windows/MSRHoloLensSpatialMapping/Content/RealTimeSurfaceMeshRenderer.cpp b/windows/MSRHoloLensSpatialMapping/Content/RealTimeSurfaceMeshRenderer.cpp new file mode 100644 index 0000000..09afb26 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/RealTimeSurfaceMeshRenderer.cpp @@ -0,0 +1,410 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "pch.h" + +#include "Common\DirectXHelper.h" +#include "RealtimeSurfaceMeshRenderer.h" + +using namespace HoloLensNavigation; + +using namespace Concurrency; +using namespace DX; +using namespace Windows::Foundation::Collections; +using namespace Windows::Foundation::Numerics; +using namespace Windows::Perception::Spatial; +using namespace Windows::Perception::Spatial::Surfaces; + +using namespace Platform; + +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +RealtimeSurfaceMeshRenderer::RealtimeSurfaceMeshRenderer(const std::shared_ptr& deviceResources) : + m_deviceResources(deviceResources), + m_loadingComplete(false) +{ + m_meshCollection.clear(); + CreateDeviceDependentResources(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void RealtimeSurfaceMeshRenderer::Update(DX::StepTimer const & timer, SpatialCoordinateSystem ^ coordinateSystem) +{ + // + // Called once per frame, maintains and updates the mesh collection. + + std::lock_guard guard(m_meshCollectionLock); + + const float timeElapsed = static_cast(timer.GetTotalSeconds()); + + // Update meshes as needed, based on the current coordinate system. + // Also remove meshes that are inactive for too long. + for (auto iter = m_meshCollection.begin(); iter != m_meshCollection.end(); ) + { + auto& pair = *iter; + auto& surfaceMesh = pair.second; + + // Update the surface mesh. + surfaceMesh.UpdateTransform(m_deviceResources->GetD3DDevice(), m_deviceResources->GetD3DDeviceContext(), timer, coordinateSystem); + + // Check to see if the mesh has expired. 
+ float lastActiveTime = surfaceMesh.GetLastActiveTime(); + float inactiveDuration = timeElapsed - lastActiveTime; + if (inactiveDuration > c_maxInactiveMeshTime) + { + // Surface mesh is expired. + m_meshCollection.erase(iter++); + } + else + { + ++iter; + } + }; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void RealtimeSurfaceMeshRenderer::AddSurface(Guid id, SpatialSurfaceInfo^ newSurface) +{ + auto fadeInMeshTask = AddOrUpdateSurfaceAsync(id, newSurface).then([this, id] () + { + if (HasSurface(id)) + { + std::lock_guard guard(m_meshCollectionLock); + + // In this example, new surfaces are treated differently by highlighting them in a different + // color. This allows you to observe changes in the spatial map that are due to new meshes, + // as opposed to mesh updates. + auto& surfaceMesh = m_meshCollection[id]; + surfaceMesh.SetColorFadeTimer(c_surfaceMeshFadeInTime); + } + }); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void RealtimeSurfaceMeshRenderer::UpdateSurface(Guid id, SpatialSurfaceInfo^ newSurface) +{ + AddOrUpdateSurfaceAsync(id, newSurface); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +Concurrency::task RealtimeSurfaceMeshRenderer::AddOrUpdateSurfaceAsync(Guid id, SpatialSurfaceInfo^ newSurface) +{ + auto options = ref new SpatialSurfaceMeshOptions(); + options->IncludeVertexNormals = true; + + // The level of detail setting is used to limit mesh complexity, by limiting the number + // of triangles per cubic meter. 
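    // How AddSurface/UpdateSurface above are typically driven: the spatial surface observer's change event
    // feeds them with the current surface collection. A sketch of the assumed subscription (it would live in
    // the Main class, outside this file; m_surfaceObserver and m_meshRenderer are placeholder names):
    //
    //   m_surfaceObserver->ObservedSurfacesChanged +=
    //       ref new TypedEventHandler<SpatialSurfaceObserver^, Platform::Object^>(
    //           [this](SpatialSurfaceObserver^ sender, Platform::Object^)
    //           {
    //               auto const& surfaceCollection = sender->GetObservedSurfaces();
    //               for (auto const& pair : surfaceCollection)
    //               {
    //                   if (m_meshRenderer->HasSurface(pair->Key))
    //                       m_meshRenderer->UpdateSurface(pair->Key, pair->Value);
    //                   else
    //                       m_meshRenderer->AddSurface(pair->Key, pair->Value);
    //               }
    //               m_meshRenderer->HideInactiveMeshes(surfaceCollection);
    //           });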
+ auto createMeshTask = create_task(newSurface->TryComputeLatestMeshAsync(m_maxTrianglesPerCubicMeter, options)); + auto processMeshTask = createMeshTask.then([this, id](SpatialSurfaceMesh^ mesh) + { + if (mesh != nullptr) + { + std::lock_guard guard(m_meshCollectionLock); + + auto& surfaceMesh = m_meshCollection[id]; + surfaceMesh.UpdateSurface(mesh); + surfaceMesh.SetIsActive(true); + } + }, task_continuation_context::use_current()); + + return processMeshTask; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void RealtimeSurfaceMeshRenderer::RemoveSurface(Guid id) +{ + std::lock_guard guard(m_meshCollectionLock); + m_meshCollection.erase(id); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void RealtimeSurfaceMeshRenderer::ClearSurfaces() +{ + std::lock_guard guard(m_meshCollectionLock); + m_meshCollection.clear(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void RealtimeSurfaceMeshRenderer::HideInactiveMeshes(IMapView^ const& surfaceCollection) +{ + std::lock_guard guard(m_meshCollectionLock); + + // Hide surfaces that aren't actively listed in the surface collection. + for (auto& pair : m_meshCollection) + { + const auto& id = pair.first; + auto& surfaceMesh = pair.second; + + surfaceMesh.SetIsActive(surfaceCollection->HasKey(id) ? true : false); + }; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void RealtimeSurfaceMeshRenderer::Render(bool isStereo, bool useWireframe) +{ + // + // Renders one frame using the vertex, geometry, and pixel shaders. + + // Loading is asynchronous. Only draw geometry after it's loaded. + if (!m_loadingComplete) + { + return; + } + + auto context = m_deviceResources->GetD3DDeviceContext(); + + context->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST); + context->IASetInputLayout(m_inputLayout.Get()); + + // Attach our vertex shader. + context->VSSetShader(m_vertexShader.Get(), nullptr, 0); + + // The constant buffer is per-mesh, and will be set for each one individually. + if (!m_usingVprtShaders) + { + // Attach the passthrough geometry shader. + context->GSSetShader(m_geometryShader.Get(), nullptr, 0); + } + + if (useWireframe) + { + // Use a wireframe rasterizer state. + m_deviceResources->GetD3DDeviceContext()->RSSetState(m_wireframeRasterizerState.Get()); + + // Attach a pixel shader to render a solid color wireframe. + context->PSSetShader(m_colorPixelShader.Get(), nullptr, 0); + } + else + { + // Use the default rasterizer state. + m_deviceResources->GetD3DDeviceContext()->RSSetState(m_defaultRasterizerState.Get()); + + // Attach a pixel shader that can do lighting. + context->PSSetShader(m_lightingPixelShader.Get(), nullptr, 0); + } + + { + std::lock_guard guard(m_meshCollectionLock); + + // Draw the meshes. 
+ auto device = m_deviceResources->GetD3DDevice(); + for (auto& pair : m_meshCollection) + { + auto & id = pair.first; + auto & surfaceMesh = pair.second; + + surfaceMesh.Draw(device, context, m_usingVprtShaders, isStereo); + } + } +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void RealtimeSurfaceMeshRenderer::CreateDeviceDependentResources() +{ + m_usingVprtShaders = m_deviceResources->GetDeviceSupportsVprt(); + + // On devices that do support the D3D11_FEATURE_D3D11_OPTIONS3:: + // VPAndRTArrayIndexFromAnyShaderFeedingRasterizer optional feature + // we can avoid using a pass-through geometry shader to set the render + // target array index, thus avoiding any overhead that would be + // incurred by setting the geometry shader stage. + std::wstring vertexShaderFileName = m_usingVprtShaders ? L"ms-appx:///SurfaceVprtVertexShader.cso" : L"ms-appx:///SurfaceVertexShader.cso"; + + // Load shaders asynchronously. + task> loadVSTask = DX::ReadDataAsync(vertexShaderFileName); + task> loadLightingPSTask = DX::ReadDataAsync(L"ms-appx:///SimpleLightingPixelShader.cso"); + task> loadWireframePSTask = DX::ReadDataAsync(L"ms-appx:///SolidColorPixelShader.cso"); + + task> loadGSTask; + + if (!m_usingVprtShaders) + { + // Load the pass-through geometry shader. + loadGSTask = DX::ReadDataAsync(L"ms-appx:///SurfaceGeometryShader.cso"); + } + + // After the vertex shader file is loaded, create the shader and input layout. + auto createVSTask = loadVSTask.then([this](const std::vector& fileData) { + DX::ThrowIfFailed( + m_deviceResources->GetD3DDevice()->CreateVertexShader(&fileData[0], fileData.size(), nullptr, &m_vertexShader) + ); + + static const D3D11_INPUT_ELEMENT_DESC vertexDesc[] = + { + { "POSITION", 0, DXGI_FORMAT_R16G16B16A16_SNORM, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 }, + { "NORMAL", 0, DXGI_FORMAT_R8G8B8A8_SNORM, 1, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 }, + }; + + DX::ThrowIfFailed( + m_deviceResources->GetD3DDevice()->CreateInputLayout(vertexDesc, ARRAYSIZE(vertexDesc), &fileData[0], fileData.size(), &m_inputLayout) + ); + }); + + // After the pixel shader file is loaded, create the shader and constant buffer. + auto createLightingPSTask = loadLightingPSTask.then([this](const std::vector& fileData) { + DX::ThrowIfFailed( + m_deviceResources->GetD3DDevice()->CreatePixelShader(&fileData[0], fileData.size(), nullptr, &m_lightingPixelShader) + ); + }); + + // After the pixel shader file is loaded, create the shader and constant buffer. + auto createWireframePSTask = loadWireframePSTask.then([this](const std::vector& fileData) { + DX::ThrowIfFailed( + m_deviceResources->GetD3DDevice()->CreatePixelShader(&fileData[0], fileData.size(), nullptr, &m_colorPixelShader) + ); + }); + + task createGSTask; + if (!m_usingVprtShaders) + { + // After the pass-through geometry shader file is loaded, create the shader. + createGSTask = loadGSTask.then([this](const std::vector& fileData) + { + DX::ThrowIfFailed( + m_deviceResources->GetD3DDevice()->CreateGeometryShader(&fileData[0], fileData.size(), nullptr, &m_geometryShader) + ); + }); + } + + // Once all shaders are loaded, create the mesh. + task shaderTaskGroup = m_usingVprtShaders ? 
+ (createLightingPSTask && createWireframePSTask && createVSTask) : + (createLightingPSTask && createWireframePSTask && createVSTask && createGSTask); + + // Once the cube is loaded, the object is ready to be rendered. + auto finishLoadingTask = shaderTaskGroup.then([this]() { + + // Recreate device-based surface mesh resources. + std::lock_guard guard(m_meshCollectionLock); + for (auto& iter : m_meshCollection) + { + iter.second.ReleaseDeviceDependentResources(); + iter.second.CreateDeviceDependentResources(m_deviceResources->GetD3DDevice()); + } + + // Create a default rasterizer state descriptor. + D3D11_RASTERIZER_DESC rasterizerDesc = CD3D11_RASTERIZER_DESC(D3D11_DEFAULT); + + // Create the default rasterizer state. + m_deviceResources->GetD3DDevice()->CreateRasterizerState(&rasterizerDesc, m_defaultRasterizerState.GetAddressOf()); + + // Change settings for wireframe rasterization. + rasterizerDesc.AntialiasedLineEnable = false; + rasterizerDesc.CullMode = D3D11_CULL_NONE; + rasterizerDesc.FillMode = D3D11_FILL_WIREFRAME; + + // Create a wireframe rasterizer state. + m_deviceResources->GetD3DDevice()->CreateRasterizerState(&rasterizerDesc, m_wireframeRasterizerState.GetAddressOf()); + + m_loadingComplete = true; + }); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void RealtimeSurfaceMeshRenderer::ReleaseDeviceDependentResources() +{ + m_loadingComplete = false; + + m_vertexShader.Reset(); + m_inputLayout.Reset(); + m_geometryShader.Reset(); + m_lightingPixelShader.Reset(); + m_colorPixelShader.Reset(); + + m_defaultRasterizerState.Reset(); + m_wireframeRasterizerState.Reset(); + + std::lock_guard guard(m_meshCollectionLock); + for (auto& iter : m_meshCollection) + { + iter.second.ReleaseDeviceDependentResources(); + } +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +bool RealtimeSurfaceMeshRenderer::HasSurface(Platform::Guid id) +{ + std::lock_guard guard(m_meshCollectionLock); + return m_meshCollection.find(id) != m_meshCollection.end(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +Windows::Foundation::DateTime RealtimeSurfaceMeshRenderer::GetLastUpdateTime(Platform::Guid id) +{ + std::lock_guard guard(m_meshCollectionLock); + auto& meshIter = m_meshCollection.find(id); + if (meshIter != m_meshCollection.end()) + { + auto const& mesh = meshIter->second; + return mesh.GetLastUpdateTime(); + } + else + { + static const Windows::Foundation::DateTime zero; + return zero; + } +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void RealtimeSurfaceMeshRenderer::CollectVertexData() +{ + std::lock_guard guard(m_meshCollectionLock); + + unsigned int uTotalBufferSize = 0; + + m_memVertexDataCollection.LockRWAccess(); + + // + // collect vertex buffers of all surface meshes into one data block + for (auto& pair : m_meshCollection) + { + auto& surfaceMesh = 
pair.second; + + StaticMemoryBuffer& memBuffer = surfaceMesh.GetVertexMemoryBuffer(); + + memBuffer.LockRWAccess(); + + unsigned char * pSrc = (unsigned char*)memBuffer.getPointer(); + unsigned int uSrcBufferSize = memBuffer.getBufferSize(); + + if (pSrc && uSrcBufferSize) { + unsigned char * pDst = (unsigned char*)m_memVertexDataCollection.realloc(uTotalBufferSize + uSrcBufferSize); + if (!pDst) { + memBuffer.UnlockRWAccess(); + break; + } + + memcpy(pDst + uTotalBufferSize, pSrc, uSrcBufferSize); + + uTotalBufferSize += uSrcBufferSize; + } + + memBuffer.UnlockRWAccess(); + } + + m_memVertexDataCollection.UnlockRWAccess(); + + // DebugMsgW(L"total buffer size: %u", uTotalBufferSize); +} diff --git a/windows/MSRHoloLensSpatialMapping/Content/RealTimeSurfaceMeshRenderer.h b/windows/MSRHoloLensSpatialMapping/Content/RealTimeSurfaceMeshRenderer.h new file mode 100644 index 0000000..8b8a7bb --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/RealTimeSurfaceMeshRenderer.h @@ -0,0 +1,102 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#pragma once + +#include "Common\DeviceResources.h" +#include "Common\StepTimer.h" +#include "Content\SurfaceMesh.h" +#include "Content\ShaderStructures.h" + +#include +#include +#include + +namespace HoloLensNavigation +{ + class RealtimeSurfaceMeshRenderer + { + public: + RealtimeSurfaceMeshRenderer(const std::shared_ptr& deviceResources); + void CreateDeviceDependentResources(); + void ReleaseDeviceDependentResources(); + void Update( + DX::StepTimer const& timer, + Windows::Perception::Spatial::SpatialCoordinateSystem^ coordinateSystem + ); + void Render(bool isStereo, bool useWireframe); + + bool HasSurface(Platform::Guid id); + void AddSurface(Platform::Guid id, Windows::Perception::Spatial::Surfaces::SpatialSurfaceInfo^ newSurface); + void UpdateSurface(Platform::Guid id, Windows::Perception::Spatial::Surfaces::SpatialSurfaceInfo^ newSurface); + void RemoveSurface(Platform::Guid id); + void ClearSurfaces(); + + Windows::Foundation::DateTime GetLastUpdateTime(Platform::Guid id); + + void HideInactiveMeshes( + Windows::Foundation::Collections::IMapView^ const& surfaceCollection); + + public: + void CollectVertexData(); + _inline StaticMemoryBuffer& GetVertexDataCollectionBuffer(void) { return m_memVertexDataCollection; } + + private: + Concurrency::task AddOrUpdateSurfaceAsync(Platform::Guid id, Windows::Perception::Spatial::Surfaces::SpatialSurfaceInfo^ newSurface); + + // Cached pointer to device resources. + std::shared_ptr m_deviceResources; + + // Direct3D resources for SR mesh rendering pipeline. + Microsoft::WRL::ComPtr m_inputLayout; + Microsoft::WRL::ComPtr m_vertexShader; + Microsoft::WRL::ComPtr m_geometryShader; + Microsoft::WRL::ComPtr m_lightingPixelShader; + Microsoft::WRL::ComPtr m_colorPixelShader; + + // The set of surfaces in the collection. + std::map m_meshCollection; + + // A way to lock map access. + std::mutex m_meshCollectionLock; + + // Total number of surface meshes. + unsigned int m_surfaceMeshCount; + + // Level of detail setting. The number of triangles that the system is allowed to provide per cubic meter. 
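`m_maxTrianglesPerCubicMeter` (declared just below) is the density value that is normally handed to `SpatialSurfaceInfo::TryComputeLatestMeshAsync` when a surface is added or updated, presumably inside `AddOrUpdateSurfaceAsync`, which is declared earlier in this class. The sketch below shows that call; the free-function name, the options chosen, and the continuation body are illustrative assumptions, not code from this patch.

```cpp
// Illustrative sketch only; assumes the usual pattern behind AddOrUpdateSurfaceAsync().
using namespace Windows::Perception::Spatial::Surfaces;

Concurrency::task<void> ComputeMeshAtRequestedDensity(SpatialSurfaceInfo^ newSurface,
                                                      double maxTrianglesPerCubicMeter)
{
    auto options = ref new SpatialSurfaceMeshOptions();
    options->IncludeVertexNormals = true;   // the renderer binds a normal stream in slot 1

    // Ask the system for mesh data at the requested level of detail.
    return Concurrency::create_task(
        newSurface->TryComputeLatestMeshAsync(maxTrianglesPerCubicMeter, options))
        .then([](SpatialSurfaceMesh^ mesh)
    {
        if (mesh != nullptr)
        {
            // Hand the mesh to the matching SurfaceMesh instance (see SurfaceMesh::UpdateSurface).
        }
    });
}
```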
+ double m_maxTrianglesPerCubicMeter = 1000.0; + + // If the current D3D Device supports VPRT, we can avoid using a geometry + // shader just to set the render target array index. + bool m_usingVprtShaders = false; + + // Rasterizer states, for different rendering modes. + Microsoft::WRL::ComPtr m_defaultRasterizerState; + Microsoft::WRL::ComPtr m_wireframeRasterizerState; + + // The duration of time, in seconds, a mesh is allowed to remain inactive before deletion. + const float c_maxInactiveMeshTime = 120.f; + + // The duration of time, in seconds, taken for a new surface mesh to fade in on-screen. + const float c_surfaceMeshFadeInTime = 2.0f; + + bool m_loadingComplete; + + private: + // + // we'll use a memory buffer which only grows in size as needed. Because we collect + // vertex data over and over, re-using previously allocated memory will drastically + // improve performance. + StaticMemoryBuffer m_memVertexDataCollection; + }; + +} // namespace HoloLensNavigation diff --git a/windows/MSRHoloLensSpatialMapping/Content/ShaderStructures.h b/windows/MSRHoloLensSpatialMapping/Content/ShaderStructures.h new file mode 100644 index 0000000..12fa532 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/ShaderStructures.h @@ -0,0 +1,66 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#pragma once + +namespace HoloLensNavigation +{ + // Constant buffer used to send hologram position transform to the shader pipeline. + struct ModelConstantBuffer + { + DirectX::XMFLOAT4X4 modelToWorld; + }; + + // Assert that the constant buffer remains 16-byte aligned (best practice). + // If shader structure members are not aligned to a 4-float boundary, data may + // not show up where it is expected by the time it is read by the shader. + static_assert((sizeof(ModelConstantBuffer) % (sizeof(float) * 4)) == 0, "Model constant buffer size must be 16-byte aligned (16 bytes is the length of four floats)."); + + // Constant buffer used to send hologram position and normal transforms to the shader pipeline. + struct ModelNormalConstantBuffer + { + DirectX::XMFLOAT4X4 modelToWorld; + DirectX::XMFLOAT4X4 normalToWorld; + DirectX::XMFLOAT4 colorFadeFactor; + }; + + // Assert that the constant buffer remains 16-byte aligned (best practice). + // If shader structure members are not aligned to a 4-float boundary, data may + // not show up where it is expected by the time it is read by the shader. + static_assert((sizeof(ModelNormalConstantBuffer) % (sizeof(float) * 4)) == 0, "Model/normal constant buffer size must be 16-byte aligned (16 bytes is the length of four floats)."); + + // Constant buffer used to send the view-projection matrices to the shader pipeline. + struct ViewProjectionConstantBuffer + { + DirectX::XMFLOAT4 cameraPosition; + DirectX::XMFLOAT4 lightPosition; + DirectX::XMFLOAT4X4 viewProjection[2]; + }; + + // Assert that the constant buffer remains 16-byte aligned (best practice). 
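The alignment comments in this header matter most when a constant buffer mixes vector sizes. As a hedged example that is not part of this patch: an `XMFLOAT3` member leaves the struct 4 bytes short of a 16-byte multiple, so explicit padding (or an `XMFLOAT4`) keeps the CPU layout in step with the HLSL packing rules.

```cpp
#include <DirectXMath.h>

// Hypothetical buffer, shown only to illustrate the 16-byte rule asserted in this header.
struct ExampleTintConstantBuffer
{
    DirectX::XMFLOAT4X4 modelToWorld;   // 64 bytes
    DirectX::XMFLOAT3   tintColor;      // 12 bytes...
    float               padding;        // ...padded back to a 16-byte multiple
};

static_assert((sizeof(ExampleTintConstantBuffer) % (sizeof(float) * 4)) == 0,
    "Constant buffer size must be 16-byte aligned.");
```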
+ static_assert((sizeof(ViewProjectionConstantBuffer) % (sizeof(float) * 4)) == 0, "View/projection constant buffer size must be 16-byte aligned (16 bytes is the length of four floats)."); + + // Used to send per-vertex data to the vertex shader. + struct VertexPositionColor + { + DirectX::XMFLOAT3 pos; + DirectX::XMFLOAT3 color; + }; + + // Used to send per-vertex data to the vertex shader, including a normal. + struct VertexPositionNormalColor + { + DirectX::XMFLOAT3 pos; + DirectX::XMFLOAT3 normal; + DirectX::XMFLOAT3 color; + }; + +} // namespace HoloLensNavigation diff --git a/windows/MSRHoloLensSpatialMapping/Content/SimpleLightingPixelShader.hlsl b/windows/MSRHoloLensSpatialMapping/Content/SimpleLightingPixelShader.hlsl new file mode 100644 index 0000000..8532578 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/SimpleLightingPixelShader.hlsl @@ -0,0 +1,86 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +// A constant buffer that stores per-mesh data. +cbuffer ModelConstantBuffer : register(b0) +{ + float4x4 modelToWorld; + min16float4x4 normalToWorld; + min16float3 colorFadeFactor; +}; + +// A constant buffer that stores each set of view and projection matrices in column-major format. +cbuffer ViewProjectionConstantBuffer : register(b1) +{ + float4 cameraPosition; + float4 lightPosition; + float4x4 viewProjection[2]; +}; + +// Per-pixel data. +struct PixelShaderInput +{ + min16float4 screenPos : SV_POSITION; + min16float3 worldPos : POSITION0; + min16float3 worldNorm : NORMAL0; + min16float3 color : COLOR0; + uint idx : TEXCOORD0; +}; + +//-------------------------------------------------------------------------------------- +// The pixel shader applies simplified Blinn-Phong BRDF lighting. +min16float4 main(PixelShaderInput input) : SV_TARGET +{ + min16float3 lightDiffuseColorValue = min16float3(1.f, 1.f, 1.f); + min16float3 objectBaseColorValue = min16float3(input.color); + + // N is the surface normal, which points directly away from the surface. + min16float3 N = normalize(input.worldNorm); + + // L is the light incident vector, which is a normal that points from the surface to the light. + min16float3 lightIncidentVectorNotNormalized = min16float3(lightPosition.xyz - input.worldPos); + min16float distanceFromSurfaceToLight = length(lightIncidentVectorNotNormalized); + min16float oneOverDistanceFromSurfaceToLight = min16float(1.f) / distanceFromSurfaceToLight; + min16float3 L = normalize(lightIncidentVectorNotNormalized); + + // V is the camera incident vector, which is a normal that points from the surface to the camera. + min16float3 V = normalize(min16float3(cameraPosition.xyz - input.worldPos)); + + // H is a normalized vector that is halfway between L and V. + min16float3 H = normalize(L + V); + + // We take the dot products of N with L and H. + min16float nDotL = dot(N, L); + min16float nDotH = dot(N, H); + + // The dot products should be clamped to 0 as a lower bound. + min16float clampedNDotL = max(min16float(0.f), nDotL); + min16float clampedNDotH = max(min16float(0.f), nDotH); + + // We can then use dot(N, L) to determine the diffuse lighting contribution. 
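For readers who want to verify the shading terms outside the shader, here is a scalar CPU reference of the same Blinn-Phong weights (my own sketch in plain C++, no DirectX types): diffuse follows `max(0, dot(N, L))` and specular follows `max(0, dot(N, H))` raised to an exponent, with `H = normalize(L + V)`.

```cpp
#include <algorithm>
#include <cmath>

struct Vec3 { float x, y, z; };
static float Dot(const Vec3& a, const Vec3& b) { return a.x * b.x + a.y * b.y + a.z * b.z; }
static Vec3  Normalize(const Vec3& v) { float len = std::sqrt(Dot(v, v)); return { v.x / len, v.y / len, v.z / len }; }

// Mirrors the pixel shader math around this point: clamped N.L drives diffuse,
// clamped N.H raised to an exponent drives specular.
static void BlinnPhongWeights(Vec3 N, Vec3 L, Vec3 V, float exponent, float& diffuse, float& specular)
{
    N = Normalize(N); L = Normalize(L); V = Normalize(V);
    const Vec3 H = Normalize({ L.x + V.x, L.y + V.y, L.z + V.z });
    diffuse  = std::max(0.0f, Dot(N, L));
    specular = std::pow(std::max(0.0f, Dot(N, H)), exponent);
}
```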
+ min16float3 diffuseColor = lightDiffuseColorValue * objectBaseColorValue * clampedNDotL; + + // The specular contribution is based on dot(N, H). + const min16float specularExponent = min16float(4.f); + const min16float3 specularColorValue = min16float3(1.f, 1.f, 0.9f); + const min16float3 specularColor = specularColorValue * pow(clampedNDotH, specularExponent) * oneOverDistanceFromSurfaceToLight; + + // Now, we can sum the ambient, diffuse, and specular contributions to determine the lighting value for the pixel. + const min16float3 surfaceLitColor = objectBaseColorValue * min16float(0.2f) + diffuseColor * min16float(0.6f) + specularColor * min16float(0.2f); + + // In this example, new surfaces are treated differently by highlighting them in a different + // color. This allows you to observe changes in the spatial map that are due to new meshes, + // as opposed to mesh updates. + const min16float3 oneMinusColorFadeFactor = min16float3(1.f, 1.f, 1.f) - (min16float3)colorFadeFactor; + const min16float3 fadedColor = (surfaceLitColor * oneMinusColorFadeFactor) + (min16float3(0.1f, 0.1f, 0.75f) * (min16float3)colorFadeFactor); + + return min16float4(fadedColor, 1.f); +} diff --git a/windows/MSRHoloLensSpatialMapping/Content/SolidColorPixelShader.hlsl b/windows/MSRHoloLensSpatialMapping/Content/SolidColorPixelShader.hlsl new file mode 100644 index 0000000..06ed300 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/SolidColorPixelShader.hlsl @@ -0,0 +1,40 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +// A constant buffer that stores per-mesh data. +cbuffer ModelConstantBuffer : register(b0) +{ + float4x4 modelToWorld; + min16float4x4 normalToWorld; + min16float4 colorFadeFactor; +}; + +// Per-pixel data. +struct PixelShaderInput +{ + min16float4 screenPos : SV_POSITION; + min16float3 worldPos : POSITION0; + min16float3 worldNorm : NORMAL0; + min16float3 color : COLOR0; + uint idx : TEXCOORD0; +}; + +// The pixel shader returns a solid color (red). +min16float4 main(PixelShaderInput input) : SV_TARGET +{ + // In this example, new surfaces are treated differently by highlighting them in a different + // color. This allows you to observe changes in the spatial map that are due to new meshes, + // as opposed to mesh updates. + const min16float3 oneMinusColorFadeFactor = min16float3(1.f, 1.f, 1.f) - (min16float3)colorFadeFactor; + const min16float3 fadedColor = (input.color * oneMinusColorFadeFactor) + (min16float3(0.1f, 0.1f, 0.75f) * (min16float3)colorFadeFactor); + + return min16float4(fadedColor, 1.f); +} diff --git a/windows/MSRHoloLensSpatialMapping/Content/SpatialInputHandler.cpp b/windows/MSRHoloLensSpatialMapping/Content/SpatialInputHandler.cpp new file mode 100644 index 0000000..a287f6c --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/SpatialInputHandler.cpp @@ -0,0 +1,56 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). 
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "pch.h" +#include "SpatialInputHandler.h" +#include + +using namespace HoloLensNavigation; + +using namespace Windows::Foundation; +using namespace Windows::UI::Input::Spatial; +using namespace std::placeholders; + +// Creates and initializes a GestureRecognizer that listens to a Person. +SpatialInputHandler::SpatialInputHandler() +{ + // The interaction manager provides an event that informs the app when + // spatial interactions are detected. + m_interactionManager = SpatialInteractionManager::GetForCurrentView(); + + // Bind a handler to the SourcePressed event. + m_sourcePressedEventToken = + m_interactionManager->SourcePressed += + ref new TypedEventHandler( + bind(&SpatialInputHandler::OnSourcePressed, this, _1, _2) + ); +} + +SpatialInputHandler::~SpatialInputHandler() +{ + // Unregister our handler for the OnSourcePressed event. + m_interactionManager->SourcePressed -= m_sourcePressedEventToken; +} + +// Checks if the user performed an input gesture since the last call to this method. +// Allows the main update loop to check for asynchronous changes to the user +// input state. +SpatialInteractionSourceState^ SpatialInputHandler::CheckForInput() +{ + SpatialInteractionSourceState^ sourceState = m_sourceState; + m_sourceState = nullptr; + return sourceState; +} + +void SpatialInputHandler::OnSourcePressed(SpatialInteractionManager^ sender, SpatialInteractionSourceEventArgs^ args) +{ + m_sourceState = args->State; +} diff --git a/windows/MSRHoloLensSpatialMapping/Content/SpatialInputHandler.h b/windows/MSRHoloLensSpatialMapping/Content/SpatialInputHandler.h new file mode 100644 index 0000000..b1c65de --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/SpatialInputHandler.h @@ -0,0 +1,42 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#pragma once + +namespace HoloLensNavigation +{ + // Sample gesture handler. + // Hooks up events to recognize a tap gesture, and keeps track of input using a boolean value. + class SpatialInputHandler + { + public: + SpatialInputHandler(); + ~SpatialInputHandler(); + + Windows::UI::Input::Spatial::SpatialInteractionSourceState^ CheckForInput(); + + private: + // Interaction event handler. + void OnSourcePressed( + Windows::UI::Input::Spatial::SpatialInteractionManager^ sender, + Windows::UI::Input::Spatial::SpatialInteractionSourceEventArgs^ args); + + // API objects used to process gesture input, and generate gesture events. + Windows::UI::Input::Spatial::SpatialInteractionManager^ m_interactionManager; + + // Event registration token. + Windows::Foundation::EventRegistrationToken m_sourcePressedEventToken; + + // Used to indicate that a Pressed input event was received this frame. 
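The member declared just below holds the most recent press until the update loop asks for it; `CheckForInput()` returns it and clears it in one step, so each event is consumed at most once. A minimal sketch of that poll-and-clear pattern in standard C++ (`ConsumeLatest` is an illustrative name, not part of this patch):

```cpp
#include <utility>

// Return the stored value and reset the slot, so the update loop observes each event only once.
template <typename T>
T ConsumeLatest(T& slot)
{
    return std::exchange(slot, T{});
}
// Usage: auto state = ConsumeLatest(m_latestSourceState);
```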
+ Windows::UI::Input::Spatial::SpatialInteractionSourceState^ m_sourceState = nullptr; + }; + +} // namespace HoloLensNavigation diff --git a/windows/MSRHoloLensSpatialMapping/Content/SurfaceGeometryShader.hlsl b/windows/MSRHoloLensSpatialMapping/Content/SurfaceGeometryShader.hlsl new file mode 100644 index 0000000..ff8ced1 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/SurfaceGeometryShader.hlsl @@ -0,0 +1,50 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +// Per-vertex data from the vertex shader. +struct GeometryShaderInput +{ + min16float4 screenPos : SV_POSITION; + min16float3 worldPos : POSITION0; + min16float3 worldNorm : NORMAL0; + min16float3 color : COLOR0; + uint instId : TEXCOORD0; +}; + +// Per-vertex data passed to the rasterizer. +struct GeometryShaderOutput +{ + min16float4 screenPos : SV_POSITION; + min16float3 worldPos : POSITION0; + min16float3 worldNorm : NORMAL0; + min16float3 color : COLOR0; + uint idx : TEXCOORD0; + uint rtvId : SV_RenderTargetArrayIndex; +}; + +// This geometry shader is a pass-through that leaves the geometry unmodified +// and sets the render target array index. +[maxvertexcount(3)] +void main(triangle GeometryShaderInput input[3], inout TriangleStream outStream) +{ + GeometryShaderOutput output; + [unroll(3)] + for (int i = 0; i < 3; ++i) + { + output.screenPos = input[i].screenPos; + output.worldPos = input[i].worldPos; + output.worldNorm = input[i].worldNorm; + output.color = input[i].color; + output.idx = input[i].instId; + output.rtvId = input[i].instId; + outStream.Append(output); + } +} diff --git a/windows/MSRHoloLensSpatialMapping/Content/SurfaceMesh.cpp b/windows/MSRHoloLensSpatialMapping/Content/SurfaceMesh.cpp new file mode 100644 index 0000000..4def245 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/SurfaceMesh.cpp @@ -0,0 +1,532 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. 
+// +//********************************************************* + +#include "pch.h" + +#include + +#include "Common\DirectXHelper.h" +#include "Common\StepTimer.h" +#include "GetDataFromIBuffer.h" +#include "SurfaceMesh.h" + +using namespace HoloLensNavigation; +using namespace DirectX; +using namespace Windows::Perception::Spatial; +using namespace Windows::Perception::Spatial::Surfaces; +using namespace Windows::Foundation::Numerics; + +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +SurfaceMesh::SurfaceMesh() +{ + InitializeCriticalSection(&m_csMeshResources); + InitializeCriticalSection(&m_csUpdateVertexResources); + + CSLock lock(&m_csMeshResources); + + ReleaseDeviceDependentResources(); + m_lastUpdateTime.UniversalTime = 0; + m_fClosing = false; + + m_lastActiveTime = static_cast(_pGlobalTimer->GetTotalSeconds()); + + m_smud.surfaceMesh = nullptr; + m_smud.pd3dDevice = nullptr; + + CreateUpdateVertexResourcesWorkerThread(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +SurfaceMesh::~SurfaceMesh() +{ + ExitUpdateVertexResourcesWorkerThread(); + + { + CSLock lock(&m_csMeshResources); + + ReleaseDeviceDependentResources(); + } + + { + CSLock lock(&m_csUpdateVertexResources); + + m_fClosing = true; + } + + DeleteCriticalSection(&m_csUpdateVertexResources); + DeleteCriticalSection(&m_csMeshResources); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void SurfaceMesh::UpdateSurface(SpatialSurfaceMesh ^ surfaceMesh) +{ + m_pendingSurfaceMesh = surfaceMesh; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void SurfaceMesh::UpdateTransform(ID3D11Device * device, ID3D11DeviceContext * context, DX::StepTimer const & timer, SpatialCoordinateSystem ^ baseCoordinateSystem) +{ + // + // Spatial Mapping surface meshes each have a transform. This transform is updated every frame. + + UpdateVertexResources(device); + + { + CSLock csLock(&m_csMeshResources); + + m_baseCoordinateSystem = baseCoordinateSystem; + + if (m_updateReady) + { + // Surface mesh resources are created off-thread so that they don't affect rendering latency. + // When a new update is ready, we should begin using the updated vertex position, normal, and + // index buffers. + SwapVertexBuffers(); + m_updateReady = false; + } + } + + XMMATRIX transform; + if (m_isActive) + { + // In this example, new surfaces are treated differently by highlighting them in a different + // color. This allows you to observe changes in the spatial map that are due to new meshes, + // as opposed to mesh updates. 
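The fade described in the comment above is driven by the branch that follows: while the timer is below the timeout, the factor is `min(1, timeout - elapsed)`, and it drops to zero once the timeout passes. A standalone sketch of that curve (illustrative helper, not in this patch):

```cpp
#include <algorithm>

// Matches the branch below: tint strength while fading in, zero once the fade has finished.
static float ColorFadeFactor(float elapsedSeconds, float timeoutSeconds)
{
    if (timeoutSeconds <= 0.0f || elapsedSeconds >= timeoutSeconds)
        return 0.0f;
    return std::min(1.0f, timeoutSeconds - elapsedSeconds);
}
```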
+ if (m_colorFadeTimeout > 0.f) + { + m_colorFadeTimer += static_cast(timer.GetElapsedSeconds()); + if (m_colorFadeTimer < m_colorFadeTimeout) + { + float colorFadeFactor = min(1.f, m_colorFadeTimeout - m_colorFadeTimer); + m_constantBufferData.colorFadeFactor = XMFLOAT4(colorFadeFactor, colorFadeFactor, colorFadeFactor, 1.f); + } + else + { + m_constantBufferData.colorFadeFactor = XMFLOAT4(0.f, 0.f, 0.f, 0.f); + m_colorFadeTimer = m_colorFadeTimeout = -1.f; + } + } + + // The transform is updated relative to a SpatialCoordinateSystem. In the SurfaceMesh class, we + // expect to be given the same SpatialCoordinateSystem that will be used to generate view + // matrices, because this class uses the surface mesh for rendering. + // Other applications could potentially involve using a SpatialCoordinateSystem from a stationary + // reference frame that is being used for physics simulation, etc. + auto tryTransform = m_meshProperties.coordinateSystem ? m_meshProperties.coordinateSystem->TryGetTransformTo(baseCoordinateSystem) : nullptr; + if (tryTransform != nullptr) + { + // If the transform can be acquired, this spatial mesh is valid right now and + // we have the information we need to draw it this frame. + transform = XMLoadFloat4x4(&tryTransform->Value); + m_lastActiveTime = static_cast(timer.GetTotalSeconds()); + } + else + { + // If the transform cannot be acquired, the spatial mesh is not valid right now + // because its location cannot be correlated to the current space. + m_isActive = false; + } + } + + if (!m_isActive) + { + // If for any reason the surface mesh is not active this frame - whether because + // it was not included in the observer's collection, or because its transform was + // not located - we don't have the information we need to update it. + return; + } + + // Set up a transform from surface mesh space, to world space. + XMMATRIX scaleTransform = XMMatrixScalingFromVector(XMLoadFloat3(&m_meshProperties.vertexPositionScale)); + + XMStoreFloat4x4(&m_constantBufferData.modelToWorld, XMMatrixTranspose(scaleTransform * transform)); + + // Surface meshes come with normals, which are also transformed from surface mesh space, to world space. + XMMATRIX normalTransform = transform; + + // Normals are not translated, so we remove the translation component here. + normalTransform.r[3] = XMVectorSet(0.f, 0.f, 0.f, XMVectorGetW(normalTransform.r[3])); + XMStoreFloat4x4(&m_constantBufferData.normalToWorld, XMMatrixTranspose(normalTransform)); + + if (!m_constantBufferCreated) + { + // If loading is not yet complete, we cannot actually update the graphics resources. + // This return is intentionally placed after the surface mesh updates so that this + // code may be copied and re-used for CPU-based processing of surface data. + CreateDeviceDependentResources(device); + return; + } + + context->UpdateSubresource(m_modelTransformBuffer.Get(), 0, NULL, &m_constantBufferData, 0, 0); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void SurfaceMesh::Draw(ID3D11Device* device, ID3D11DeviceContext* context, bool usingVprtShaders, bool isStereo) +{ + // + // Does an indexed, instanced draw call after setting the IA stage to use the mesh's geometry, and + // after setting up the constant buffer for the surface mesh. + // The caller is responsible for the rest of the shader pipeline. 
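The draw call below relies on the vertex layout declared in `RealtimeSurfaceMeshRenderer::CreateDeviceDependentResources`: 16-bit normalized positions in slot 0 and 8-bit normalized normals in slot 1, which match the default HoloLens mesh formats. A small sanity check one could add (illustrative; if the app overrides the mesh formats these sizes change):

```cpp
// DXGI_FORMAT_R16G16B16A16_SNORM positions (8 bytes), DXGI_FORMAT_R8G8B8A8_SNORM normals (4 bytes).
static bool StridesMatchInputLayout(unsigned int vertexStride, unsigned int normalStride)
{
    return vertexStride == 4 * sizeof(short) && normalStride == 4 * sizeof(unsigned char);
}
```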
+ + if (!m_constantBufferCreated || !m_loadingComplete) + { + // Resources are still being initialized. + return; + } + + if (!m_isActive) + { + // Mesh is not active this frame, and should not be drawn. + return; + } + + // The vertices are provided in {vertex, normal} format + + UINT strides [] = { m_meshProperties.vertexStride, m_meshProperties.normalStride }; + UINT offsets [] = { 0, 0 }; + ID3D11Buffer* buffers [] = { m_vertexPositions.Get(), m_vertexNormals.Get() }; + + context->IASetVertexBuffers(0, ARRAYSIZE(buffers), buffers, strides, offsets); + context->IASetIndexBuffer(m_triangleIndices.Get(), m_meshProperties.indexFormat, 0); + context->VSSetConstantBuffers(0, 1, m_modelTransformBuffer.GetAddressOf()); + + if (!usingVprtShaders) + { + context->GSSetConstantBuffers(0, 1, m_modelTransformBuffer.GetAddressOf()); + } + + context->PSSetConstantBuffers(0, 1, m_modelTransformBuffer.GetAddressOf()); + + context->DrawIndexedInstanced( + m_meshProperties.indexCount, // Index count per instance. + isStereo ? 2 : 1, // Instance count. + 0, // Start index location. + 0, // Base vertex location. + 0 // Start instance location. + ); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void SurfaceMesh::CreateDirectXBuffer(ID3D11Device * device, D3D11_BIND_FLAG binding, IBuffer ^ buffer, ID3D11Buffer ** target) +{ + auto length = buffer->Length; + + CD3D11_BUFFER_DESC bufferDescription(buffer->Length, binding); + D3D11_SUBRESOURCE_DATA bufferBytes = { GetDataFromIBuffer(buffer), 0, 0 }; + device->CreateBuffer(&bufferDescription, &bufferBytes, target); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void SurfaceMesh::CreateUpdateVertexResourcesWorkerThread() +{ + HRESULT hr; + + m_hEventExitUpdateVertexResourcesWorkerThread = CreateEventW(NULL, TRUE, FALSE, NULL); + if (!m_hEventExitUpdateVertexResourcesWorkerThread) { + hr = HRESULT_FROM_WIN32(GetLastError()); + } + + m_hThread = CreateThread(NULL, 0, _ThreadProc, this, 0, &m_dwThreadID); + if (!m_hThread) { + hr = HRESULT_FROM_WIN32(GetLastError()); + } +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void SurfaceMesh::ExitUpdateVertexResourcesWorkerThread() +{ + HRESULT hr = S_OK; + DWORD dwTimeout = INFINITE; + DWORD rc; + + if ((!m_hEventExitUpdateVertexResourcesWorkerThread) || (!m_hThread)) + return; + + rc = SignalObjectAndWait(m_hEventExitUpdateVertexResourcesWorkerThread, m_hThread, dwTimeout, FALSE); + switch (rc) { + case WAIT_OBJECT_0: + hr = S_OK; + break; + + case WAIT_ABANDONED: + case WAIT_IO_COMPLETION: + case WAIT_TIMEOUT: + hr = E_FAIL; + break; + + case WAIT_FAILED: + hr = HRESULT_FROM_WIN32(GetLastError()); + break; + } + + CloseHandle(m_hThread); + m_hThread = nullptr; + + CloseHandle(m_hEventExitUpdateVertexResourcesWorkerThread); + m_hEventExitUpdateVertexResourcesWorkerThread = nullptr; +} +/***********************************************************************************************************/ +/* */ 
+/***********************************************************************************************************/ +DWORD SurfaceMesh::UpdateVertexResourcesWorkerThread(void) +{ + const DWORD dwSleep = 0; + + for (;;) { + if (WaitForSingleObject(m_hEventExitUpdateVertexResourcesWorkerThread, dwSleep) == WAIT_OBJECT_0) { + break; + } + + SpatialSurfaceMesh^ surfaceMesh = nullptr; + ID3D11Device* device = nullptr; + + EnterCriticalSection(&m_csMeshResources); + if (m_smud.surfaceMesh != nullptr) { + surfaceMesh = std::move(m_smud.surfaceMesh); + device = m_smud.pd3dDevice; + + m_smud.surfaceMesh = nullptr; + m_smud.pd3dDevice = nullptr; + } + LeaveCriticalSection(&m_csMeshResources); + + if (surfaceMesh && device) { + UpdateVertexResourcesTask(device, surfaceMesh); + surfaceMesh = nullptr; + } + + Sleep(1); + } + + return 0; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +inline byte* GetPointerToBufferData(IBuffer^ buffer) +{ + ComPtr insp(reinterpret_cast(buffer)); + + // Query the IBufferByteAccess interface. + ComPtr bufferByteAccess; + insp.As(&bufferByteAccess); + + byte* pixels = nullptr; + bufferByteAccess->Buffer(&pixels); + return pixels; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void SurfaceMesh::UpdateVertexResourcesTask(ID3D11Device* device, Windows::Perception::Spatial::Surfaces::SpatialSurfaceMesh^ surfaceMesh) +{ + // Before updating the meshes, check to ensure that there wasn't a more recent update. + auto meshUpdateTime = surfaceMesh->SurfaceInfo->UpdateTime; + + if (meshUpdateTime.UniversalTime <= m_lastUpdateTime.UniversalTime) + return; + + // Create new Direct3D device resources for the updated buffers. These will be set aside + // for now, and then swapped into the active slot next time the render loop is ready to draw. + + // First, we acquire the raw data buffers. + Windows::Storage::Streams::IBuffer^ positions = surfaceMesh->VertexPositions->Data; + Windows::Storage::Streams::IBuffer^ normals = surfaceMesh->VertexNormals->Data; + Windows::Storage::Streams::IBuffer^ indices = surfaceMesh->TriangleIndices->Data; + + // Then, we create Direct3D device buffers with the mesh data provided by HoloLens. + Microsoft::WRL::ComPtr updatedVertexPositions; + Microsoft::WRL::ComPtr updatedVertexNormals; + Microsoft::WRL::ComPtr updatedTriangleIndices; + + CreateDirectXBuffer(device, D3D11_BIND_VERTEX_BUFFER, positions, updatedVertexPositions.GetAddressOf()); + CreateDirectXBuffer(device, D3D11_BIND_VERTEX_BUFFER, normals, updatedVertexNormals.GetAddressOf()); + CreateDirectXBuffer(device, D3D11_BIND_INDEX_BUFFER, indices, updatedTriangleIndices.GetAddressOf()); + + { + CSLock lock(&m_csMeshResources); + + // Prepare to swap in the new meshes. + // Here, we use ComPtr.Swap() to avoid unnecessary overhead from ref counting. + m_updatedVertexPositions.Swap(updatedVertexPositions); + m_updatedVertexNormals.Swap(updatedVertexNormals); + m_updatedTriangleIndices.Swap(updatedTriangleIndices); + + // Cache properties for the buffers we will now use. 
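The `m_updated*` buffers swapped in above and the properties cached just below form one half of a producer/consumer hand-off: the worker thread publishes fresh data under the critical section and raises `m_updateReady`, and the render thread swaps it in through `SwapVertexBuffers()` at a safe point. The same pattern in generic form, sketched with `std::mutex` in place of `CRITICAL_SECTION` (the class name is made up):

```cpp
#include <mutex>
#include <utility>

template <typename T>
class PendingSlot
{
public:
    // Producer (worker thread): publish a freshly built value.
    void Publish(T&& value)
    {
        std::lock_guard<std::mutex> lock(m_mutex);
        m_pending = std::move(value);
        m_ready = true;
    }

    // Consumer (render thread): swap the pending value in, if there is one.
    bool TryConsume(T& active)
    {
        std::lock_guard<std::mutex> lock(m_mutex);
        if (!m_ready)
            return false;
        active = std::move(m_pending);
        m_ready = false;
        return true;
    }

private:
    std::mutex m_mutex;
    T          m_pending{};
    bool       m_ready = false;
};
```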
+ m_updatedMeshProperties.coordinateSystem = surfaceMesh->CoordinateSystem; + m_updatedMeshProperties.vertexPositionScale = surfaceMesh->VertexPositionScale; + m_updatedMeshProperties.vertexStride = surfaceMesh->VertexPositions->Stride; + m_updatedMeshProperties.normalStride = surfaceMesh->VertexNormals->Stride; + m_updatedMeshProperties.indexCount = surfaceMesh->TriangleIndices->ElementCount; + m_updatedMeshProperties.indexFormat = static_cast(surfaceMesh->TriangleIndices->Format); + + // Send a signal to the render loop indicating that new resources are available to use. + m_updateReady = true; + m_lastUpdateTime = meshUpdateTime; + m_loadingComplete = true; + + // + // retrieving and saving vertex data from the GPU is expensive. When debugging and + // to ease CPU and GPU impact, disable the below block if not part of investigation. +#ifndef DISABLE_SAVE_VERTEX_DATA + // + // save vertex data information + bool fSuccess = false; // pessimistic + auto bound = surfaceMesh->SurfaceInfo->TryGetBounds(m_baseCoordinateSystem); + if (bound != nullptr) { + // + // get the raw vertex information + unsigned int uVertexBufferSize = positions->Length; + unsigned int uNormalBufferSize = normals->Length; + unsigned int uIndexBufferSize = indices->Length; + + unsigned int uBufferSize = sizeof(SurfaceMeshStreamHeader) + uVertexBufferSize; + + // + // TODO: add normals and indices as needed + // uBufferSize += (uNormalBufferSize + uIndexBufferSize); + + m_memBuffer.LockRWAccess(); + + unsigned char* ptr = (unsigned char*)m_memBuffer.realloc(uBufferSize); + + if (ptr) { + SurfaceMeshStreamHeader* hdr = (SurfaceMeshStreamHeader*)ptr; + + memcpy(hdr->signature, (void*)SMSH_SIGNATURE, sizeof(hdr->signature)); + + hdr->scale = 1.26f / 504.0f; + hdr->center_x = bound->Value.Center.x; + hdr->center_y = bound->Value.Center.y; + hdr->center_z = bound->Value.Center.z; + hdr->orientation_x = bound->Value.Orientation.x; + hdr->orientation_y = bound->Value.Orientation.y; + hdr->orientation_z = bound->Value.Orientation.z; + hdr->orientation_w = bound->Value.Orientation.w; + + // + // copy vertices + hdr->uVertexBufferSize = uVertexBufferSize; + hdr->uNumVertices = (uVertexBufferSize / sizeof(short)) / 4; // one vertex equals to 4 positions of type short + memcpy(ptr + sizeof(SurfaceMeshStreamHeader), GetPointerToBufferData(positions), uVertexBufferSize); + + // + // TODO: copy normals and indices as needed + //hdr->uNormalBufferSize = uNormalBufferSize; + //hdr->uNumNormals = (uNormalBufferSize / sizeof(char)); + // memcpy(ptr + sizeof(SurfaceMeshStreamHeader) + uVertexBufferSize, GetPointerToBufferData(normals), uNormalBufferSize); + + //hdr->uIndexBufferSize = uIndexBufferSize; + //hdr->uNumIndices = (uIndexBufferSize / sizeof(short)); + // memcpy(ptr + sizeof(SurfaceMeshStreamHeader) + uVertexBufferSize, GetPointerToBufferData(indices), uIndexBufferSize); + + fSuccess = true; + } + + m_memBuffer.UnlockRWAccess(); + } +#else // #ifndef DISABLE_SAVE_VERTEX_DATA + #pragma message("====================== saving vertex data disabled ======================") +#endif // #ifndef DISABLE_SAVE_VERTEX_DATA + } +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void SurfaceMesh::UpdateVertexResources(ID3D11Device * device) +{ + SpatialSurfaceMesh^ surfaceMesh = std::move(m_pendingSurfaceMesh); + + if (!surfaceMesh || 
surfaceMesh->TriangleIndices->ElementCount < 3) + { + // Not enough indices to draw a triangle. + return; + } + + CSLock lock(&m_csMeshResources); + + // + // update data block for worker thread to pick up + m_smud.surfaceMesh = surfaceMesh; // std::move(surfaceMesh); + m_smud.pd3dDevice = device; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void SurfaceMesh::CreateDeviceDependentResources(ID3D11Device * device) +{ + UpdateVertexResources(device); + + // Create a constant buffer to control mesh position. + CD3D11_BUFFER_DESC constantBufferDesc(sizeof(ModelNormalConstantBuffer), D3D11_BIND_CONSTANT_BUFFER); + DX::ThrowIfFailed( + device->CreateBuffer(&constantBufferDesc, nullptr, &m_modelTransformBuffer + ) + ); + + m_constantBufferCreated = true; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void SurfaceMesh::ReleaseVertexResources() +{ + m_pendingSurfaceMesh = nullptr; + + m_meshProperties = {}; + m_vertexPositions.Reset(); + m_vertexNormals.Reset(); + m_triangleIndices.Reset(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void SurfaceMesh::SwapVertexBuffers() +{ + // Swap out the previous vertex position, normal, and index buffers, and replace + // them with up-to-date buffers. + m_vertexPositions = m_updatedVertexPositions; + m_vertexNormals = m_updatedVertexNormals; + m_triangleIndices = m_updatedTriangleIndices; + + // Swap out the metadata: index count, index format, . + m_meshProperties = m_updatedMeshProperties; + + m_updatedMeshProperties = {}; + m_updatedVertexPositions.Reset(); + m_updatedVertexNormals.Reset(); + m_updatedTriangleIndices.Reset(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void SurfaceMesh::ReleaseDeviceDependentResources() +{ + // Clear out any pending resources. + SwapVertexBuffers(); + + // Clear out active resources. + ReleaseVertexResources(); + + m_modelTransformBuffer.Reset(); + + m_constantBufferCreated = false; + m_loadingComplete = false; +} diff --git a/windows/MSRHoloLensSpatialMapping/Content/SurfaceMesh.h b/windows/MSRHoloLensSpatialMapping/Content/SurfaceMesh.h new file mode 100644 index 0000000..91c3002 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/SurfaceMesh.h @@ -0,0 +1,150 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. 
+// +//********************************************************* + +#pragma once + +#include "Common\DeviceResources.h" +#include "ShaderStructures.h" +#include + +using namespace Windows::Storage::Streams; + +namespace HoloLensNavigation +{ + #define SMSH_SIGNATURE "SMSHSIG_" + + struct SurfaceMeshStreamHeader + { + unsigned char signature[8]; + float scale; + float center_x; + float center_y; + float center_z; + float orientation_x; + float orientation_y; + float orientation_z; + float orientation_w; + unsigned int uVertexBufferSize; + unsigned int uNumVertices; + }; + + struct SurfaceMeshProperties + { + Windows::Perception::Spatial::SpatialCoordinateSystem^ coordinateSystem = nullptr; + Windows::Foundation::Numerics::float3 vertexPositionScale = Windows::Foundation::Numerics::float3::one(); + unsigned int vertexStride = 0; + unsigned int normalStride = 0; + unsigned int indexCount = 0; + DXGI_FORMAT indexFormat = DXGI_FORMAT_UNKNOWN; + }; + + struct SurfaceMeshUpdateData + { + Windows::Perception::Spatial::Surfaces::SpatialSurfaceMesh^ surfaceMesh; + ID3D11Device* pd3dDevice; + }; + + + class SurfaceMesh final + { + public: + SurfaceMesh(); + ~SurfaceMesh(); + + void UpdateSurface(Windows::Perception::Spatial::Surfaces::SpatialSurfaceMesh^ surface); + void UpdateTransform( + ID3D11Device* device, + ID3D11DeviceContext* context, + DX::StepTimer const& timer, + Windows::Perception::Spatial::SpatialCoordinateSystem^ baseCoordinateSystem + ); + + void Draw(ID3D11Device* device, ID3D11DeviceContext* context, bool usingVprtShaders, bool isStereo); + + void UpdateVertexResources(ID3D11Device* device); + void CreateDeviceDependentResources(ID3D11Device* device); + void ReleaseVertexResources(); + void ReleaseDeviceDependentResources(); + + const bool& GetIsActive() const { return m_isActive; } + const float& GetLastActiveTime() const { return m_lastActiveTime; } + const Windows::Foundation::DateTime& GetLastUpdateTime() const { return m_lastUpdateTime; } + + void SetIsActive(const bool& isActive) { m_isActive = isActive; } + void SetColorFadeTimer(const float& duration) { m_colorFadeTimeout = duration; m_colorFadeTimer = 0.f; } + + public: + _inline StaticMemoryBuffer& GetVertexMemoryBuffer(void) { return m_memBuffer; } + + private: + void SwapVertexBuffers(); + void CreateDirectXBuffer( + ID3D11Device* device, + D3D11_BIND_FLAG binding, + Windows::Storage::Streams::IBuffer^ buffer, + ID3D11Buffer** target + ); + + Windows::Perception::Spatial::Surfaces::SpatialSurfaceMesh^ m_pendingSurfaceMesh = nullptr; + + Microsoft::WRL::ComPtr m_vertexPositions; + Microsoft::WRL::ComPtr m_vertexNormals; + Microsoft::WRL::ComPtr m_triangleIndices; + Microsoft::WRL::ComPtr m_updatedVertexPositions; + Microsoft::WRL::ComPtr m_updatedVertexNormals; + Microsoft::WRL::ComPtr m_updatedTriangleIndices; + Microsoft::WRL::ComPtr m_modelTransformBuffer; + + Windows::Foundation::DateTime m_lastUpdateTime; + + SurfaceMeshProperties m_meshProperties; + SurfaceMeshProperties m_updatedMeshProperties; + + ModelNormalConstantBuffer m_constantBufferData; + + bool m_constantBufferCreated = false; + bool m_loadingComplete = false; + bool m_updateReady = false; + bool m_isActive = false; + float m_lastActiveTime = -1.f; + float m_colorFadeTimer = -1.f; + float m_colorFadeTimeout = -1.f; + + private: + Windows::Perception::Spatial::SpatialCoordinateSystem^ m_baseCoordinateSystem = nullptr; + + // + // we'll use a memory buffer which only grows in size as needed. 
Because we collect + // vertex data over and over, re-using previously allocated memory will improve + // performance. + StaticMemoryBuffer m_memBuffer; + SurfaceMeshStreamHeader m_emptySMSH; + + // + // To increase overall performance, use a worker thread to update new vertex, normal and index buffers. + static DWORD WINAPI _ThreadProc(LPVOID lpParameter) { return ((SurfaceMesh*)lpParameter)->UpdateVertexResourcesWorkerThread(); } + DWORD UpdateVertexResourcesWorkerThread(void); + void CreateUpdateVertexResourcesWorkerThread(); + void ExitUpdateVertexResourcesWorkerThread(); + void UpdateVertexResourcesTask(ID3D11Device* device, Windows::Perception::Spatial::Surfaces::SpatialSurfaceMesh^ surfaceMesh); + + HANDLE m_hThread; + DWORD m_dwThreadID; + HANDLE m_hEventExitUpdateVertexResourcesWorkerThread; + + CRITICAL_SECTION m_csMeshResources; + CRITICAL_SECTION m_csUpdateVertexResources; + bool m_fClosing; + + SurfaceMeshUpdateData m_smud; + }; + +} // namespace HoloLensNavigation diff --git a/windows/MSRHoloLensSpatialMapping/Content/SurfaceVPRTVertexShader.hlsl b/windows/MSRHoloLensSpatialMapping/Content/SurfaceVPRTVertexShader.hlsl new file mode 100644 index 0000000..ac960d7 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/SurfaceVPRTVertexShader.hlsl @@ -0,0 +1,82 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +// A constant buffer that stores per-mesh data. +cbuffer ModelConstantBuffer : register(b0) +{ + float4x4 modelToWorld; + min16float4x4 normalToWorld; + min16float4 colorFadeFactor; +}; + +// A constant buffer that stores each set of view and projection matrices in column-major format. +cbuffer ViewProjectionConstantBuffer : register(b1) +{ + float4 cameraPosition; + float4 lightPosition; + float4x4 viewProjection[2]; +}; + +// Per-vertex data used as input to the vertex shader. +struct VertexShaderInput +{ + min16float3 pos : POSITION; + min16float3 norm : NORMAL0; + uint instId : SV_InstanceID; +}; + +// Per-vertex data passed to the geometry shader. +// Note that the render target array index is set here in the vertex shader. +struct VertexShaderOutput +{ + min16float4 screenPos : SV_POSITION; + min16float3 worldPos : POSITION0; + min16float3 worldNorm : NORMAL0; + min16float3 color : COLOR0; + uint idx : TEXCOORD0; + uint rtvId : SV_RenderTargetArrayIndex; // SV_InstanceID % 2 +}; + +// Simple shader to do vertex processing on the GPU. +VertexShaderOutput main(VertexShaderInput input) +{ + VertexShaderOutput output; + float4 pos = float4(input.pos, 1.0f); + + // Note which view this vertex has been sent to. Used for matrix lookup. + // Taking the modulo of the instance ID allows geometry instancing to be used + // along with stereo instanced drawing; in that case, two copies of each + // instance would be drawn, one for left and one for right. + int idx = input.instId % 2; + + // Transform the vertex position into world space. + pos = mul(pos, modelToWorld); + + // Store the world position. + output.worldPos = (min16float3)pos; + + // Correct for perspective and project the vertex position onto the screen. 
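`viewProjection[idx]`, used on the next line, is indexed by `SV_InstanceID % 2`, so the app uploads one combined view-projection matrix per eye. The cbuffer above is declared column-major, which is why app-side code stores DirectXMath matrices transposed. A hedged sketch of that upload (the struct mirrors `ShaderStructures.h`; the function itself is illustrative):

```cpp
#include <DirectXMath.h>

struct ViewProjectionConstantBuffer
{
    DirectX::XMFLOAT4   cameraPosition;
    DirectX::XMFLOAT4   lightPosition;
    DirectX::XMFLOAT4X4 viewProjection[2];
};

// Store one view*projection matrix per eye, transposed for the column-major cbuffer.
void StoreViewProjection(ViewProjectionConstantBuffer& cb,
                         DirectX::FXMMATRIX leftViewProjection,
                         DirectX::CXMMATRIX rightViewProjection)
{
    DirectX::XMStoreFloat4x4(&cb.viewProjection[0], DirectX::XMMatrixTranspose(leftViewProjection));
    DirectX::XMStoreFloat4x4(&cb.viewProjection[1], DirectX::XMMatrixTranspose(rightViewProjection));
}
```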
+ pos = mul(pos, viewProjection[idx]); + output.screenPos = (min16float4)pos; + + // Pass a color. + output.color = min16float3(1.f, 1.f, 1.f); + + // Set the render target array index. + output.rtvId = idx; + output.idx = idx; + + // Compute the normal. + min16float4 normalVector = min16float4(input.norm, min16float(0.f)); + output.worldNorm = normalize((min16float3)mul(normalVector, normalToWorld)); + + return output; +} diff --git a/windows/MSRHoloLensSpatialMapping/Content/SurfaceVertexShader.hlsl b/windows/MSRHoloLensSpatialMapping/Content/SurfaceVertexShader.hlsl new file mode 100644 index 0000000..b384ae2 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Content/SurfaceVertexShader.hlsl @@ -0,0 +1,82 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +// A constant buffer that stores per-mesh data. +cbuffer ModelConstantBuffer : register(b0) +{ + float4x4 modelToWorld; + min16float4x4 normalToWorld; + min16float4 colorFadeFactor; +}; + +// A constant buffer that stores each set of view and projection matrices in column-major format. +cbuffer ViewProjectionConstantBuffer : register(b1) +{ + float4 cameraPosition; + float4 lightPosition; + float4x4 viewProjection[2]; +}; + +// Per-vertex data used as input to the vertex shader. +struct VertexShaderInput +{ + min16float3 pos : POSITION; + min16float3 norm : NORMAL0; + uint instId : SV_InstanceID; +}; + +// Per-vertex data passed to the geometry shader. +// Note that the render target array index will be set by the geometry shader +// using the value of viewId. +struct VertexShaderOutput +{ + min16float4 screenPos : SV_POSITION; + min16float3 worldPos : POSITION0; + min16float3 worldNorm : NORMAL0; + min16float3 color : COLOR0; + uint viewId : TEXCOORD0; // SV_InstanceID % 2 +}; + +// Simple shader to do vertex processing on the GPU. +VertexShaderOutput main(VertexShaderInput input) +{ + VertexShaderOutput output; + float4 pos = float4(input.pos, 1.0f); + + // Note which view this vertex has been sent to. Used for matrix lookup. + // Taking the modulo of the instance ID allows geometry instancing to be used + // along with stereo instanced drawing; in that case, two copies of each + // instance would be drawn, one for left and one for right. + int idx = input.instId % 2; + + // Transform the vertex position into world space. + pos = mul(pos, modelToWorld); + + // Store the world position. + output.worldPos = (min16float3)pos; + + // Correct for perspective and project the vertex position onto the screen. + pos = mul(pos, viewProjection[idx]); + output.screenPos = (min16float4)pos; + + // Pass a color. + output.color = min16float3(1.f, 1.f, 1.f); + + // Set the instance ID. The pass-through geometry shader will set the + // render target array index to whatever value is set here. + output.viewId = idx; + + // Compute the normal. 
+ min16float4 normalVector = min16float4(input.norm, min16float(0.f)); + output.worldNorm = normalize((min16float3)mul(normalVector, normalToWorld)); + + return output; +} diff --git a/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMapping.cpp b/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMapping.cpp new file mode 100644 index 0000000..be43df5 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMapping.cpp @@ -0,0 +1,641 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "pch.h" +#include "MSRHololensSpatialMappingMain.h" +#include "Common\DirectXHelper.h" + +#include +#include + +using namespace HoloLensNavigation; + +using namespace concurrency; +using namespace Microsoft::WRL; +using namespace Platform; +using namespace Windows::Foundation::Collections; +using namespace Windows::Foundation::Numerics; +using namespace Windows::Graphics::Holographic; +using namespace Windows::Networking; +using namespace Windows::Perception::Spatial; +using namespace Windows::System::Threading; + +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +Windows::Networking::Sockets::StreamSocketListener^ HoloLensNavigationMain::StartListener(ListenerContext::OnConnectionEvent^ onConnectionEvent, Platform::String^ port) +{ + Sockets::StreamSocketListener^ listener = ref new Sockets::StreamSocketListener(); + listener->ConnectionReceived += ref new Windows::Foundation::TypedEventHandler(onConnectionEvent); + listener->Control->KeepAlive = false; + + create_task(listener->BindServiceNameAsync(port)).then([this](task previousTask) + { + try + { + // Try getting an exception. + previousTask.get(); + } + catch (COMException^ exception) + { + // HRESULT:0x80072740 Only one usage of each socket address (protocol/network address/port) is normally permitted. 
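`0x80072740` is `HRESULT_FROM_WIN32(WSAEADDRINUSE)` (Winsock error 10048), meaning the port is already bound by another listener. Naming the constant makes the comparison below self-describing; the original code keeps the raw literal, so this is only a suggestion:

```cpp
// HRESULT_FROM_WIN32(WSAEADDRINUSE): Win32 facility 0x8007 plus error code 0x2740 (10048).
constexpr HRESULT kAddressAlreadyInUse = static_cast<HRESULT>(0x80072740);
```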
+ if (exception->HResult == 0x80072740) + { + throw; + } + } + catch (Exception^ exception) + { + } + }); + + return listener; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void ListenerContext::OnConnection(Windows::Networking::Sockets::StreamSocketListener^ listener, Windows::Networking::Sockets::StreamSocketListenerConnectionReceivedEventArgs^ object) +{ + DebugMsgW(L"Socket connected"); + + m_writer = ref new Windows::Storage::Streams::DataWriter(object->Socket->OutputStream); + m_reader = ref new Windows::Storage::Streams::DataReader(object->Socket->InputStream); + m_socket = object->Socket; + + m_mappingmain->m_internalState = InternalStatus::READY_TO_RECEIVE; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +bool HoloLensNavigationMain::InternalUpdate(HolographicFrame^ holographicFrame) +{ + HolographicFramePrediction^ prediction = holographicFrame->CurrentPrediction; + SpatialCoordinateSystem^ stationaryCoordinateSystem = m_stationaryReferenceFrame->CoordinateSystem; + SpatialCoordinateSystem^ currentCoordinateSystem = m_referenceFrame->GetStationaryCoordinateSystemAtTimestamp(prediction->Timestamp); + + if ((m_internalState != PROCESSING) && (m_internalState != UNCONNECTED)) { + return false; + } + + // create new anchor + EnumAskedState tempState = EnumAskedState::Default_Ask_Pose; + if (m_askedState != EnumAskedState::Default_Ask_Pose) { + // for async processing of data receiving + // should be checked the connection timing + m_replyedState = EnumRepliedState::Reply_Anchor_Pose_Map; + tempState = m_askedState; + m_askedState = EnumAskedState::Default_Ask_Pose; + } + + // HoloLens pose update + auto cameraPose = prediction->CameraPoses->GetAt(0); + Platform::IBox^ viewTransformContainerStatic = cameraPose->TryGetViewTransform(stationaryCoordinateSystem); + bool viewTransformAcquired = viewTransformContainerStatic != nullptr; + + if (viewTransformAcquired) //get Camera pose in m_baseAnchor coordinate system, + { + HolographicStereoTransform viewCoordinateSystemTransform = viewTransformContainerStatic->Value; + float4x4 camPosition = viewCoordinateSystemTransform.Left; + + if (m_baseAnchor != nullptr) { + float4x4 anchorSpaceToCurrentCoordinateSystem; + SpatialCoordinateSystem^ anchorSpace = m_baseAnchor->CoordinateSystem; + const auto tryTransform = anchorSpace->TryGetTransformTo(stationaryCoordinateSystem); + + if (tryTransform != nullptr) + { + anchorSpaceToCurrentCoordinateSystem = tryTransform->Value; + camPosition = anchorSpaceToCurrentCoordinateSystem * camPosition;//->camPotition: base coordinates(spatial anchor coordinates)2camera coordinates + } + } + + bool invertible = Windows::Foundation::Numerics::invert(camPosition, &m_curPositionFromAnchor); + + // distance from anchor to camera check. 
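The block below compares the squared distance of the translation part of `m_curPositionFromAnchor` against `thres = 4.0 * 4.0`, so a new or previously created anchor is considered once the device has moved more than about 4 m from the current one; comparing squared values avoids a per-frame square root. As a standalone helper (illustrative only):

```cpp
// True once the point (xp, yp, zp) lies outside a sphere of the given radius around the anchor.
static bool MovedBeyond(float xp, float yp, float zp, float radiusMeters)
{
    return (xp * xp + yp * yp + zp * zp) > (radiusMeters * radiusMeters);
}
```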
+ // base anchor is newly created or changed another one created before + if (invertible) + { + float xp = m_curPositionFromAnchor.m41, yp = m_curPositionFromAnchor.m42, zp = m_curPositionFromAnchor.m43; + float thres = 4.0 * 4.0; + + if (m_baseAnchor != nullptr && (xp * xp + yp * yp + zp * zp > thres)) { + // get anchor map + auto anchorMap = m_spatialAnchorHelper->GetAnchorMap(); + double minimumerr = -1; + IKeyValuePair^ bestPair; + + for each (auto & pair in anchorMap) { + SpatialAnchor^ candidateAnchor = pair->Value; + float4x4 anchorSpaceToCurrentCoordinateSystem; + SpatialCoordinateSystem^ anchorSpace = candidateAnchor->CoordinateSystem; + const auto tryTransform = anchorSpace->TryGetTransformTo(stationaryCoordinateSystem); + float4x4 camPos_inAnchor; + + if (tryTransform != nullptr) + { + anchorSpaceToCurrentCoordinateSystem = tryTransform->Value; + camPos_inAnchor = anchorSpaceToCurrentCoordinateSystem * viewCoordinateSystemTransform.Left;//->camPotition: base coordinates(spatial anchor coordinates)2camera coordinates + float4x4 camPosInv; + bool invertible_ = Windows::Foundation::Numerics::invert(camPos_inAnchor, &camPosInv); + + if (invertible_) { + float xp = camPosInv.m41, yp = camPosInv.m42, zp = camPosInv.m43; + double err = xp * xp + yp * yp + zp * zp; + + if (err < minimumerr || minimumerr < 0) { + bestPair = pair; + minimumerr = err; + } + } + } + } + + if (minimumerr > 0 && minimumerr < thres) { + // update anchor position pre-created + m_baseAnchor = bestPair->Value; + m_AnchorKey = bestPair->Key; + m_replyedState = EnumRepliedState::Reply_Anchor_Pose; + } + else { + // create new anchor and rendering map + m_replyedState = EnumRepliedState::Reply_Anchor_Pose_Map; + } + } + } + + // new anchor create + if (m_replyedState == EnumRepliedState::Reply_Anchor_Pose_Map) { + Platform::IBox^ viewTransformContainer = cameraPose->TryGetViewTransform(currentCoordinateSystem); + bool viewTransformAcquired = viewTransformContainer != nullptr; + + if (viewTransformAcquired) + { + HolographicStereoTransform viewCoordinateSystemTransform = viewTransformContainer->Value; + float4x4 camPosition2 = viewCoordinateSystemTransform.Left; + float4x4 viewInverse; + bool invertible = Windows::Foundation::Numerics::invert(camPosition2, &viewInverse); + + if (invertible) + { + const float3 campos(viewInverse.m41, viewInverse.m42, viewInverse.m43); + float rad = sqrt(viewInverse.m31 * viewInverse.m31 + viewInverse.m33 * viewInverse.m33); + float theta = acos(viewInverse.m33 / rad); + theta = viewInverse.m31 < 0 ? -theta : theta; + const float3 camdirection(viewInverse.m31 / rad, 0, viewInverse.m33 / rad); + const quaternion q(0, sin(theta / 2), 0, cos(theta / 2)); + SpatialAnchor^ anchor = SpatialAnchor::TryCreateRelativeTo(currentCoordinateSystem, campos, q); + + if ((anchor != nullptr) && (m_spatialAnchorHelper != nullptr)) + { + if (tempState == EnumAskedState::Refresh_Anchor_And_Render_Map) + { + m_spatialAnchorHelper->ClearAnchorStore(); + m_spatialId = 0; + } + else { + m_spatialId++; + } + + auto anchorMap = m_spatialAnchorHelper->GetAnchorMap(); + + // Create an identifier for the anchor. 
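The orientation passed to `SpatialAnchor::TryCreateRelativeTo` a few lines above, `q = (0, sin(theta / 2), 0, cos(theta / 2))`, is a pure yaw rotation about the +Y axis by the heading angle `theta` extracted from the inverted view matrix. With `WindowsNumerics.h` the same quaternion can be written more explicitly (a sketch, assuming the projection's `make_quaternion_from_axis_angle` helper):

```cpp
using namespace Windows::Foundation::Numerics;

// Equivalent to quaternion(0, sin(theta / 2), 0, cos(theta / 2)): yaw about +Y by theta radians.
quaternion MakeYawOnlyOrientation(float theta)
{
    return make_quaternion_from_axis_angle(float3(0.0f, 1.0f, 0.0f), theta);
}
```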
+ std::wstringstream ss; + ss << "anchor_" << m_spatialId; + std::wstring w_char = ss.str(); + + m_AnchorKey = ref new String(w_char.c_str()); + + if (m_fVerbose) { + OutputDebugStringA((std::string("State: ") + std::to_string(m_internalState) + "\n").c_str()); + OutputDebugStringA((std::string("Anchor: ") + std::to_string(m_spatialId) + "\n").c_str()); + } + + SaveAppState(); + + { + if (m_baseAnchor == nullptr) { + m_baseAnchor = anchor; + } + const auto tryTransform = anchor->CoordinateSystem->TryGetTransformTo(m_baseAnchor->CoordinateSystem); + m_initNewAnchorPositionFromPrevAnchor = tryTransform->Value; + m_baseAnchor = anchor; + currentCoordinateSystem = m_baseAnchor->CoordinateSystem; + anchorMap->Insert(m_AnchorKey->ToString(), anchor); + m_replyedState = EnumRepliedState::Reply_Anchor_Pose_Map; + } + } + } + + } + } + + Windows::Foundation::Numerics::invert(camPosition, &m_curPositionFromAnchor); + } + + return true; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::CollectSpatialMappinginformation() +{ + { + m_meshRenderer->CollectVertexData(); + + StaticMemoryBuffer& memBuffer = m_meshRenderer->GetVertexDataCollectionBuffer(); + + memBuffer.LockRWAccess(); + + unsigned int uBufferSize = memBuffer.getBufferSize(); + unsigned char * ptr = (unsigned char* )memBuffer.getPointer(); + + if ((uBufferSize > 0) && (ptr != nullptr)) { + double holoHeight; + Eigen::Vector3d holo2floor; + + // + // FloorDetection() is very CPU intensive. When debugging and to ease CPU impact, disable the + // call and assign temporary fixed values. +#ifndef DISABLE_FLOORDETECTION + FloorDetection(ptr, uBufferSize, holoHeight, holo2floor); +#else // #ifndef DISABLE_FLOORDETECTION + #pragma message("====================== FloorDetection() disabled ======================") + holoHeight = -1.31; + holo2floor << -0.163, 0.345, 1.251; +#endif // #ifndef DISABLE_FLOORDETECTION + + if (m_fVerbose) { + OutputDebugStringA((std::string("Height and floor: ") + std::to_string(holoHeight) + "," + + std::to_string(holo2floor(0)) + "," + + std::to_string(holo2floor(1)) + "," + + std::to_string(holo2floor(2)) + "," + + "\n").c_str()); + } + + m_floorAndHeight[0] = (float)holo2floor(0); + m_floorAndHeight[1] = (float)holo2floor(1); + m_floorAndHeight[2] = (float)holo2floor(2); + m_floorAndHeight[3] = (float)holoHeight; + } + + memBuffer.UnlockRWAccess(); + } + + if (m_internalState == PROCESSING) { + m_internalState = READY_TO_SEND; + } +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::LoadAnchorStore() +{ + m_spatialAnchorHelper = create_task(SpatialAnchorManager::RequestStoreAsync()) + .then([](task previousTask) + { + std::shared_ptr newHelper = nullptr; + + + try + { + SpatialAnchorStore^ anchorStore = previousTask.get(); + newHelper = std::shared_ptr(new SpatialAnchorHelper(anchorStore)); + newHelper->LoadFromAnchorStore(); + + } + catch (Exception^ exception) + { + OutputDebugStringA((std::string(__FUNCTION__) + " " + std::to_string(__LINE__) + "\n").c_str()); + OutputDebugStringW(exception->Message->Data()); + OutputDebugStringA("\n"); + } + + // Return the initialized class instance. 
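+        // Note that the .get() on the continuation below blocks the calling thread until
+        // RequestStoreAsync has completed, so LoadAnchorStore() returns only after any
+        // previously persisted anchors have been loaded into the helper.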
+ return newHelper; + }).get(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::StateReceiver() +{ + if (m_internalState != READY_TO_RECEIVE) + return; + + // receive data first, then send data + m_internalState = InternalStatus::RECEIVING; + + DataReader^ reader = m_listenercontext->m_reader; + Windows::Networking::Sockets::StreamSocket^ socket = m_listenercontext->m_socket; + + create_task(reader->LoadAsync(sizeof(UINT32))).then([this, reader, socket](unsigned int size) + { + if (size < sizeof(UINT32)) + { + // The underlying socket was closed before we were able to read the whole data. + cancel_current_task(); + } + + std::lock_guard guard(m_listenercontext->m_sockLock); + int receivedValue = -1; + + try + { + receivedValue = m_listenercontext->m_reader->ReadInt32(); + } + catch (Platform::COMException^ exception) + { + DebugMsgW(L"exception reading socket buffer!"); + return; + } + + this->m_askedState = EnumAskedState(receivedValue); + this->m_internalState = InternalStatus::PROCESSING; + + }).then([this](task readTask) + { + try + { + // Try getting an exception. + readTask.get(); + } + catch (Platform::COMException^ exception) + { + this->m_listenercontext->m_reader = nullptr; + this->m_listenercontext->m_writer = nullptr; + this->m_listenercontext->m_socket = nullptr; + } + catch (task_canceled&) + { + // this will usually happen because user closed the client socket. + + // Explicitly close the socket. + delete this->m_listenercontext->m_socket; + + this->m_listenercontext->m_reader = nullptr; + this->m_listenercontext->m_writer = nullptr; + this->m_listenercontext->m_socket = nullptr; + } + }); + + return; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::StateSender() +{ + if (m_internalState != READY_TO_SEND) + return; + + m_listenercontext->m_writer->WriteInt32(m_replyedState); + + switch (m_replyedState) { + case EnumRepliedState::Reply_Pose: + SendPose(); + break; + + case EnumRepliedState::Reply_Anchor_Pose: + SendAnchorID(); + SendPose(); + break; + + case EnumRepliedState::Reply_Anchor_Pose_Map: + SendInitAnchorPose(); + SendPointCloud(); + SendAnchorID(); + SendPose(); + break; + + default: + return; + } + + this->m_internalState = InternalStatus::SENDING; + + create_task(m_listenercontext->m_writer->StoreAsync()).then([this](task writeTask) + { + std::lock_guard guard(m_listenercontext->m_sockLock); + try + { + writeTask.get(); + this->m_internalState = InternalStatus::READY_TO_RECEIVE; + } + catch (Platform::COMException^ exception) + { + OutputDebugStringA((std::string(__FUNCTION__) + " " + std::to_string(__LINE__) + "\n").c_str()); + OutputDebugStringW(exception->Message->Data()); + OutputDebugStringA("\n"); + m_listenercontext->m_reader = nullptr; + m_listenercontext->m_writer = nullptr; + m_listenercontext->m_socket = nullptr; + } + }); + + m_replyedState = Reply_Pose; + + return; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void 
HoloLensNavigationMain::SendPose() +{ + Platform::Array^ buffer = ref new Platform::Array(sizeof(float) * 20); + float datamat[20] = + { m_curPositionFromAnchor.m11, m_curPositionFromAnchor.m12, m_curPositionFromAnchor.m13, m_curPositionFromAnchor.m14, + m_curPositionFromAnchor.m21, m_curPositionFromAnchor.m22, m_curPositionFromAnchor.m23, m_curPositionFromAnchor.m24, + m_curPositionFromAnchor.m31, m_curPositionFromAnchor.m32, m_curPositionFromAnchor.m33, m_curPositionFromAnchor.m34, + m_curPositionFromAnchor.m41, m_curPositionFromAnchor.m42, m_curPositionFromAnchor.m43, m_curPositionFromAnchor.m44, + m_floorAndHeight[0], m_floorAndHeight[1], m_floorAndHeight[2], m_floorAndHeight[3] }; + void* p = datamat; + + memcpy(buffer->Data, p, sizeof(float) * 20); + + m_listenercontext->m_writer->WriteBytes(buffer); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::SendInitAnchorPose() +{ + Platform::Array^ buffer = ref new Platform::Array(sizeof(float) * 16); + float datamat[16] = + { m_initNewAnchorPositionFromPrevAnchor.m11, m_initNewAnchorPositionFromPrevAnchor.m12, m_initNewAnchorPositionFromPrevAnchor.m13, m_initNewAnchorPositionFromPrevAnchor.m14, + m_initNewAnchorPositionFromPrevAnchor.m21, m_initNewAnchorPositionFromPrevAnchor.m22, m_initNewAnchorPositionFromPrevAnchor.m23, m_initNewAnchorPositionFromPrevAnchor.m24, + m_initNewAnchorPositionFromPrevAnchor.m31, m_initNewAnchorPositionFromPrevAnchor.m32, m_initNewAnchorPositionFromPrevAnchor.m33, m_initNewAnchorPositionFromPrevAnchor.m34, + m_initNewAnchorPositionFromPrevAnchor.m41, m_initNewAnchorPositionFromPrevAnchor.m42, m_initNewAnchorPositionFromPrevAnchor.m43, m_initNewAnchorPositionFromPrevAnchor.m44, }; + void* p = datamat; + + memcpy(buffer->Data, p, sizeof(float) * 16); + + m_listenercontext->m_writer->WriteBytes(buffer); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::SendAnchorID() +{ + m_listenercontext->m_writer->WriteInt32(m_listenercontext->m_writer->MeasureString(m_AnchorKey)); + + m_listenercontext->m_writer->WriteString(m_AnchorKey); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::SendPointCloud() +{ + StaticMemoryBuffer& memBuffer = m_meshRenderer->GetVertexDataCollectionBuffer(); + + memBuffer.LockRWAccess(); + + unsigned int uBufferSize = memBuffer.getBufferSize(); + unsigned char* ptr = (unsigned char*)memBuffer.getPointer(); + + m_listenercontext->m_writer->WriteInt32(uBufferSize); + m_listenercontext->m_writer->WriteBytes(Platform::ArrayReference(ptr, uBufferSize)); + + // DebugMsgW(L"======================= sent total buffer size: %u", uBufferSize); + + memBuffer.UnlockRWAccess(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::FloorDetection(unsigned char* buffer, unsigned int 
uBufferSize, double& HoloHeight, Eigen::Vector3d& floorpt) +{ + //// get plane and Height + //// around 1m x 1m + + //// ransac + int ransac_max = 100; + std::vector points; + std::vector indices; + Eigen::Vector3d bestn, bestp; + unsigned int cnt = 0; + int maxcnt = -1; + unsigned int uBufferPosition = 0; + + bestn << 1, 1, 1; + + while (true) + { + SurfaceMeshStreamHeader* hdr = (SurfaceMeshStreamHeader*)(buffer); + + if (memcmp(hdr->signature, (void*)SMSH_SIGNATURE, sizeof(hdr->signature)) != 0) { + DebugMsgW(L"FloorDetection() buffer misalignment at offset %u!\r\n", uBufferPosition); + break; + } + + Eigen::Vector3d center; + Eigen::Quaterniond orientation = Eigen::Quaterniond(hdr->orientation_w, hdr->orientation_x, hdr->orientation_y, hdr->orientation_z); + unsigned int uNumVertices = hdr->uNumVertices; + + center << hdr->center_x, hdr->center_y, hdr->center_z; + + if (false) { + OutputDebugStringA((std::to_string(uNumVertices) + "\n").c_str()); + } + + buffer += sizeof(SurfaceMeshStreamHeader); + + short * vertexBuffer = (short *)buffer; + for (unsigned int i = 0; i < uNumVertices; i++) + { + Eigen::Vector3d p; + p << vertexBuffer[i * 4] * 0.0025f, vertexBuffer[i * 4 + 1] * 0.0025f, vertexBuffer[i * 4 + 2] * 0.0025f; + p = orientation * p + center; + + if ((p(0) < 1.0) && (p(0) > -1.0) && (p(1) < 1.0) && (p(1) > -1.0) && (p(2) < 0)) { + points.push_back(p); + indices.push_back(cnt); + cnt++; + } + } + + buffer += hdr->uVertexBufferSize;; + uBufferPosition += sizeof(SurfaceMeshStreamHeader) + hdr->uVertexBufferSize; + if (uBufferPosition >= uBufferSize) + break; + } + + if (points.size() < 50) + return; + + std::vector bestlist; + for (int ransac_t = 0; ransac_t < ransac_max; ransac_t++) { + random_shuffle(indices.begin(), indices.end()); + + std::vector candlist; + Eigen::Vector3d v01, v02, nfloor, cand_p; + int numInliers = 0; + + cand_p = points.at(indices.at(0)); + v01 = points.at(indices.at(1)) - cand_p; + v02 = points.at(indices.at(2)) - cand_p; + nfloor = v01.cross(v02); + nfloor = nfloor.normalized(); + + for (int idx = 3; idx < (int)indices.size(); idx++) { + Eigen::Vector3d targp = points.at(indices.at(idx)) - cand_p; + double err = abs(targp.dot(nfloor)); + + if (err < 0.005) { + //inlier + candlist.push_back(indices.at(idx)); + numInliers++; + } + } + + if (maxcnt < numInliers) { + maxcnt = numInliers; + bestn = nfloor; + bestp = cand_p; + bestlist = std::vector(candlist); + } + } + + // plane fitting + // solve least square problem + // ax+by+z+d=0: ax+by+d=-z + Eigen::MatrixXd A(bestlist.size(), 3); + Eigen::VectorXd B(bestlist.size()); + + for (int idx = 0; idx < (int)bestlist.size(); idx++) { + Eigen::Vector3d targp = points.at(indices.at(idx)); + + A(idx, 0) = targp(0); // x + A(idx, 1) = targp(1); // y + A(idx, 2) = 1; // 1 + B(idx) = -targp(2); // -z + } + + Eigen::Vector3d ansX = A.jacobiSvd(Eigen::ComputeThinU | Eigen::ComputeThinV).solve(B); + + // rendering: 1.0m upper from hololens - 2.0m lower from hololens (3.0m range) + // hololens point (scale*(dwidth/2),scale*(dwidth/2),1.0) + // ax+by+z+d=0: n< + + + + + + + + + + + {b65d7ef6-4502-4ff4-9c74-942a42e2db5e} + HolographicApp + MSRHoloLensSpatialMapping + en-US + 14.0 + true + Windows Store + 10.0 + 10.0.19041.0 + 10.0 + true + + + + + Debug + Win32 + + + Release + Win32 + + + Debug + x64 + + + Release + x64 + + + Debug + ARM + + + Release + ARM + + + Debug + ARM64 + + + Release + ARM64 + + + + Application + true + + + Application + true + + + Application + true + + + Application + true + + + Application + 
false + true + + + Application + false + true + + + Application + false + true + + + Application + false + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + MSRHoloLensSpatialMapping_TemporaryKey.pfx + + + $(SolutionDir)bin\$(Platform)\$(Configuration)\ + $(SolutionDir)build\$(Platform)\$(Configuration)\ + + + $(SolutionDir)bin\$(Platform)\$(Configuration)\ + $(SolutionDir)build\$(Platform)\$(Configuration)\ + + + $(SolutionDir)bin\$(Platform)\$(Configuration)\ + $(SolutionDir)build\$(Platform)\$(Configuration)\ + + + $(SolutionDir)bin\$(Platform)\$(Configuration)\ + $(SolutionDir)build\$(Platform)\$(Configuration)\ + + + $(SolutionDir)bin\$(Platform)\$(Configuration)\ + $(SolutionDir)build\$(Platform)\$(Configuration)\ + + + $(SolutionDir)bin\$(Platform)\$(Configuration)\ + $(SolutionDir)build\$(Platform)\$(Configuration)\ + + + $(SolutionDir)bin\$(Platform)\$(Configuration)\ + $(SolutionDir)build\$(Platform)\$(Configuration)\ + + + $(SolutionDir)bin\$(Platform)\$(Configuration)\ + $(SolutionDir)build\$(Platform)\$(Configuration)\ + + + + d2d1.lib; d3d11.lib; dxgi.lib; dwrite.lib; windowscodecs.lib; %(AdditionalDependencies); + %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store; $(VCInstallDir)\lib + mincore.lib;kernel32.lib;ole32.lib;%(IgnoreSpecificDefaultLibraries) + $(OutDir)$(TargetName)$(TargetExt) + + + pch.h + $(IntDir)pch.pch + $(ProjectDir);$(IntermediateOutputPath);%(AdditionalIncludeDirectories) + /bigobj %(AdditionalOptions) + 4453;28204 + _DEBUG;%(PreprocessorDefinitions) + + + + + d2d1.lib; d3d11.lib; dxgi.lib; dwrite.lib; windowscodecs.lib; %(AdditionalDependencies); + %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store; $(VCInstallDir)\lib + mincore.lib;kernel32.lib;ole32.lib;%(IgnoreSpecificDefaultLibraries) + $(OutDir)$(TargetName)$(TargetExt) + + + pch.h + $(IntDir)pch.pch + $(ProjectDir);$(IntermediateOutputPath);%(AdditionalIncludeDirectories) + /bigobj %(AdditionalOptions) + 4453;28204 + _DEBUG;%(PreprocessorDefinitions) + + + + + d2d1.lib; d3d11.lib; dxgi.lib; dwrite.lib; windowscodecs.lib; %(AdditionalDependencies); + %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store; $(VCInstallDir)\lib + mincore.lib;kernel32.lib;ole32.lib;%(IgnoreSpecificDefaultLibraries) + $(OutDir)$(TargetName)$(TargetExt) + + + pch.h + $(IntDir)pch.pch + $(ProjectDir);$(IntermediateOutputPath);%(AdditionalIncludeDirectories) + /bigobj %(AdditionalOptions) + 4453;28204 + _DEBUG;%(PreprocessorDefinitions) + + + + + d2d1.lib; d3d11.lib; dxgi.lib; dwrite.lib; windowscodecs.lib; %(AdditionalDependencies); + %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store; $(VCInstallDir)\lib + mincore.lib;kernel32.lib;ole32.lib;%(IgnoreSpecificDefaultLibraries) + $(OutDir)$(TargetName)$(TargetExt) + + + pch.h + $(IntDir)pch.pch + $(ProjectDir);$(IntermediateOutputPath);%(AdditionalIncludeDirectories) + /bigobj %(AdditionalOptions) + 4453;28204 + _DEBUG;%(PreprocessorDefinitions) + + + + + d2d1.lib; d3d11.lib; dxgi.lib; dwrite.lib; windowscodecs.lib; %(AdditionalDependencies); + %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store; $(VCInstallDir)\lib + mincore.lib;kernel32.lib;ole32.lib;%(IgnoreSpecificDefaultLibraries) + $(OutDir)$(TargetName)$(TargetExt) + + + pch.h + $(IntDir)pch.pch + $(ProjectDir);$(IntermediateOutputPath);%(AdditionalIncludeDirectories) + /bigobj %(AdditionalOptions) + 4453;28204 + NDEBUG;%(PreprocessorDefinitions) + + + + + d2d1.lib; d3d11.lib; dxgi.lib; dwrite.lib; windowscodecs.lib; %(AdditionalDependencies); + 
%(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store; $(VCInstallDir)\lib + mincore.lib;kernel32.lib;ole32.lib;%(IgnoreSpecificDefaultLibraries) + $(OutDir)$(TargetName)$(TargetExt) + + + pch.h + $(IntDir)pch.pch + $(ProjectDir);$(IntermediateOutputPath);%(AdditionalIncludeDirectories) + /bigobj %(AdditionalOptions) + 4453;28204 + NDEBUG;%(PreprocessorDefinitions) + + + + + d2d1.lib; d3d11.lib; dxgi.lib; dwrite.lib; windowscodecs.lib; %(AdditionalDependencies); + %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store; $(VCInstallDir)\lib + mincore.lib;kernel32.lib;ole32.lib;%(IgnoreSpecificDefaultLibraries) + $(OutDir)$(TargetName)$(TargetExt) + + + pch.h + $(IntDir)pch.pch + $(ProjectDir);$(IntermediateOutputPath);%(AdditionalIncludeDirectories) + /bigobj %(AdditionalOptions) + 4453;28204 + NDEBUG;%(PreprocessorDefinitions) + + + + + d2d1.lib; d3d11.lib; dxgi.lib; dwrite.lib; windowscodecs.lib; %(AdditionalDependencies); + %(AdditionalLibraryDirectories); $(VCInstallDir)\lib\store; $(VCInstallDir)\lib + mincore.lib;kernel32.lib;ole32.lib;%(IgnoreSpecificDefaultLibraries) + $(OutDir)$(TargetName)$(TargetExt) + + + pch.h + $(IntDir)pch.pch + $(ProjectDir);$(IntermediateOutputPath);%(AdditionalIncludeDirectories) + /bigobj %(AdditionalOptions) + 4453;28204 + NDEBUG;%(PreprocessorDefinitions) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Create + + + + + Designer + + + + + + + Pixel + 5.0 + Pixel + 5.0 + Pixel + 5.0 + Pixel + 5.0 + Pixel + 5.0 + Pixel + 5.0 + Pixel + 5.0 + Pixel + 5.0 + + + Pixel + 5.0 + Pixel + 5.0 + Pixel + 5.0 + Pixel + 5.0 + Pixel + 5.0 + Pixel + 5.0 + Pixel + 5.0 + Pixel + 5.0 + + + Geometry + 5.0 + Geometry + 5.0 + Geometry + 5.0 + Geometry + 5.0 + Geometry + 5.0 + Geometry + 5.0 + Geometry + 5.0 + Geometry + 5.0 + + + Vertex + 5.0 + Vertex + 5.0 + Vertex + 5.0 + Vertex + 5.0 + Vertex + 5.0 + Vertex + 5.0 + Vertex + 5.0 + Vertex + 5.0 + + + Vertex + 5.0 + Vertex + 5.0 + Vertex + 5.0 + Vertex + 5.0 + Vertex + 5.0 + Vertex + 5.0 + Vertex + 5.0 + Vertex + 5.0 + + + + + + + + + + + + This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. 
+ + + + \ No newline at end of file diff --git a/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMapping.vcxproj.filters b/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMapping.vcxproj.filters new file mode 100644 index 0000000..dbc640a --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMapping.vcxproj.filters @@ -0,0 +1,150 @@ + + + + + b65d7ef6-4502-4ff4-9c74-942a42e2db5f + + + a4e35f6e-771e-4e96-ae2a-6415fba206a4 + bmp;fbx;gif;jpg;jpeg;tga;tiff;tif;png + + + 2582b612-6d01-40f6-a199-be84879182e9 + + + Common + + + Common + + + Common + + + Common + + + Assets + + + Assets + + + Assets + + + Assets + + + Assets + + + Assets + + + {6104e536-a330-48ae-9fd5-8ac4d82ee784} + + + {381a19a1-a703-45a0-9212-41e34ce97352} + + + {301af131-be93-4f67-87af-cea676aa90c0} + + + + + Common + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Content + + + Content + + + Content + + + Source Files + + + Source Files + + + + + Common + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Content + + + Content + + + Content + + + Content + + + Content + + + Header Files + + + + + Assets + + + + + + + + + + + + Content\Shaders + + + Content\Shaders + + + Content\Shaders + + + Content\Shaders + + + Content\Shaders + + + \ No newline at end of file diff --git a/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMapping.vcxproj.user b/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMapping.vcxproj.user new file mode 100644 index 0000000..c3839ba --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMapping.vcxproj.user @@ -0,0 +1,27 @@ + + + + 10.1.1.206 + UWPRemoteDebugger + + + 10.1.1.206 + WindowsDeviceDebugger + + + 10.1.1.206 + UWPRemoteDebugger + + + 10.1.1.206 + UWPRemoteDebugger + + + UWPRemoteDebugger + 10.1.1.199 + + + 10.1.1.199 + UWPRemoteDebugger + + \ No newline at end of file diff --git a/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMappingMain.cpp b/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMappingMain.cpp new file mode 100644 index 0000000..ca57fde --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMappingMain.cpp @@ -0,0 +1,576 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. 
+// +//********************************************************* + +#include "pch.h" +#include "MSRHololensSpatialMappingMain.h" +#include "Common\DirectXHelper.h" + +#include +#include + +using namespace HoloLensNavigation; + +using namespace concurrency; +using namespace Microsoft::WRL; +using namespace Platform; +using namespace Windows::Foundation; +using namespace Windows::Foundation::Collections; +using namespace Windows::Foundation::Numerics; +using namespace Windows::Graphics::DirectX; +using namespace Windows::Graphics::DirectX::Direct3D11; +using namespace Windows::Graphics::Holographic; +using namespace Windows::Perception::Spatial; +using namespace Windows::Perception::Spatial::Surfaces; +using namespace Windows::UI::Input::Spatial; +using namespace std::placeholders; + +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +HoloLensNavigationMain::HoloLensNavigationMain(const std::shared_ptr& deviceResources) : + m_deviceResources(deviceResources) +{ + // + // Loads and initializes application assets when the application is loaded. + + _pGlobalTimer = &m_timer; + + m_fVerbose = false; + + m_deviceResources->RegisterDeviceNotify(this); + + // + // initialize vertices array + // m_vert = ref new Platform::Array(1); + + // + // set up socket listener + m_listenercontext = ref new ListenerContext(this); + + m_listener = StartListener(ref new ListenerContext::OnConnectionEvent(m_listenercontext, &ListenerContext::OnConnection), PORT); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +HoloLensNavigationMain::~HoloLensNavigationMain() +{ + // Deregister device notification. + m_deviceResources->RegisterDeviceNotify(nullptr); + + UnregisterHolographicEventHandlers(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::SetHolographicSpace(HolographicSpace^ holographicSpace) +{ + UnregisterHolographicEventHandlers(); + + m_holographicSpace = holographicSpace; + + m_meshRenderer = std::make_unique(m_deviceResources); + + m_spatialInputHandler = std::make_unique(); + + // Use the default SpatialLocator to track the motion of the device. + m_locator = SpatialLocator::GetDefault(); + + m_locatabilityChangedToken = m_locator->LocatabilityChanged += + ref new Windows::Foundation::TypedEventHandler( + std::bind(&HoloLensNavigationMain::OnLocatabilityChanged, this, _1, _2) + ); + + // respond to changes in the positional tracking state by cancelling deactivation + // of positional tracking. + m_positionalTrackingDeactivatingToken = m_locator->PositionalTrackingDeactivating += + ref new Windows::Foundation::TypedEventHandler( + std::bind(&HoloLensNavigationMain::OnPositionalTrackingDeactivating, this, _1, _2)); + + // Respond to camera added events by creating any resources that are specific + // to that camera, such as the back buffer render target view. 
+ // When we add an event handler for CameraAdded, the API layer will avoid putting + // the new camera in new HolographicFrames until we complete the deferral we created + // for that handler, or return from the handler without creating a deferral. This + // allows the app to take more than one frame to finish creating resources and + // loading assets for the new holographic camera. + // This function should be registered before the app creates any HolographicFrames. + m_cameraAddedToken = m_holographicSpace->CameraAdded += + ref new Windows::Foundation::TypedEventHandler( + std::bind(&HoloLensNavigationMain::OnCameraAdded, this, _1, _2)); + + // Respond to camera removed events by releasing resources that were created for that + // camera. + // When the app receives a CameraRemoved event, it releases all references to the back + // buffer right away. This includes render target views, Direct2D target bitmaps, and so on. + // The app must also ensure that the back buffer is not attached as a render target, as + // shown in DeviceResources::ReleaseResourcesForBackBuffer. + m_cameraRemovedToken = m_holographicSpace->CameraRemoved += + ref new Windows::Foundation::TypedEventHandler( + std::bind(&HoloLensNavigationMain::OnCameraRemoved, this, _1, _2)); + + // This code sample uses a DeviceAttachedFrameOfReference to have the Spatial Mapping surface observer + // follow along with the device's location. + m_referenceFrame = m_locator->CreateAttachedFrameOfReferenceAtCurrentHeading(); + + // Notes on spatial tracking APIs: + // * Stationary reference frames are designed to provide a best-fit position relative to the + // overall space. Individual positions within that reference frame are allowed to drift slightly + // as the device learns more about the environment. + // * When precise placement of individual holograms is required, a SpatialAnchor should be used to + // anchor the individual hologram to a position in the real world - for example, a point the user + // indicates to be of special interest. Anchor positions do not drift, but can be corrected; the + // anchor will use the corrected position starting in the next frame after the correction has + // occurred. + + // Create a frame of reference that remains stationary relative to the user's surroundings, + // with its initial origin at the SpatialLocator's current location. + m_stationaryReferenceFrame = m_locator->CreateStationaryFrameOfReferenceAtCurrentLocation(); + + // Create a spatial anchor helper + m_spatialAnchorHelper = std::shared_ptr(new SpatialAnchorHelper(nullptr)); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::UnregisterHolographicEventHandlers() +{ + if (m_holographicSpace != nullptr) + { + // Clear previous event registrations. 
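+        // UnregisterHolographicEventHandlers() is called both from SetHolographicSpace()
+        // and from the destructor, so the camera tokens are zeroed after removal to keep
+        // repeated calls safe.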
+ + if (m_cameraAddedToken.Value != 0) + { + m_holographicSpace->CameraAdded -= m_cameraAddedToken; + m_cameraAddedToken.Value = 0; + } + + if (m_cameraRemovedToken.Value != 0) + { + m_holographicSpace->CameraRemoved -= m_cameraRemovedToken; + m_cameraRemovedToken.Value = 0; + } + } + + if (m_locator != nullptr) + { + m_locator->LocatabilityChanged -= m_locatabilityChangedToken; + m_locator->PositionalTrackingDeactivating -= m_positionalTrackingDeactivatingToken; + } + + if (m_surfaceObserver != nullptr) + { + m_surfaceObserver->ObservedSurfacesChanged -= m_surfacesChangedToken; + } +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::OnSurfacesChanged(SpatialSurfaceObserver^ sender, Object^ args) +{ + IMapView^ const& surfaceCollection = sender->GetObservedSurfaces(); + + // Process surface adds and updates. + for (const auto& pair : surfaceCollection) + { + auto id = pair->Key; + auto surfaceInfo = pair->Value; + + // Choose whether to add, or update the surface. + // In this example, new surfaces are treated differently by highlighting them in a different + // color. This allows you to observe changes in the spatial map that are due to new meshes, + // as opposed to mesh updates. + // In your app, you might choose to process added surfaces differently than updated + // surfaces. For example, you might prioritize processing of added surfaces, and + // defer processing of updates to existing surfaces. + if (m_meshRenderer->HasSurface(id)) + { + if (m_meshRenderer->GetLastUpdateTime(id).UniversalTime < surfaceInfo->UpdateTime.UniversalTime) + { + // Update existing surface. + m_meshRenderer->UpdateSurface(id, surfaceInfo); + } + } + else + { + // New surface. + m_meshRenderer->AddSurface(id, surfaceInfo); + } + } + + // Sometimes, a mesh will fall outside the area that is currently visible to + // the surface observer. In this code sample, we "sleep" any meshes that are + // not included in the surface collection to avoid rendering them. + // The system can including them in the collection again later, in which case + // they will no longer be hidden. + m_meshRenderer->HideInactiveMeshes(surfaceCollection); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +HolographicFrame^ HoloLensNavigationMain::Update() +{ + // + // Updates the application state once per frame. + + // Before doing the timer update, there is some work to do per-frame + // to maintain holographic rendering. First, we will get information + // about the current frame. + + // The HolographicFrame has information that the app needs in order + // to update and render the current frame. The app begins each new + // frame by calling CreateNextFrame. + HolographicFrame^ holographicFrame = m_holographicSpace->CreateNextFrame(); + + // Get a prediction of where holographic cameras will be when this frame + // is presented. + HolographicFramePrediction^ prediction = holographicFrame->CurrentPrediction; + + // Back buffers can change from frame to frame. Validate each buffer, and recreate + // resource views and depth buffers as needed. 
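+    // Per-frame sequence: validate the camera resources for this frame, run InternalUpdate()
+    // to refresh the pose/anchor bookkeeping, lazily create the spatial surface observer once
+    // access to spatial perception has been granted, update the surface meshes, and finally
+    // collect the spatial mapping data that is sent over the socket connection.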
+ m_deviceResources->EnsureCameraResources(holographicFrame, prediction); + + // Next, we get a coordinate system from the attached frame of reference that is + // associated with the current frame. Later, this coordinate system is used for + // for creating the stereo view matrices when rendering the sample content. + SpatialCoordinateSystem^ currentCoordinateSystem = m_referenceFrame->GetStationaryCoordinateSystemAtTimestamp(prediction->Timestamp); + + if (!InternalUpdate(holographicFrame)) + return holographicFrame; + + // Only create a surface observer when you need to - do not create a new one each frame. + if (m_surfaceObserver == nullptr) + { + // Initialize the Surface Observer using a valid coordinate system. + if (!m_spatialPerceptionAccessRequested) + { + // The spatial mapping API reads information about the user's environment. The user must + // grant permission to the app to use this capability of the Windows Holographic device. + auto initSurfaceObserverTask = create_task(SpatialSurfaceObserver::RequestAccessAsync()); + initSurfaceObserverTask.then([this, currentCoordinateSystem](Windows::Perception::Spatial::SpatialPerceptionAccessStatus status) + { + switch (status) + { + case SpatialPerceptionAccessStatus::Allowed: + m_surfaceAccessAllowed = true; + break; + default: + // Access was denied. This usually happens because your AppX manifest file does not declare the + // spatialPerception capability. + // For info on what else can cause this, see: http://msdn.microsoft.com/library/windows/apps/mt621422.aspx + m_surfaceAccessAllowed = false; + break; + } + }); + + m_spatialPerceptionAccessRequested = true; + } + } + + if (m_surfaceAccessAllowed) + { + SpatialBoundingBox axisAlignedBoundingBox = + { + { 0.f, 0.f, 0.f }, + { 20.f, 20.f, 5.f }, + }; + SpatialBoundingVolume^ bounds = SpatialBoundingVolume::FromBox(currentCoordinateSystem, axisAlignedBoundingBox); + + // If status is Allowed, we can create the surface observer. + if (m_surfaceObserver == nullptr) + { + // First, we'll set up the surface observer to use our preferred data formats. + // In this example, a "preferred" format is chosen that is compatible with our precompiled shader pipeline. + m_surfaceMeshOptions = ref new SpatialSurfaceMeshOptions(); + IVectorView^ supportedVertexPositionFormats = m_surfaceMeshOptions->SupportedVertexPositionFormats; + unsigned int formatIndex = 0; + if (supportedVertexPositionFormats->IndexOf(DirectXPixelFormat::R16G16B16A16IntNormalized, &formatIndex)) + { + m_surfaceMeshOptions->VertexPositionFormat = DirectXPixelFormat::R16G16B16A16IntNormalized; + } + IVectorView^ supportedVertexNormalFormats = m_surfaceMeshOptions->SupportedVertexNormalFormats; + if (supportedVertexNormalFormats->IndexOf(DirectXPixelFormat::R8G8B8A8IntNormalized, &formatIndex)) + { + m_surfaceMeshOptions->VertexNormalFormat = DirectXPixelFormat::R8G8B8A8IntNormalized; + } + + // If you are using a very high detail setting with spatial mapping, it can be beneficial + // to use a 32-bit unsigned integer format for indices instead of the default 16-bit. + // Uncomment the following code to enable 32-bit indices. + //IVectorView^ supportedTriangleIndexFormats = m_surfaceMeshOptions->SupportedTriangleIndexFormats; + //if (supportedTriangleIndexFormats->IndexOf(DirectXPixelFormat::R32UInt, &formatIndex)) + //{ + // m_surfaceMeshOptions->TriangleIndexFormat = DirectXPixelFormat::R32UInt; + //} + + // Create the observer. 
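+            // The observer is created only once. It is seeded with the currently observed
+            // surfaces and then kept up to date through the ObservedSurfacesChanged event
+            // (OnSurfacesChanged). Its bounding volume is a 20 m x 20 m x 5 m axis-aligned
+            // box that is re-applied every frame so that it follows the device.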
+ m_surfaceObserver = ref new SpatialSurfaceObserver(); + if (m_surfaceObserver) + { + m_surfaceObserver->SetBoundingVolume(bounds); + + // If the surface observer was successfully created, we can initialize our + // collection by pulling the current data set. + auto mapContainingSurfaceCollection = m_surfaceObserver->GetObservedSurfaces(); + for (auto const& pair : mapContainingSurfaceCollection) + { + // Store the ID and metadata for each surface. + auto const& id = pair->Key; + auto const& surfaceInfo = pair->Value; + m_meshRenderer->AddSurface(id, surfaceInfo); + } + + // We then subcribe to an event to receive up-to-date data. + m_surfacesChangedToken = m_surfaceObserver->ObservedSurfacesChanged += + ref new TypedEventHandler(bind(&HoloLensNavigationMain::OnSurfacesChanged, this, _1, _2)); + } + } + + // Keep the surface observer positioned at the device's location. + m_surfaceObserver->SetBoundingVolume(bounds); + + // Note that it is possible to set multiple bounding volumes. Pseudocode: + // m_surfaceObserver->SetBoundingVolumes(/* iterable collection of bounding volumes*/); + // + // It is also possible to use other bounding shapes - such as a view frustum. Pseudocode: + // SpatialBoundingVolume^ bounds = SpatialBoundingVolume::FromFrustum(coordinateSystem, viewFrustum); + // m_surfaceObserver->SetBoundingVolume(bounds); + } + + // Check for new input state since the last frame. + SpatialInteractionSourceState^ pointerState = m_spatialInputHandler->CheckForInput(); + if (pointerState != nullptr) + { + // When a Pressed gesture is detected, the rendering mode will be changed to wireframe. + m_drawWireframe = !m_drawWireframe; + } + + m_timer.Tick([&] () + { +#ifdef TODO + if (m_baseAnchor != nullptr) { + const auto tryTransform = m_baseAnchor->CoordinateSystem->TryGetTransformTo(stationaryCoordinateSystem); + m_meshRenderer->Update(m_timer, m_baseAnchor->CoordinateSystem); + } + else +#endif // #ifdef TODO + { + m_meshRenderer->Update(m_timer, currentCoordinateSystem); + } + }); + + // + // collecting vertex data is expensive. When debugging and to ease CPU and GPU + // impact, disable the CollectSpatialMappinginformation() call if not part of + // investigation. +#ifndef DISABLE_COLLECTING_SPATIAL_MAPPING_DATA + CollectSpatialMappinginformation(); +#else // #ifndef DISABLE_COLLECTING_SPATIAL_MAPPING_DATA + #pragma message("====================== CollectSpatialMappinginformation() disabled ======================") +#endif // #ifndef DISABLE_COLLECTING_SPATIAL_MAPPING_DATA + + // This code uses default image stabilization settings, and does not set the focus point. + + // The holographic frame will be used to get up-to-date view and projection matrices and + // to present the swap chain. + return holographicFrame; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +bool HoloLensNavigationMain::Render(HolographicFrame^ holographicFrame) +{ + // + // Renders the current frame to each holographic camera, according to the + // current application and spatial positioning state. Returns true if the + // frame was rendered to at least one camera. + + // Don't try to render anything before the first Update. + if (m_timer.GetFrameCount() == 0) + return false; + + // Lock the set of holographic camera resources, then draw to each camera + // in this frame. 
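+    // For every predicted camera pose the loop below binds that camera's render target and
+    // depth-stencil view, refreshes its view/projection constant buffer, draws the surface
+    // meshes, and, when CommitDirect3D11DepthBuffer is available, hands the depth buffer to
+    // the system for per-pixel image stabilization.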
+ return m_deviceResources->UseHolographicCameraResources( + [this, holographicFrame](std::map>& cameraResourceMap) + { + // Up-to-date frame predictions enhance the effectiveness of image stablization and + // allow more accurate positioning of holograms. + holographicFrame->UpdateCurrentPrediction(); + HolographicFramePrediction^ prediction = holographicFrame->CurrentPrediction; + SpatialCoordinateSystem^ currentCoordinateSystem = m_referenceFrame->GetStationaryCoordinateSystemAtTimestamp(prediction->Timestamp); + + bool atLeastOneCameraRendered = false; + for (auto cameraPose : prediction->CameraPoses) + { + // This represents the device-based resources for a HolographicCamera. + DX::CameraResources* pCameraResources = cameraResourceMap[cameraPose->HolographicCamera->Id].get(); + + // Get the device context. + const auto context = m_deviceResources->GetD3DDeviceContext(); + const auto depthStencilView = pCameraResources->GetDepthStencilView(); + + // Set render targets to the current holographic camera. + ID3D11RenderTargetView* const targets[1] = { pCameraResources->GetBackBufferRenderTargetView() }; + context->OMSetRenderTargets(1, targets, depthStencilView); + + // Clear the back buffer and depth stencil view. + context->ClearRenderTargetView(targets[0], DirectX::Colors::Transparent); + context->ClearDepthStencilView(depthStencilView, D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0); + + // The view and projection matrices for each holographic camera will change + // every frame. This function refreshes the data in the constant buffer for + // the holographic camera indicated by cameraPose. + pCameraResources->UpdateViewProjectionBuffer(m_deviceResources, cameraPose, currentCoordinateSystem); + + // Attach the view/projection constant buffer for this camera to the graphics pipeline. + bool cameraActive = pCameraResources->AttachViewProjectionBuffer(m_deviceResources); + + // Only render world-locked content when positional tracking is active. + if (cameraActive) + { + // Draw the hologram. + m_meshRenderer->Render(pCameraResources->IsRenderingStereoscopic(), m_drawWireframe); + + // On versions of the platform that support the CommitDirect3D11DepthBuffer API, we can + // provide the depth buffer to the system, and it will use depth information to stabilize + // the image at a per-pixel level. + static const bool canCommitDirect3D11DepthBuffer = + Windows::Foundation::Metadata::ApiInformation::IsMethodPresent("Windows.Graphics.Holographic.HolographicCameraRenderingParameters", "CommitDirect3D11DepthBuffer"); + + if (canCommitDirect3D11DepthBuffer) + { + HolographicCameraRenderingParameters^ renderingParameters = holographicFrame->GetRenderingParameters(cameraPose); + ComPtr spDepthStencil = pCameraResources->GetDepthStencilTexture2D(); + + // Direct3D interop APIs are used to provide the buffer to the WinRT API. + ComPtr depthStencilResource; + DX::ThrowIfFailed(spDepthStencil.As(&depthStencilResource)); + ComPtr depthDxgiSurface; + DX::ThrowIfFailed(depthStencilResource->CreateSubresourceSurface(0, &depthDxgiSurface)); + IDirect3DSurface^ depthD3DSurface = CreateDirect3DSurface(depthDxgiSurface.Get()); + + // Calling CommitDirect3D11DepthBuffer causes the system to queue Direct3D commands to + // read the depth buffer. It will then use that information to stabilize the image as + // the HolographicFrame is presented. 
+ renderingParameters->CommitDirect3D11DepthBuffer(depthD3DSurface); + } + } + + atLeastOneCameraRendered = true; + } + + return atLeastOneCameraRendered; + }); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::SaveAppState() +{ + if (m_spatialAnchorHelper != nullptr) + { + m_spatialAnchorHelper->TrySaveToAnchorStore(); + } +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::LoadAppState() +{ + LoadAnchorStore(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::OnDeviceLost() +{ + // Notifies classes that use Direct3D device resources that the device resources + // need to be released before this method returns. + + m_meshRenderer->ReleaseDeviceDependentResources(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::OnDeviceRestored() +{ + // Notifies classes that use Direct3D device resources that the device resources + // may now be recreated. + + m_meshRenderer->CreateDeviceDependentResources(); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::OnPositionalTrackingDeactivating(SpatialLocator^ sender, SpatialLocatorPositionalTrackingDeactivatingEventArgs^ args) +{ + // Without positional tracking, spatial meshes will not be locatable. + args->Canceled = true; +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::OnCameraAdded(HolographicSpace^ sender, HolographicSpaceCameraAddedEventArgs^ args) +{ + Deferral^ deferral = args->GetDeferral(); + HolographicCamera^ holographicCamera = args->Camera; + create_task([this, deferral, holographicCamera] () + { + // Create device-based resources for the holographic camera and add it to the list of + // cameras used for updates and rendering. Notes: + // * Since this function may be called at any time, the AddHolographicCamera function + // waits until it can get a lock on the set of holographic camera resources before + // adding the new camera. At 60 frames per second this wait should not take long. + // * A subsequent Update will take the back buffer from the RenderingParameters of this + // camera's CameraPose and use it to create the ID3D11RenderTargetView for this camera. + // Content can then be rendered for the HolographicCamera. 
+ m_deviceResources->AddHolographicCamera(holographicCamera); + + // Holographic frame predictions will not include any information about this camera until + // the deferral is completed. + deferral->Complete(); + }); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::OnCameraRemoved(HolographicSpace^ sender, HolographicSpaceCameraRemovedEventArgs^ args) +{ + // Before letting this callback return, ensure that all references to the back buffer + // are released. + // Since this function may be called at any time, the RemoveHolographicCamera function + // waits until it can get a lock on the set of holographic camera resources before + // deallocating resources for this camera. At 60 frames per second this wait should + // not take long. + m_deviceResources->RemoveHolographicCamera(args->Camera); +} +/***********************************************************************************************************/ +/* */ +/***********************************************************************************************************/ +void HoloLensNavigationMain::OnLocatabilityChanged(SpatialLocator^ sender, Object^ args) +{ + // String^ mes = sender->Locatability.ToString(); + + switch (sender->Locatability) + { + case SpatialLocatability::Unavailable: + // Holograms cannot be rendered. + break; + case SpatialLocatability::PositionalTrackingActivating: + break; + case SpatialLocatability::OrientationOnly: + break; + case SpatialLocatability::PositionalTrackingInhibited: + break; + case SpatialLocatability::PositionalTrackingActive: + break; + } +} diff --git a/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMappingMain.h b/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMappingMain.h new file mode 100644 index 0000000..4aaba7a --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/MSRHoloLensSpatialMappingMain.h @@ -0,0 +1,232 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. 
+// +//********************************************************* + +#pragma once + +#include "Common\DeviceResources.h" +#include "Common\StepTimer.h" + +#include "Content\SpatialInputHandler.h" +#include "Content\RealtimeSurfaceMeshRenderer.h" + +#include "Content\SurfaceMesh.h" + +#include "SpatialAnchor.h" + +#include +#include +#include +#include + +#define PORT "1234" + +namespace HoloLensNavigation +{ + enum InternalStatus { + UNCONNECTED, + READY_TO_RECEIVE, + RECEIVING, + PROCESSING, + READY_TO_SEND, + SENDING + }; + + enum EnumAskedState { + Default_Ask_Pose, + Refresh_Anchor_And_Render_Map, + Update_Anchor + }; + + enum EnumRepliedState { + Reply_Pose, + Reply_Anchor_Pose, + Reply_Anchor_Pose_Map + }; + + class HoloLensNavigationMain; + + ref class ListenerContext + { + internal: + ListenerContext(HoloLensNavigationMain* mappingmain) + { + m_mappingmain = mappingmain; + } + delegate void OnConnectionEvent(Windows::Networking::Sockets::StreamSocketListener^ listener, Windows::Networking::Sockets::StreamSocketListenerConnectionReceivedEventArgs^ object); + public: + void OnConnection(Windows::Networking::Sockets::StreamSocketListener^ listener, Windows::Networking::Sockets::StreamSocketListenerConnectionReceivedEventArgs^ object); + + internal: + Windows::Storage::Streams::DataWriter ^ m_writer; + Windows::Storage::Streams::DataReader ^ m_reader; + Windows::Networking::Sockets::StreamSocket ^ m_socket; + std::mutex m_sockLock; + + private: + HoloLensNavigationMain * m_mappingmain; + }; + + // + // Updates, renders, and presents holographic content using Direct3D. + class HoloLensNavigationMain : public DX::IDeviceNotify + { + public: + HoloLensNavigationMain(const std::shared_ptr& deviceResources); + ~HoloLensNavigationMain(); + + // Sets the holographic space. This is our closest analogue to setting a new window + // for the app. + void SetHolographicSpace(Windows::Graphics::Holographic::HolographicSpace^ holographicSpace); + + // Starts the holographic frame and updates the content. + Windows::Graphics::Holographic::HolographicFrame^ Update(); + + // Renders holograms, including world-locked content. + bool Render(Windows::Graphics::Holographic::HolographicFrame^ holographicFrame); + + // Handle saving and loading of app state owned by AppMain. + void SaveAppState(); + void LoadAppState(); + + // IDeviceNotify + virtual void OnDeviceLost(); + virtual void OnDeviceRestored(); + + // Handle surface change events. + void OnSurfacesChanged(Windows::Perception::Spatial::Surfaces::SpatialSurfaceObserver^ sender, Platform::Object^ args); + + public: + // + // socket connection state handlers + Windows::Networking::Sockets::StreamSocketListener^ StartListener(ListenerContext::OnConnectionEvent^ onConnectionEvent, Platform::String^ port); + void StateSender(); + void StateReceiver(); + + private: + // Asynchronously creates resources for new holographic cameras. + void OnCameraAdded( + Windows::Graphics::Holographic::HolographicSpace^ sender, + Windows::Graphics::Holographic::HolographicSpaceCameraAddedEventArgs^ args); + + // Synchronously releases resources for holographic cameras that are no longer + // attached to the system. + void OnCameraRemoved( + Windows::Graphics::Holographic::HolographicSpace^ sender, + Windows::Graphics::Holographic::HolographicSpaceCameraRemovedEventArgs^ args); + + // Used to prevent the device from deactivating positional tracking, which is + // necessary to continue to receive spatial mapping data. 
+ void OnPositionalTrackingDeactivating( + Windows::Perception::Spatial::SpatialLocator^ sender, + Windows::Perception::Spatial::SpatialLocatorPositionalTrackingDeactivatingEventArgs^ args); + + void OnLocatabilityChanged( + Windows::Perception::Spatial::SpatialLocator^ sender, + Platform::Object^ args); + + // Clears event registration state. Used when changing to a new HolographicSpace + // and when tearing down AppMain. + void UnregisterHolographicEventHandlers(); + + // Listens for the Pressed spatial input event. + std::shared_ptr m_spatialInputHandler; + + // A data handler for surface meshes. + std::unique_ptr m_meshRenderer; + + private: + // Cached pointer to device resources. + std::shared_ptr m_deviceResources; + + // Render loop timer. + DX::StepTimer m_timer; + + // Represents the holographic space around the user. + Windows::Graphics::Holographic::HolographicSpace ^ m_holographicSpace; + + // SpatialLocator that is attached to the primary camera. + Windows::Perception::Spatial::SpatialLocator ^ m_locator; + + // A reference frame attached to the holographic camera. + Windows::Perception::Spatial::SpatialLocatorAttachedFrameOfReference^ m_referenceFrame; + + // Event registration tokens. + Windows::Foundation::EventRegistrationToken m_cameraAddedToken; + Windows::Foundation::EventRegistrationToken m_cameraRemovedToken; + Windows::Foundation::EventRegistrationToken m_positionalTrackingDeactivatingToken; + Windows::Foundation::EventRegistrationToken m_surfacesChangedToken; + Windows::Foundation::EventRegistrationToken m_locatabilityChangedToken; + + // Indicates whether access to spatial mapping data has been granted. + bool m_surfaceAccessAllowed = false; + + // Indicates whether the surface observer initialization process was started. + bool m_spatialPerceptionAccessRequested = false; + + // Obtains spatial mapping data from the device in real time. + Windows::Perception::Spatial::Surfaces::SpatialSurfaceObserver ^ m_surfaceObserver; + Windows::Perception::Spatial::Surfaces::SpatialSurfaceMeshOptions ^ m_surfaceMeshOptions; + + // Determines the rendering mode. + bool m_drawWireframe = true; + + + // + // hololens navigation +private: + // A frame of reference that remains stationary relative to the user's surroundings at a point in time. 
+ Windows::Perception::Spatial::SpatialStationaryFrameOfReference^ m_stationaryReferenceFrame; + + bool InternalUpdate(Windows::Graphics::Holographic::HolographicFrame^ holographicFrame); + void CollectSpatialMappinginformation(); + void FloorDetection(unsigned char* buffer, unsigned int uBufferSize, double& HoloHeight, Eigen::Vector3d& floorpt); + + std::shared_ptr m_spatialAnchorHelper; + Windows::Perception::Spatial::SpatialAnchor ^ m_baseAnchor; + Windows::Perception::Spatial::SpatialAnchor ^ m_nextAnchor; + Windows::Perception::Spatial::SpatialAnchor ^ m_anchor; + + void LoadAnchorStore(); + Platform::String ^ m_newKey; + Platform::String ^ m_AnchorKey; + int m_spatialId = 0; + Windows::Foundation::Numerics::float4x4 m_initNewAnchorPositionFromPrevAnchor; + + // kept current state + Windows::Foundation::Numerics::float4x4 m_curPositionFromAnchor; + float m_floorAndHeight[4] = { 0,0,0,0 }; + + // send functions + void SendPose(); + void SendInitAnchorPose(); + void SendAnchorID(); + void SendPointCloud(); + + // + // socket management + friend ListenerContext; + ListenerContext ^ m_listenercontext; + Windows::Networking::Sockets::StreamSocketListener ^ m_listener; + + // + // connection status + bool m_connecting = false; + bool m_bCommunicating = false, m_bNextSend = false; + EnumAskedState m_askedState = EnumAskedState::Default_Ask_Pose; + EnumRepliedState m_replyedState = EnumRepliedState::Reply_Pose; + InternalStatus m_internalState = InternalStatus::UNCONNECTED; + + // + // log output flag + bool m_fVerbose; + }; + +} // namespace HoloLensNavigation diff --git a/windows/MSRHoloLensSpatialMapping/Miscellaneous.cpp b/windows/MSRHoloLensSpatialMapping/Miscellaneous.cpp new file mode 100644 index 0000000..559e9c3 --- /dev/null +++ b/windows/MSRHoloLensSpatialMapping/Miscellaneous.cpp @@ -0,0 +1,93 @@ +//********************************************************* +// +// Copyright (c) Microsoft. All rights reserved. +// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#include "pch.h" +#include + +using namespace HoloLensNavigation; + +DX::StepTimer * _pGlobalTimer = nullptr; + +/*************************************************************************************************/ +/* */ +/*************************************************************************************************/ +void HoloLensNavigation::DebugMsgW(PCWSTR pwszMessage, ...) 
diff --git a/windows/MSRHoloLensSpatialMapping/Miscellaneous.cpp b/windows/MSRHoloLensSpatialMapping/Miscellaneous.cpp
new file mode 100644
index 0000000..559e9c3
--- /dev/null
+++ b/windows/MSRHoloLensSpatialMapping/Miscellaneous.cpp
@@ -0,0 +1,93 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include <strsafe.h>
+
+using namespace HoloLensNavigation;
+
+DX::StepTimer * _pGlobalTimer = nullptr;
+
+/*************************************************************************************************/
+/*                                                                                               */
+/*************************************************************************************************/
+void HoloLensNavigation::DebugMsgW(PCWSTR pwszMessage, ...)
+{
+    WCHAR       ach[2048];
+    va_list     vArgs;
+
+    va_start(vArgs, pwszMessage);
+    StringCchVPrintfW(ach, _countof(ach), pwszMessage, vArgs);
+    va_end(vArgs);
+
+    StringCchCatW(ach, _countof(ach), L"\r\n");
+
+    OutputDebugStringW(ach);
+}
+/***********************************************************************************************************/
+/*                                                                                                         */
+/***********************************************************************************************************/
+StaticMemoryBuffer::StaticMemoryBuffer(void)
+{
+    InitializeCriticalSection(&m_cs);
+    m_pvBuffer = nullptr;
+    m_uAllocatedBufferSize = 0;
+    m_uRequestedBufferSize = 0;
+}
+/***********************************************************************************************************/
+/*                                                                                                         */
+/***********************************************************************************************************/
+StaticMemoryBuffer::~StaticMemoryBuffer(void)
+{
+    LockRWAccess();
+    if (m_pvBuffer) {
+        free(m_pvBuffer);
+        m_pvBuffer = nullptr;
+    }
+    m_uAllocatedBufferSize = 0;
+    m_uRequestedBufferSize = 0;
+    UnlockRWAccess();
+
+    DeleteCriticalSection(&m_cs);
+}
+/***********************************************************************************************************/
+/*                                                                                                         */
+/***********************************************************************************************************/
+void* StaticMemoryBuffer::realloc(unsigned int uBufferSize)
+{
+    LockRWAccess();
+
+    if (m_uAllocatedBufferSize < uBufferSize) {
+        //
+        // grow the allocation. Keep the result in a temporary so the existing
+        // block is not leaked if the allocation fails.
+        void* pvNew = (m_pvBuffer == nullptr) ? ::malloc(uBufferSize)
+                                              : ::realloc(m_pvBuffer, uBufferSize);
+
+        //
+        // out of memory?
+        if (pvNew == nullptr) {
+            if (m_pvBuffer) {
+                free(m_pvBuffer);
+                m_pvBuffer = nullptr;
+            }
+            m_uAllocatedBufferSize = 0;
+            m_uRequestedBufferSize = 0;
+            UnlockRWAccess();
+            return nullptr;
+        }
+
+        m_pvBuffer = pvNew;
+        m_uAllocatedBufferSize = uBufferSize;
+    }
+
+    m_uRequestedBufferSize = uBufferSize;
+
+    UnlockRWAccess();
+
+    return m_pvBuffer;
+}
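A brief, hedged usage sketch for the `StaticMemoryBuffer` declared in Miscellaneous.h below; the packet names are invented for the example. A writer grows the buffer, then copies under the lock so concurrent readers of `getPointer()`/`getBufferSize()` see a consistent snapshot.

```cpp
// Sketch only: typical use of StaticMemoryBuffer as a reusable scratch buffer.
// pPacketData / uPacketSize are hypothetical inputs, not names from this codebase.
#include <cstring>
#include "Miscellaneous.h"

void CopyPacketIntoScratch(HoloLensNavigation::StaticMemoryBuffer& buffer,
                           const void* pPacketData, unsigned int uPacketSize)
{
    // realloc() grows the internal allocation only when needed, records the
    // requested size, and returns nullptr on allocation failure.
    void* pvDst = buffer.realloc(uPacketSize);
    if (pvDst == nullptr)
        return;

    // Hold the buffer's critical section while writing.
    buffer.LockRWAccess();
    memcpy(pvDst, pPacketData, uPacketSize);
    buffer.UnlockRWAccess();
}
```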
diff --git a/windows/MSRHoloLensSpatialMapping/Miscellaneous.h b/windows/MSRHoloLensSpatialMapping/Miscellaneous.h
new file mode 100644
index 0000000..c2663e1
--- /dev/null
+++ b/windows/MSRHoloLensSpatialMapping/Miscellaneous.h
@@ -0,0 +1,63 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+namespace DX
+{
+    class StepTimer;
+};
+
+extern DX::StepTimer * _pGlobalTimer;
+
+namespace HoloLensNavigation
+{
+    void DebugMsgW(PCWSTR pwszMessage, ...);
+
+    class CSLock
+    {
+    public:
+        //----------------------------------------------------------
+        CSLock(CRITICAL_SECTION* pcs)
+        {
+            m_pcs = pcs;
+            EnterCriticalSection(m_pcs);
+        }
+        //----------------------------------------------------------
+        ~CSLock()
+        {
+            LeaveCriticalSection(m_pcs);
+        }
+
+    private:
+        CRITICAL_SECTION* m_pcs;
+    };
+
+    class StaticMemoryBuffer
+    {
+    public:
+        StaticMemoryBuffer(void);
+        ~StaticMemoryBuffer(void);
+
+        void* realloc(unsigned int uBufferSize);
+        _inline void LockRWAccess(void) { EnterCriticalSection(&m_cs); }
+        _inline void UnlockRWAccess(void) { LeaveCriticalSection(&m_cs); }
+        _inline void* getPointer(void) { return m_pvBuffer; }
+        _inline unsigned int getBufferSize(void) { return m_uRequestedBufferSize; }
+
+    private:
+        CRITICAL_SECTION m_cs;
+        void* m_pvBuffer;
+        unsigned int m_uAllocatedBufferSize;
+        unsigned int m_uRequestedBufferSize;
+    };
+
+} // namespace HoloLensNavigation
diff --git a/windows/MSRHoloLensSpatialMapping/Package.appxmanifest b/windows/MSRHoloLensSpatialMapping/Package.appxmanifest
new file mode 100644
index 0000000..f1afb97
--- /dev/null
+++ b/windows/MSRHoloLensSpatialMapping/Package.appxmanifest
@@ -0,0 +1,32 @@
+
+
+
+
+
+    MSRHoloLensSpatialMapping
+    Microsoft Applied Robotics Research
+    Assets\StoreLogo.png
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/windows/MSRHoloLensSpatialMapping/SpatialAnchor.cpp b/windows/MSRHoloLensSpatialMapping/SpatialAnchor.cpp
new file mode 100644
index 0000000..a55efa8
--- /dev/null
+++ b/windows/MSRHoloLensSpatialMapping/SpatialAnchor.cpp
@@ -0,0 +1,87 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#include "pch.h"
+#include "SpatialAnchor.h"
+
+using namespace HoloLensNavigation;
+using namespace Windows::Perception::Spatial;
+using namespace Platform;
+
+/***********************************************************************************************************/
+/*                                                                                                         */
+/***********************************************************************************************************/
+SpatialAnchorHelper::SpatialAnchorHelper(SpatialAnchorStore^ anchorStore)
+{
+    m_anchorStore = anchorStore;
+    m_anchorMap = ref new Platform::Collections::Map<String^, SpatialAnchor^>();
+}
+/***********************************************************************************************************/
+/*                                                                                                         */
+/***********************************************************************************************************/
+bool SpatialAnchorHelper::TrySaveToAnchorStore()
+{
+    // This function returns true if all the anchors in the in-memory collection are saved to the anchor
+    // store. If zero anchors are in the in-memory collection, we will still return true because the
+    // condition has been met.
+    bool success = true;
+
+    // If access is denied, 'anchorStore' will not be obtained.
+    if (m_anchorStore != nullptr)
+    {
+        for each (auto & pair in m_anchorMap)
+        {
+            auto const& id = pair->Key;
+            auto const& anchor = pair->Value;
+
+            // Try to save the anchors.
+            if (!m_anchorStore->TrySave(id, anchor))
+            {
+                // This may indicate the anchor ID is taken, or the anchor limit is reached for the app.
+                success = false;
+            }
+        }
+    }
+
+    return success;
+}
+/***********************************************************************************************************/
+/*                                                                                                         */
+/***********************************************************************************************************/
+void SpatialAnchorHelper::LoadFromAnchorStore()
+{
+    // If access is denied, 'anchorStore' will not be obtained.
+    if (m_anchorStore != nullptr)
+    {
+        // Get all saved anchors.
+        auto anchorMapView = m_anchorStore->GetAllSavedAnchors();
+        for each (auto const& pair in anchorMapView)
+        {
+            auto const& id = pair->Key;
+            auto const& anchor = pair->Value;
+            m_anchorMap->Insert(id, anchor);
+        }
+    }
+}
+/***********************************************************************************************************/
+/*                                                                                                         */
+/***********************************************************************************************************/
+void SpatialAnchorHelper::ClearAnchorStore()
+{
+    // If access is denied, 'anchorStore' will not be obtained.
+    if (m_anchorStore != nullptr)
+    {
+        // Clear all anchors from the store.
+        m_anchorMap->Clear();
+        m_anchorStore->Clear();
+    }
+}
diff --git a/windows/MSRHoloLensSpatialMapping/SpatialAnchor.h b/windows/MSRHoloLensSpatialMapping/SpatialAnchor.h
new file mode 100644
index 0000000..8ecc7ef
--- /dev/null
+++ b/windows/MSRHoloLensSpatialMapping/SpatialAnchor.h
@@ -0,0 +1,32 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT).
+// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
+// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
+// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
+// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
+//
+//*********************************************************
+
+#pragma once
+
+#include 
+
+namespace HoloLensNavigation
+{
+    class SpatialAnchorHelper
+    {
+    public:
+        SpatialAnchorHelper(Windows::Perception::Spatial::SpatialAnchorStore^ anchorStore);
+        Windows::Foundation::Collections::IMap<Platform::String^, Windows::Perception::Spatial::SpatialAnchor^>^ GetAnchorMap() { return m_anchorMap; };
+        void LoadFromAnchorStore();
+        void ClearAnchorStore();
+        bool TrySaveToAnchorStore();
+
+    private:
+        Windows::Perception::Spatial::SpatialAnchorStore^ m_anchorStore;
+        Windows::Foundation::Collections::IMap<Platform::String^, Windows::Perception::Spatial::SpatialAnchor^>^ m_anchorMap;
+    };
+
+} // namespace HoloLensNavigation
diff --git a/windows/MSRHoloLensSpatialMapping/packages.config b/windows/MSRHoloLensSpatialMapping/packages.config
new file mode 100644
index 0000000..caa5e56
--- /dev/null
+++ b/windows/MSRHoloLensSpatialMapping/packages.config
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/windows/MSRHoloLensSpatialMapping/pch.cpp b/windows/MSRHoloLensSpatialMapping/pch.cpp
new file mode 100644
index 0000000..bcb5590
--- /dev/null
+++ b/windows/MSRHoloLensSpatialMapping/pch.cpp
@@ -0,0 +1 @@
+#include "pch.h"
diff --git a/windows/MSRHoloLensSpatialMapping/pch.h b/windows/MSRHoloLensSpatialMapping/pch.h
new file mode 100644
index 0000000..ce324da
--- /dev/null
+++ b/windows/MSRHoloLensSpatialMapping/pch.h
@@ -0,0 +1,30 @@
+//*********************************************************
+//
+// Copyright (c) Microsoft. All rights reserved.
+// This code is licensed under the MIT License (MIT). +// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF +// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY +// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR +// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. +// +//********************************************************* + +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "Miscellaneous.h"
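The AppMain class earlier in this diff declares `LoadAnchorStore()` and a `std::shared_ptr<SpatialAnchorHelper>` member. As a hedged sketch only, not the repository's actual implementation, the helper is typically wired up from the system anchor store along these lines:

```cpp
// Illustrative sketch: one plausible body for AppMain::LoadAnchorStore(),
// assuming the usual UWP holographic pattern (requires <ppltasks.h>, e.g. via pch.h).
void AppMain::LoadAnchorStore()
{
    using namespace Windows::Perception::Spatial;

    // Request the app-scoped anchor store; the continuation runs once the
    // system grants access (or with nullptr if access is denied).
    concurrency::create_task(SpatialAnchorManager::RequestStoreAsync())
        .then([this](SpatialAnchorStore^ anchorStore)
    {
        if (anchorStore != nullptr)
        {
            // Wrap the store and pull previously persisted anchors into memory.
            m_spatialAnchorHelper = std::make_shared<SpatialAnchorHelper>(anchorStore);
            m_spatialAnchorHelper->LoadFromAnchorStore();
        }
    });
}
```

Anchors added to the map returned by `GetAnchorMap()` can later be persisted with `TrySaveToAnchorStore()`.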