Add CameraExposureOffset and Fix Disparity to Depth Computation #346

Closed
wants to merge 6 commits into from
8 changes: 6 additions & 2 deletions depthai_bridge/include/depthai_bridge/ImageConverter.hpp
@@ -27,8 +27,11 @@ using TimePoint = std::chrono::time_point<std::chrono::steady_clock, std::chrono
class ImageConverter {
public:
// ImageConverter() = default;
ImageConverter(const std::string frameName, bool interleaved, bool getBaseDeviceTimestamp = false);
ImageConverter(bool interleaved, bool getBaseDeviceTimestamp = false);
ImageConverter(const std::string frameName,
bool interleaved,
bool getBaseDeviceTimestamp = false,
dai::CameraExposureOffset offset = dai::CameraExposureOffset::END);
ImageConverter(bool interleaved, bool getBaseDeviceTimestamp = false, dai::CameraExposureOffset offset = dai::CameraExposureOffset::END);

/**
* @brief Handles cases in which the ROS time shifts forward or backward
@@ -82,6 +85,7 @@ class ImageConverter {

::ros::Time _rosBaseTime;
bool _getBaseDeviceTimestamp;
dai::CameraExposureOffset _offset;
// For handling ROS time shifts and debugging
int64_t _totalNsChange{0};
// Whether to update the ROS base time on each message conversion
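For reference, a minimal usage sketch of the extended constructor (not part of this PR): the frame id and the choice of MIDDLE are illustrative assumptions, and the call is expected to live inside an already-initialised ROS node because the constructor reads ros::Time::now().

#include "depthai_bridge/ImageConverter.hpp"

// Hedged sketch: request timestamps referenced to the middle of the exposure
// window instead of the defaulted dai::CameraExposureOffset::END.
dai::ros::ImageConverter rgbConverter("oak_rgb_camera_optical_frame",
                                      /*interleaved=*/false,
                                      /*getBaseDeviceTimestamp=*/false,
                                      dai::CameraExposureOffset::MIDDLE);

Because the new parameter is defaulted, existing call sites compile unchanged.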
40 changes: 22 additions & 18 deletions depthai_bridge/src/ImageConverter.cpp
@@ -24,13 +24,17 @@ std::unordered_map<dai::RawImgFrame::Type, std::string> ImageConverter::planarEn
{dai::RawImgFrame::Type::NV12, "rgb8"},
{dai::RawImgFrame::Type::YUV420p, "rgb8"}};

ImageConverter::ImageConverter(bool interleaved, bool getBaseDeviceTimestamp)
: _daiInterleaved(interleaved), _steadyBaseTime(std::chrono::steady_clock::now()), _getBaseDeviceTimestamp(getBaseDeviceTimestamp) {
ImageConverter::ImageConverter(bool interleaved, bool getBaseDeviceTimestamp, dai::CameraExposureOffset offset)
: _daiInterleaved(interleaved), _steadyBaseTime(std::chrono::steady_clock::now()), _getBaseDeviceTimestamp(getBaseDeviceTimestamp), _offset(offset) {
_rosBaseTime = ::ros::Time::now();
}

ImageConverter::ImageConverter(const std::string frameName, bool interleaved, bool getBaseDeviceTimestamp)
: _frameName(frameName), _daiInterleaved(interleaved), _steadyBaseTime(std::chrono::steady_clock::now()), _getBaseDeviceTimestamp(getBaseDeviceTimestamp) {
ImageConverter::ImageConverter(const std::string frameName, bool interleaved, bool getBaseDeviceTimestamp, dai::CameraExposureOffset offset)
: _frameName(frameName),
_daiInterleaved(interleaved),
_steadyBaseTime(std::chrono::steady_clock::now()),
_getBaseDeviceTimestamp(getBaseDeviceTimestamp),
_offset(offset) {
_rosBaseTime = ::ros::Time::now();
}

@@ -47,9 +51,9 @@ void ImageConverter::toRosMsgFromBitStream(std::shared_ptr<dai::ImgFrame> inData
}
std::chrono::_V2::steady_clock::time_point tstamp;
if(_getBaseDeviceTimestamp)
tstamp = inData->getTimestampDevice();
tstamp = inData->getTimestampDevice(_offset);
else
tstamp = inData->getTimestamp();
tstamp = inData->getTimestamp(_offset);
ImageMsgs::Image outImageMsg;
StdMsgs::Header header;
header.frame_id = _frameName;
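The behavioural change in this hunk is that the offset captured at construction is now forwarded to DepthAI's timestamp getters. A hedged illustration of the intent, assuming the depthai-core semantics where the offset selects which point of the exposure window the stamp refers to (the 10 ms figure is made up):

// Given a std::shared_ptr<dai::ImgFrame> inData and, say, a 10 ms exposure:
// END keeps the stamp at the end of the exposure window, while MIDDLE/START
// report a stamp roughly 5 ms / 10 ms earlier.
auto tsEnd = inData->getTimestamp(dai::CameraExposureOffset::END);
auto tsMid = inData->getTimestamp(dai::CameraExposureOffset::MIDDLE);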
@@ -74,18 +78,18 @@ void ImageConverter::toRosMsgFromBitStream(std::shared_ptr<dai::ImgFrame> inData
break;
}
default: {
throw(std::runtime_error("Converted type not supported!"));
throw std::runtime_error("Converted type not supported!");
}
}

output = cv::imdecode(cv::Mat(inData->getData()), decodeFlags);

// converting disparity
if(type == dai::RawImgFrame::Type::RAW8) {
auto factor = (info.K[0] * info.P[3]);
auto factor = std::abs(info.P[3]) * 1000;
cv::Mat depthOut = cv::Mat(cv::Size(output.cols, output.rows), CV_16UC1);
depthOut.forEach<short>([&output, &factor](short& pixel, const int* position) -> void {
auto disp = output.at<int8_t>(position);
depthOut.forEach<uint16_t>([&output, &factor](uint16_t& pixel, const int* position) -> void {
auto disp = output.at<uint8_t>(position);
if(disp == 0)
pixel = 0;
else
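For context, a sketch of the conversion the new factor implements, assuming the CameraInfo passed in follows the ROS stereo convention where P[3] holds Tx = -fx * baseline in metre-pixels; disp and depthMm are illustrative names:

// depth[mm] = fx[px] * baseline[m] * 1000 / disparity[px]
// Since Tx = -fx * baseline, |P[3]| already equals fx * baseline.
double factor = std::abs(info.P[3]) * 1000.0;  // fx * baseline, in mm * px
uint16_t depthMm = (disp == 0) ? 0 : static_cast<uint16_t>(factor / disp);

The previous factor, info.K[0] * info.P[3], multiplied in an extra (negative) fx, so the resulting values were neither positive nor scaled to millimetres; iterating the output as uint16_t with uint8_t disparity reads also matches the CV_16UC1 depth image and the RAW8 disparity frame.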
@@ -104,9 +108,9 @@ void ImageConverter::toRosMsg(std::shared_ptr<dai::ImgFrame> inData, std::deque<
}
std::chrono::_V2::steady_clock::time_point tstamp;
if(_getBaseDeviceTimestamp)
tstamp = inData->getTimestampDevice();
tstamp = inData->getTimestampDevice(_offset);
else
tstamp = inData->getTimestamp();
tstamp = inData->getTimestamp(_offset);
ImageMsgs::Image outImageMsg;
StdMsgs::Header header;
header.frame_id = _frameName;
@@ -130,7 +134,7 @@ void ImageConverter::toRosMsg(std::shared_ptr<dai::ImgFrame> inData, std::deque<
break;

default:
std::runtime_error("Invalid dataType inputs..");
throw std::runtime_error("Invalid dataType inputs..");
break;
}
mat = cv::Mat(size, type, inData->getData().data());
@@ -204,7 +208,7 @@ void ImageConverter::toDaiMsg(const ImageMsgs::Image& inMsg, dai::ImgFrame& outD
return pair.second == inMsg.encoding;
});
if(revEncodingIter == encodingEnumMap.end())
std::runtime_error(
throw std::runtime_error(
"Unable to find DAI encoding for the corresponding "
"sensor_msgs::image.encoding stream");

Expand Down Expand Up @@ -267,7 +271,7 @@ void ImageConverter::planarToInterleaved(const std::vector<uint8_t>& srcData, st
destData[i * 3 + 2] = r;
}
} else {
std::runtime_error(
throw std::runtime_error(
"If you encounter the scenario where you need this "
"please create an issue on github");
}
@@ -287,7 +291,7 @@ void ImageConverter::interleavedToPlanar(const std::vector<uint8_t>& srcData, st
destData[i + w * h * 2] = r;
}
} else {
std::runtime_error(
throw std::runtime_error(
"If you encounter the scenario where you need this "
"please create an issue on github");
}
@@ -301,7 +305,7 @@ cv::Mat ImageConverter::rosMsgtoCvMat(ImageMsgs::Image& inMsg) {
cv::cvtColor(nv_frame, rgb, cv::COLOR_YUV2BGR_NV12);
return rgb;
} else {
std::runtime_error("This frature is still WIP");
throw std::runtime_error("This frature is still WIP");
return rgb;
}
}
@@ -387,4 +391,4 @@ ImageMsgs::CameraInfo ImageConverter::calibrationToCameraInfo(
}

} // namespace ros
} // namespace dai
} // namespace dai