[dualcamgrabber] working with software triggering

parent 4f2d45df6b
commit 0fa532d59a

@@ -18,9 +18,12 @@ void DualcamGrabber::run() {
}

Pylon::CInstantCameraArray &cameras = wrapper->getCameraArray();
wrapper->frameRate(static_cast<uint>(framerate), -1);
wrapper->frameRate(static_cast<uint>(100), -1);

wrapper->exposureTime(exposure);
wrapper->gain(gain);
wrapper->enableSoftwareTrigger(0);
wrapper->enableSoftwareTrigger(1);

cameras.StartGrabbing();
Pylon::CGrabResultPtr frame0, frame1;

@@ -28,40 +31,76 @@ void DualcamGrabber::run() {
Pylon::CPylonImage rightImage;
Pylon::CPylonImage stitchedImage;
std::string errorMessage = "";
bool failure = false;

auto before = high_resolution_clock::now();
auto after = high_resolution_clock::now();
auto after1 = high_resolution_clock::now();
auto stitch = high_resolution_clock::now();
auto stitchncopy = high_resolution_clock::now();
auto grab_duration = duration_cast<microseconds>(after - before);
auto grab1_duration = duration_cast<microseconds>(after1 - before);
auto stitch_duration = duration_cast<microseconds>(stitch - after);
auto stitchncopy_duration = duration_cast<microseconds>(stitchncopy - after);
auto done = high_resolution_clock::now();
auto total_duration = duration_cast<microseconds>(done - before);
int expected_usecs = (int)(1./framerate * 1000000);

while (cameras.IsGrabbing() && !stop_request && !failure) {
if (counter > 0) {
long delay = total_duration.count() - expected_usecs;
if (delay > 0) {
emit delayed(delay, counter-1);
} else {
// std::cerr << "expected ifi " << expected_usecs << " real ifi: " << total_duration.count() << " waiting for " << (-1*delay) << std::endl;
usleep(-delay);
}
}
before = high_resolution_clock::now();

while (cameras.IsGrabbing() && !stop_request) {
auto before = high_resolution_clock::now();
auto after1 = high_resolution_clock::now();
if (cameras[0].WaitForFrameTriggerReady(1000, Pylon::TimeoutHandling_ThrowException) &
cameras[1].WaitForFrameTriggerReady(1000, Pylon::TimeoutHandling_ThrowException)) {
// std::cerr << "executing software triggers" << std::endl;
cameras[0].ExecuteSoftwareTrigger();
cameras[1].ExecuteSoftwareTrigger();
}

try {
cameras[0].RetrieveResult( 5000, frame0, Pylon::TimeoutHandling_ThrowException );
after1 = high_resolution_clock::now();
cameras[1].RetrieveResult( 5000, frame1, Pylon::TimeoutHandling_ThrowException );
leftImage.AttachGrabResultBuffer( frame0 );
rightImage.AttachGrabResultBuffer( frame1 );
} catch( const std::exception &e ) {
qDebug() << "Grabbing frame failed! " << e.what();
failure = true;
}
after = high_resolution_clock::now();

if (!failure && leftImage.IsValid() && rightImage.IsValid()) {
try {
cameras[0].RetrieveResult( 5000, frame0, Pylon::TimeoutHandling_ThrowException );
after1 = high_resolution_clock::now();
cameras[1].RetrieveResult( 5000, frame1, Pylon::TimeoutHandling_ThrowException );
leftImage.AttachGrabResultBuffer( frame0 );
rightImage.AttachGrabResultBuffer( frame1 );
StitchImage::StitchToRight(leftImage, rightImage, &stitchedImage, errorMessage);
stitch = high_resolution_clock::now();
MyImage *img = new MyImage(stitchedImage.GetWidth(), stitchedImage.GetHeight());
img->setFrame(stitchedImage);
buffer->push(img);
} catch(const std::exception& e) {
qDebug() << "Grabbing frame failed! " << e.what();
std::cerr << e.what() << '\n';
}
auto after = high_resolution_clock::now();
if (leftImage.IsValid() && rightImage.IsValid()) {
try {
StitchImage::StitchToRight(leftImage, rightImage, &stitchedImage, errorMessage);
MyImage *img = new MyImage(stitchedImage.GetWidth(), stitchedImage.GetHeight());
img->setFrame(stitchedImage);
buffer->push(img);
} catch(const std::exception& e) {
std::cerr << e.what() << '\n';
}
}
auto stitch = high_resolution_clock::now();
auto grab_duration = duration_cast<microseconds>(after - before);
auto grab1_duration = duration_cast<microseconds>(after1 - before);
auto stitch_duration = duration_cast<microseconds>(stitch - after);
std::cerr << "framecount: " << counter << " grab_duration (us): " << grab_duration.count() << "\t" << " stitching (us): " << stitch_duration.count() << " grab1_duration (us): " << grab1_duration.count() << std::endl;
counter += 1;
auto done = high_resolution_clock::now();
auto total_duration = duration_cast<microseconds>(done - before);
double rate = (1./((double)total_duration.count()/1000000));
std::cerr << "total_duration (us): " << total_duration.count() << " rate: " << rate << std::endl;
}

stitchncopy = high_resolution_clock::now();
grab_duration = duration_cast<microseconds>(after - before);
grab1_duration = duration_cast<microseconds>(after1 - before);
stitch_duration = duration_cast<microseconds>(stitch - after);
stitchncopy_duration = duration_cast<microseconds>(stitchncopy - after);
// std::cerr << "framecount: " << counter << " grab1_duration (us): " << grab1_duration.count() << " grab_all (us): " << grab_duration.count() << std::endl;
// std::cerr << "\tpure stitch (us): " << stitch_duration.count() << " stitch'n'copy (us): " << stitchncopy_duration.count() << std::endl;
done = high_resolution_clock::now();
total_duration = duration_cast<microseconds>(done - before);
// double rate = (1./((double)total_duration.count()/1000000));
// std::cerr << "total_duration (us): " << total_duration.count() << " rate: " << rate << std::endl;
counter += 1;
}
cameras.StopGrabbing();
}
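
The pacing logic at the top of the grab loop is easiest to see in isolation: run() budgets 1/framerate seconds per iteration, emits delayed() when the previous iteration overran that budget, and otherwise sleeps off the remainder. Below is a minimal standalone sketch of the same pattern using only std::chrono and std::this_thread (no Pylon, Qt, or the project's wrapper/buffer objects; do_work() is a hypothetical stand-in for the grab-and-stitch step and the numbers are illustrative):

// Standalone sketch of the frame-pacing pattern used in DualcamGrabber::run().
// do_work() is a placeholder for the grab + stitch step; values are illustrative.
#include <chrono>
#include <iostream>
#include <thread>

using namespace std::chrono;

static void do_work() {
    // Simulate a grab/stitch step that takes a few milliseconds.
    std::this_thread::sleep_for(milliseconds(3));
}

int main() {
    const double framerate = 25.0;  // target frames per second
    const long expected_usecs = static_cast<long>(1. / framerate * 1000000);

    auto before = high_resolution_clock::now();
    auto total_duration = duration_cast<microseconds>(before - before);

    for (int counter = 0; counter < 100; ++counter) {
        if (counter > 0) {
            // Compare the previous iteration against the per-frame budget.
            long delay = total_duration.count() - expected_usecs;
            if (delay > 0) {
                // run() emits delayed(delay, counter-1) here
                std::cerr << "frame " << counter - 1 << " overran budget by " << delay << " us\n";
            } else {
                // run() calls usleep(-delay) here
                std::this_thread::sleep_for(microseconds(-delay));
            }
        }
        before = high_resolution_clock::now();

        do_work();  // grab both cameras, stitch, push the frame

        auto done = high_resolution_clock::now();
        total_duration = duration_cast<microseconds>(done - before);
    }
    return 0;
}

Sleeping off the remainder of the budget, rather than a fixed interval, keeps the average inter-frame interval close to 1/framerate even when grab and stitch times vary from frame to frame.
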
@@ -53,6 +53,7 @@ public slots:

signals:
void terminated();
void delayed(int, int);
};

#endif // DUALCAMGRABBER_H

@@ -49,6 +49,7 @@ double DualcamWrapper::maxFrameRate(int camindex) {


bool DualcamWrapper::frameRate(uint new_framerate, int camindex) {
qDebug() << "Setting FrameRate to " << new_framerate << " for camera " << camindex;
if (valid) {
if (camindex == -1) {
frameRate(new_framerate, 0);

@@ -72,6 +73,7 @@ bool DualcamWrapper::frameRate(uint new_framerate, int camindex) {


double DualcamWrapper::frameRate(int camindex) {
qDebug() << "Reading FrameRate from camera " << camindex;
assert(camindex >= 0 && camindex < 2);
double rate = -1.;
if (valid) {

@@ -85,6 +87,7 @@ double DualcamWrapper::frameRate(int camindex) {


double DualcamWrapper::exposureTime(int camindex) {
qDebug() << "Reading ExposureTime from camera " << camindex;
assert(camindex >= 0 && camindex < 2);
double time = -1.;
if (valid) {

@@ -98,6 +101,7 @@ double DualcamWrapper::exposureTime(int camindex) {


bool DualcamWrapper::exposureTime(double exposure_time, int camindex) {
qDebug() << "Setting exposure time to " << exposure_time << " for camera " << camindex;
if (valid) {
if (camindex == -1) {
exposureTime(exposure_time, 0);

@@ -110,6 +114,7 @@ bool DualcamWrapper::exposureTime(double exposure_time, int camindex) {
exp_time.SetValue( exposure_time );
GenApi::CEnumerationPtr(nodemap.GetNode( "ExposureTimeMode" ))->FromString("Timed");
} catch (...) {

qWarning() << "Could not set exposure for cam " << camindex;
}
}

@@ -142,6 +147,7 @@ uint32_t DualcamWrapper::sensorHeight(int camindex) {
}

double DualcamWrapper::gain(int camindex) {
qDebug() << "Reading Gain from camera " << camindex;
assert(camindex >= 0 && camindex < 2);
double gain = -1.;
if (valid) {

@@ -298,23 +304,21 @@ GenApi::INodeMap& DualcamWrapper::getNodemap(int camindex){
}

void DualcamWrapper::enableSoftwareTrigger(int camindex){
qDebug() << "Enabling software trigger for camera " << camindex;
GenApi::INodeMap &nodemap = getNodemap( camindex );
// Enable triggered image acquisition for the Frame Start trigger
Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("On");
// Set the trigger source to Line 1
Pylon::CEnumParameter(nodemap, "TriggerSource").SetValue("Line1");
// Set the trigger activation mode to level high
Pylon::CEnumParameter(nodemap, "TriggerActivation").SetValue("LevelHigh");
Pylon::CEnumParameter(nodemap, "TriggerSource").SetValue("Software");
// Pylon::CEnumParameter(nodemap, "TriggerActivation").SetValue("LevelHigh");
}

void DualcamWrapper::disableSoftwareTrigger(int camindex){
qDebug() << "Disabling software trigger for camera " << camindex;
GenApi::INodeMap& nodemap = getNodemap(camindex);
// Disable triggered image acquisition for the Frame Start trigger
Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("Off");
}

bool DualcamWrapper::softwareTriggerEnabeled(int camindex){
qDebug() << "Checking software trigger for camera " << camindex;
GenApi::INodeMap& nodemap = getNodemap(camindex);
// Get the current state.
return Pylon::CEnumParameter(nodemap, "TriggerMode").GetValue() == "On";
}
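
For reference, the software-trigger flow that enableSoftwareTrigger() sets up and DualcamGrabber::run() drives boils down to three Pylon calls per frame: wait until the camera can accept a frame trigger, fire the software trigger, then retrieve the result. The following single-camera sketch is a hypothetical standalone program, not part of this repository; it assumes the Basler pylon SDK and one attached camera, and uses only calls that appear in this commit plus standard pylon setup/teardown:

// Single-camera sketch of the software-trigger pattern used in this commit:
// TriggerMode=On + TriggerSource=Software, then wait / trigger / retrieve per frame.
#include <pylon/PylonIncludes.h>
#include <iostream>

int main() {
    Pylon::PylonInitialize();
    int exitCode = 0;
    try {
        Pylon::CInstantCamera camera(Pylon::CTlFactory::GetInstance().CreateFirstDevice());
        camera.Open();

        // Same node writes as DualcamWrapper::enableSoftwareTrigger().
        GenApi::INodeMap &nodemap = camera.GetNodeMap();
        Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("On");
        Pylon::CEnumParameter(nodemap, "TriggerSource").SetValue("Software");

        camera.StartGrabbing();
        Pylon::CGrabResultPtr result;

        for (int i = 0; i < 10 && camera.IsGrabbing(); ++i) {
            // Wait until the camera is ready to accept a frame trigger, then fire it.
            if (camera.WaitForFrameTriggerReady(1000, Pylon::TimeoutHandling_ThrowException)) {
                camera.ExecuteSoftwareTrigger();
            }
            // Block until the triggered frame arrives.
            camera.RetrieveResult(5000, result, Pylon::TimeoutHandling_ThrowException);
            if (result->GrabSucceeded()) {
                std::cout << "frame " << i << ": " << result->GetWidth()
                          << "x" << result->GetHeight() << std::endl;
            }
        }

        camera.StopGrabbing();
        // Restore free-running acquisition, as DualcamWrapper::disableSoftwareTrigger() does.
        Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("Off");
        camera.Close();
    } catch (const Pylon::GenericException &e) {
        std::cerr << "pylon error: " << e.GetDescription() << std::endl;
        exitCode = 1;
    }
    Pylon::PylonTerminate();
    return exitCode;
}

In the dual-camera version above, run() performs the wait/trigger pair for both cameras before retrieving either result, so both exposures are started before the (blocking) RetrieveResult calls begin.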