Compare commits
13 Commits
softwaretr...master

Commits (SHA1):
cd9bbcfa62
0fa532d59a
4f2d45df6b
12c2f6292f
6ec776cd4e
222c3a9ce1
cb1c7772e8
63dbc21df8
6916899620
b221a03d43
f4231e281b
6ef19c2375
767f8b7e42
@@ -5,6 +5,7 @@
CamConfigurator::CamConfigurator(Pylon::DeviceInfoList &deviceList, QWidget *parent) :
    deviceList(deviceList), QDialog(parent), preview(nullptr) {
    qDebug() << "CamConfig constructor";
    mode_combo = new QComboBox(this);
    mode_combo->addItem("camera mode");
    mode_combo->addItem("single camera");
@@ -45,6 +46,7 @@ CamConfigurator::CamConfigurator(Pylon::DeviceInfoList &deviceList, QWidget *par
    connect(buttonbox, &QDialogButtonBox::accepted, this, &QDialog::accept);
    connect(buttonbox, &QDialogButtonBox::rejected, this, &QDialog::reject);
    vbox->addWidget(buttonbox);
    qDebug() << "CamConfig constructor done!";
}

@@ -71,6 +73,7 @@ CameraSetup* CamConfigurator::stereoCameraView() {

void CamConfigurator::modeChanged(int idx) {
    qDebug() << "Mode changed!";
    stack->setCurrentIndex(0);
    if (stack->count() > 1 && preview != nullptr) {
        qDebug() << "removing previous widget from stack";
@@ -115,10 +118,11 @@ void CamConfigurator::modeChanged(int idx) {

void CamConfigurator::primaryDeviceChanged(int idx) {
    qDebug() << "Primary device changed!";
    if (mode_combo->currentText() == "single camera") { // single camera setting
        QString devicename = device_combo->currentText();
        preview->setPrimaryCamera(devicename);
    } else {
    } else if (mode_combo->currentText() == "stereo camera") { // stereo camera setting
        preview->switchArrangement();
    }
}
@@ -9,29 +9,33 @@ CameraPreview::CameraPreview(QWidget *parent):cameraname(""), camera(nullptr), Q
    this->setLayout(new QVBoxLayout(this));
    this->layout()->addWidget(label);
    imgLabel = new QLabel(this);
    imgLabel->setMinimumSize(QSize(1024, 768));
    imgLabel->setMinimumSize(QSize(800, 600));
    this->layout()->addWidget(imgLabel);
    QWidget *controls = new QWidget(this);

    width = new QSpinBox(controls);
    width->setMinimum(1);
    width->setMinimum(32);
    width->setMaximum(2048);
    width->setSingleStep(32);
    width->setValue(width->maximum());
    connect(width, SIGNAL(textChanged(QString)), SLOT(updateWidth(QString)));

    height = new QSpinBox(controls);
    height->setMinimum(1);
    height->setSingleStep(32);
    height->setMinimum(32);
    height->setMaximum(1536);
    height->setValue(height->maximum());
    connect(height, SIGNAL(textChanged(QString)), SLOT(updateHeight(QString)));

    xoffs = new QSpinBox(controls);
    xoffs->setSingleStep(8);
    xoffs->setMinimum(0);
    xoffs->setMaximum(2047);
    xoffs->setValue(0);
    connect(xoffs, SIGNAL(textChanged(QString)), SLOT(updateXoffs(QString)));

    yoffs = new QSpinBox(controls);
    yoffs->setSingleStep(8);
    yoffs->setMinimum(0);
    yoffs->setMaximum(1535);
    yoffs->setValue(0);
@@ -53,12 +57,11 @@ CameraPreview::CameraPreview(QWidget *parent):cameraname(""), camera(nullptr), Q
    this->layout()->addWidget(controls);
    //setPrimaryCamera(devicename);
    qDebug() << "Camera preview constructor";
    takeStill();
}

void CameraPreview::setCamera(QString &device){
    qDebug() << "update camera! ";// << device.toStdString();
    qDebug() << "update camera! " << device;
    cameraname = device;
    if (camera != nullptr) {
        qDebug() << "camera is not nullptr! ";
@@ -77,7 +80,26 @@ void CameraPreview::setCamera(QString &device){
        msgBox.exec();
        return;
    }
    int64_t max_height = camera->sensorHeight();
    int64_t max_width = camera->sensorWidth();
    std::cerr << "set spin box values " << max_width << " " << max_height << std::endl;
    width->blockSignals(true);
    height->blockSignals(true);
    xoffs->blockSignals(true);
    yoffs->blockSignals(true);
    width->setMaximum(max_width);
    width->setValue(max_width);
    height->setMaximum(max_height);
    height->setValue(max_height);
    xoffs->setMaximum(max_width - 1);
    xoffs->setValue(0);
    yoffs->setMaximum(max_height - 1);
    yoffs->setValue(0);
    label->setText(device + " - " + camera->userName());
    width->blockSignals(false);
    height->blockSignals(false);
    xoffs->blockSignals(false);
    yoffs->blockSignals(false);
    takeStill();
}
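
Review note (not part of the diff): the new setCamera() code pairs blockSignals(true)/blockSignals(false) by hand around the spin-box updates so that the update slots are not re-triggered while the ranges are being rewritten to the sensor dimensions. Qt's QSignalBlocker does the same thing as an RAII guard and also restores the previous state on early return. A minimal sketch, with a hypothetical helper name:

    #include <QSpinBox>
    #include <QSignalBlocker>

    // Hypothetical helper: set a new range and value without emitting valueChanged/textChanged.
    static void setRangeSilently(QSpinBox *box, int maximum, int value) {
        const QSignalBlocker guard(box);   // blocks the box's signals until end of scope
        box->setMaximum(maximum);
        box->setValue(value);
    }                                      // previous blocked state is restored here
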
@@ -85,17 +107,19 @@ void CameraPreview::setCamera(QString &device){

void CameraPreview::takeStill() {
    qDebug() << "Take Still image!";
    if (camera != nullptr && camera->isOpen()) {
        MyImage mimg;
        ImageSettings s = camera->getImageSettings();
        MyImage mimg(s.width, s.height);
        bool valid = camera->grabFrame(mimg);
        if (!valid) {
            qWarning() << "Grabbing from camera failed!";
            return;
        }
        qDebug() << "Grabbed image from camera succeeded!";
        QImage img(static_cast<uchar *>(mimg.data()), mimg.width(), mimg.height(),
                   QImage::Format::Format_Grayscale8);
        QPixmap mpm = QPixmap::fromImage(img);
        this->pm = mpm;
        mpm = mpm.scaledToWidth(1024);
        mpm = mpm.scaledToWidth(800);
        setImage(mpm);
        updateROI();
    } else {
@@ -111,7 +135,7 @@ void CameraPreview::updateWidth(QString s) {
    // if (xoffs->value() + width->value() > 2048) {
    //     xoffs->setValue(2048 - width->value());
    // }
    validate(width, xoffs, 2048);
    validate(width, xoffs, camera->sensorWidth());
    updateROI();
}

@@ -119,7 +143,7 @@ void CameraPreview::updateXoffs(QString s) {
    // if (xoffs->value() + width->value() > 2048) {
    //     width->setValue(2048 - xoffs->value());
    // }
    validate(xoffs, width, 2048);
    validate(xoffs, width, camera->sensorHeight());
    updateROI();
}

@@ -127,7 +151,7 @@ void CameraPreview::updateHeight(QString s) {
    // if (height->value() + yoffs->value() > 1536) {
    //     yoffs->setValue(1536 - height->value());
    // }
    validate(height, yoffs, 1536);
    validate(height, yoffs, camera->sensorHeight());
    updateROI();
}

@@ -135,20 +159,33 @@ void CameraPreview::updateYoffs(QString s) {
    // if (height->value() + yoffs->value() > 1536) {
    //     height->setValue(1536 - yoffs->value());
    // }
    validate(yoffs, height, 1536);
    validate(yoffs, height, camera->sensorWidth());
    updateROI();
}
void CameraPreview::validate(QSpinBox *origin, QSpinBox *dest, int limit){
    qDebug() << "validate";
    int val = ensureDivbyfour(origin->value());
    origin->setValue(val);
    if (origin->value() + dest->value() > limit) {
        dest->setValue(limit - origin->value());
    }
    qDebug() << "validate done";
}

int CameraPreview::ensureDivbyfour(int val) {
    // if (val % 4 != 0) {
    //     int divisor = floor(val / 4);
    //     val = divisor * 4;
    // }
    return val;
}
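
Review note (not part of the diff): validate() snaps the changed spin box (the rounding in ensureDivbyfour is currently commented out, so it is a no-op) and then clamps the paired box so that offset + extent stays within the given limit. In the hunks above, updateXoffs() now clamps against camera->sensorHeight() and updateYoffs() against camera->sensorWidth(); compared with updateWidth()/updateHeight() these two limits look swapped and may be worth double-checking. A minimal, self-contained sketch of the intended rule, with hypothetical names:

    #include <algorithm>

    // Hypothetical helper: keep offset + extent inside one sensor dimension and
    // snap the extent down to a multiple of `step` (what ensureDivbyfour was meant to do).
    static void clampRoi(int &offset, int &extent, int sensor_limit, int step = 4) {
        extent = (extent / step) * step;                   // round down to a multiple of step
        extent = std::min(extent, sensor_limit);           // extent alone may not exceed the sensor
        offset = std::min(offset, sensor_limit - extent);  // then offset + extent <= sensor_limit
        offset = std::max(offset, 0);
    }
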
void CameraPreview::updateROI(bool emitSignal) {
    qDebug() << "Update roi with signal: " << emitSignal;
    QImage img = pm.toImage();
    double scaling = 1024.0 / img.width();
    img = img.scaledToWidth(1024);
    double scaling = 800.0 / img.width();
    img = img.scaledToWidth(800);
    QPainter qPainter(&img);
    qPainter.setBrush(Qt::NoBrush);
    qPainter.setPen(Qt::red);
@@ -162,20 +199,24 @@ void CameraPreview::updateROI(bool emitSignal) {
    int rheight = round(height->value() * scaling);
    qPainter.drawRect(rxoffs, ryoffs, rwidth, rheight);
    bool bEnd = qPainter.end();

    QPixmap npm = QPixmap::fromImage(img);
    setImage(npm);
    if (emitSignal) {
        emit roiUpdated(xoffs->value(), yoffs->value(), width->value(), height->value());
    }
    qDebug() << "update ROI done";
}

void CameraPreview::setSize(int w, int h) {
    qDebug() << "set size " << w << " " << h;
    width->setValue(w);
    height->setValue(h);
    validate(width, xoffs, 2048);
    validate(height, yoffs, 1536);
    if (camera != nullptr && camera->isOpen()) {
        validate(width, xoffs, camera->sensorWidth());
        validate(height, yoffs, camera->sensorHeight());
    }
    updateROI(false);
    qDebug() << "set size done";
}

QString CameraPreview::device(){
@@ -184,12 +225,14 @@ QString CameraPreview::device(){

ROI CameraPreview::getRoi() {
    ROI r;
    uint32_t max_height = camera->sensorHeight();
    uint32_t max_width = camera->sensorWidth();
    r.x = xoffs->value();
    r.y = yoffs->value();
    r.width = width->value();
    r.height = height->value();
    r.height = r.height > 1536 ? 1536 - r.y : r.height;
    r.width = r.width > 2048 ? 2048 - r.x : r.width;
    r.height = r.height > max_height ? max_height - r.y : r.height;
    r.width = r.width > max_width ? max_width - r.x : r.width;
    return r;
}
@@ -52,7 +52,7 @@ private:
    void takeStill();
    void setImage(const QPixmap &img);
    void validate(QSpinBox *origin, QSpinBox *dest, int limit);

    int ensureDivbyfour(int value);
    PylonWrapper *camera;

};
@@ -10,6 +10,7 @@ SingleCamera::SingleCamera (QWidget *parent) :
    this->layout()->addWidget(camera1Preview);
}

CameraLayout SingleCamera::cameraLayout(){
    CameraLayout l;
    qDebug() << "Request layout";
@@ -20,6 +21,7 @@ CameraLayout SingleCamera::cameraLayout(){
    return l;
}

SingleCamera::~SingleCamera(){
    if (camera1Preview != nullptr) {
        delete camera1Preview;
@@ -33,7 +35,8 @@ DualCamera::DualCamera (QWidget *parent) :
    camera1Preview(nullptr),
    camera2Preview(nullptr),
    primary_device("") {
    qDebug() << "DualCamera View ... ";
    qDebug() << "DualCamera View constructor... ";
    std::cerr << "DualCamera View constructor... " << std::endl;
    this->setLayout(new QHBoxLayout(this));
    camera1Preview = new CameraPreview();
    camera2Preview = new CameraPreview();
@@ -46,11 +49,17 @@ DualCamera::DualCamera (QWidget *parent) :

void DualCamera::updateROI1(int x, int y, int w, int h) {
    camera2Preview->setSize(w, h);
    qDebug() << "Update ROI1: x " << x << " y " << y << " w " << w << " h " << h;
    if (camera2Preview != nullptr) {
        camera2Preview->setSize(w, h);
    }
}

void DualCamera::updateROI2(int x, int y, int w, int h) {
    camera1Preview->setSize(w, h);
    qDebug() << "Update ROI2: x " << x << " y " << y << " w " << w << " h " << h;
    if (camera1Preview != nullptr) {
        camera1Preview->setSize(w, h);
    }
}

void DualCamera::switchArrangement() {
@@ -62,7 +62,7 @@ public:

    void setPrimaryCamera(QString &device) {
        qDebug() << "Update primary camera";
        qDebug() << "Update primary camera to " << device;
        if (camera1Preview != nullptr) {
            camera1Preview->setCamera(device);
            primary_device = device;
@@ -71,7 +71,7 @@ public:

    void setSecondaryCamera(QString &device) {
        qDebug()<< "Update secondary camera";
        qDebug()<< "Update secondary camera to " << device;
        if (camera2Preview != nullptr) {
            camera2Preview->setCamera(device);
        }
@@ -8,66 +8,78 @@ typedef high_resolution_clock Time;
typedef milliseconds ms;
typedef duration<float> fsec;


void DualcamGrabber::run() {
    stop_request = false;
    size_t counter = 0;

    if (wrapper->isOpen()) {
        Pylon::CInstantCameraArray &cameras = wrapper->getCameraArray();
        wrapper->frameRate(static_cast<uint>(framerate), -1);
        wrapper->exposureTime(exposure);
        wrapper->gain(gain);
    if (!wrapper->isOpen()) {
        return;
    }

    Pylon::CInstantCameraArray &cameras = wrapper->getCameraArray();
    wrapper->frameRate(static_cast<uint>(100), -1);

    wrapper->exposureTime(exposure);
    wrapper->gain(gain);
    wrapper->enableSoftwareTrigger(0);
    wrapper->enableSoftwareTrigger(1);

    cameras.StartGrabbing();
    Pylon::CGrabResultPtr frame0, frame1;
    Pylon::CPylonImage leftImage;
    Pylon::CPylonImage rightImage;
    Pylon::CPylonImage stitchedImage;
    std::string errorMessage = "";
    bool failure = false;

    auto before = high_resolution_clock::now();
    auto done = high_resolution_clock::now();
    auto total_duration = duration_cast<microseconds>(done - before);
    int expected_usecs = (int)(1./framerate * 1000000);

    while (cameras.IsGrabbing() && !stop_request && !failure) {
        if (counter > 0) {
            long delay = total_duration.count() - expected_usecs;
            if (delay > 0) {
                emit delayed(delay, counter-1);
            } else {
                usleep(-delay);
            }
        }
        before = high_resolution_clock::now();

        if (cameras[0].WaitForFrameTriggerReady(1000, Pylon::TimeoutHandling_ThrowException) &&
            cameras[1].WaitForFrameTriggerReady(1000, Pylon::TimeoutHandling_ThrowException)) {
            // std::cerr << "executing software triggers" << std::endl;
            cameras[0].ExecuteSoftwareTrigger();
            cameras[1].ExecuteSoftwareTrigger();
        }

        cameras.StartGrabbing();
        Pylon::CGrabResultPtr frame0, frame1;
        Pylon::CPylonImage leftImage;
        Pylon::CPylonImage rightImage;
        Pylon::CPylonImage stitchedImage;
        std::string errorMessage = "";
        try {
            cameras[0].RetrieveResult( 5000, frame0, Pylon::TimeoutHandling_ThrowException );
            cameras[1].RetrieveResult( 5000, frame1, Pylon::TimeoutHandling_ThrowException );
            leftImage.AttachGrabResultBuffer( frame0 );
            rightImage.AttachGrabResultBuffer( frame1 );
        } catch( const Pylon::GenericException &e ) {
            qDebug() << "Grabbing frame failed! " << e.what();
            failure = true;
        }

        // int ifi = 0;
        // int deviation = 0;
        // std::cerr << wrapper->frameRate(0) << "\t" << wrapper->frameRate(1) << "\t" << framerate << std::endl;
        // int desired_ifi = (1./wrapper->frameRate(0) * 1000000);
        // auto framestart = high_resolution_clock::now();
        while (cameras.IsGrabbing() && !stop_request) {
            // if (counter > 0) {
            //     deviation = desired_ifi - ifi;
            //     if (deviation > 0)
            //         // usleep(deviation);
            //     std::cerr << desired_ifi << "\t" << deviation << std::endl;
            // }
            // auto start = high_resolution_clock::now();
            MyImage *img = new MyImage();
            // auto stop1 = high_resolution_clock::now();
            cameras[0].RetrieveResult( 5000, frame0, Pylon::TimeoutHandling_ThrowException );
            // auto stop2 = high_resolution_clock::now();
            cameras[1].RetrieveResult( 5000, frame1, Pylon::TimeoutHandling_ThrowException );
            // auto stop3 = high_resolution_clock::now();
            leftImage.AttachGrabResultBuffer( frame0 );
            rightImage.AttachGrabResultBuffer( frame1 );
            if (leftImage.IsValid() && rightImage.IsValid()) {
                try {
                    StitchImage::StitchToRight(leftImage, rightImage, &stitchedImage, errorMessage);
                    img->setFrame(stitchedImage);
                    buffer->push(img);
                } catch(const std::exception& e) {
                    std::cerr << e.what() << '\n';
                }
            }
            // auto stop4 = high_resolution_clock::now();
            // auto duration1 = duration_cast<microseconds>(stop1 - start);
            // auto duration2 = duration_cast<microseconds>(stop2 - stop1);
            // auto duration3 = duration_cast<microseconds>(stop3 - stop2);
            // auto duration4 = duration_cast<microseconds>(stop4 - stop3);
            // std::cerr << "framecount: " << counter << " image constr: " << duration1.count() << "\t" << " retrieve1: " << duration2.count() << "\t" << " retrieve2: " << duration3.count() << "\t" << "conversion: " << duration4.count() << std::endl;
            // ifi = duration_cast<microseconds>(stop4 - framestart).count();
            // framestart = stop4;
            // if (counter > 0) {
            //     std::cerr << "frame " << counter << " inter frame interval: " << ifi << "microseconds" << std::endl;
            // }
            counter += 1;
        if (!failure && leftImage.IsValid() && rightImage.IsValid()) {
            try {
                StitchImage::StitchToRight(leftImage, rightImage, &stitchedImage, errorMessage);
                MyImage *img = new MyImage(stitchedImage.GetWidth(), stitchedImage.GetHeight());
                img->setFrame(stitchedImage);
                buffer->push(img);
            } catch(const std::exception& e) {
                std::cerr << e.what() << '\n';
            }
            cameras.StopGrabbing();
        }

        counter += 1;
        done = high_resolution_clock::now();
        total_duration = duration_cast<microseconds>(done - before);
    }
    cameras.StopGrabbing();
}
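
Review note (not part of the diff): the reworked run() paces the software-triggered loop by comparing the measured per-iteration duration against the expected inter-frame interval (1/framerate in microseconds) and either sleeping the remainder or emitting delayed() when the loop is late. Note also that the new code passes a literal 100 to wrapper->frameRate() instead of the framerate member, which may be intentional or a debugging leftover. A minimal, self-contained sketch of the pacing arithmetic only, assuming a fixed target rate:

    #include <chrono>
    #include <thread>

    // Sketch of the pacing rule: measure how long one iteration took and sleep
    // for the remainder of the expected inter-frame interval, if any.
    int main() {
        using namespace std::chrono;
        const double framerate = 100.0;                               // frames per second (assumed)
        const auto expected = microseconds((long)(1.0 / framerate * 1e6));
        auto before = high_resolution_clock::now();
        for (int frame = 0; frame < 5; ++frame) {
            // ... trigger, retrieve and stitch one frame pair here ...
            auto elapsed = duration_cast<microseconds>(high_resolution_clock::now() - before);
            if (elapsed < expected)
                std::this_thread::sleep_for(expected - elapsed);      // pad to the target interval
            // else: the iteration is late; the grabber reports this via the delayed() signal
            before = high_resolution_clock::now();
        }
        return 0;
    }
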
@@ -53,6 +53,7 @@ public slots:

signals:
    void terminated();
    void delayed(int, int);
};

#endif // DUALCAMGRABBER_H
@@ -49,6 +49,7 @@ double DualcamWrapper::maxFrameRate(int camindex) {


bool DualcamWrapper::frameRate(uint new_framerate, int camindex) {
    qDebug() << "Setting FrameRate to " << new_framerate << " for camera " << camindex;
    if (valid) {
        if (camindex == -1) {
            frameRate(new_framerate, 0);
@@ -72,6 +73,7 @@ bool DualcamWrapper::frameRate(uint new_framerate, int camindex) {


double DualcamWrapper::frameRate(int camindex) {
    qDebug() << "Reading FrameRate from camera " << camindex;
    assert(camindex >= 0 && camindex < 2);
    double rate = -1.;
    if (valid) {
@@ -85,6 +87,7 @@ double DualcamWrapper::frameRate(int camindex) {


double DualcamWrapper::exposureTime(int camindex) {
    qDebug() << "Reading ExposureTime from camera " << camindex;
    assert(camindex > 0 && camindex < 2);
    double time = -1.;
    if (valid) {
@@ -98,28 +101,53 @@ double DualcamWrapper::exposureTime(int camindex) {


bool DualcamWrapper::exposureTime(double exposure_time, int camindex) {
    qDebug() << "Setting exposure time to " << exposure_time << " for camera " << camindex;
    if (valid) {
        if (camindex == -1) {
            exposureTime(exposure_time, 0);
            exposureTime(exposure_time, 1);
        } else {
            GenApi::INodeMap& nodemap = getNodemap(camindex);
            double d = GenApi::CFloatPtr(nodemap.GetNode("ExposureTime"))->GetValue();
            GenApi::INode* n = nodemap.GetNode( "ExposureTime" );
            try {
                GenApi::CEnumerationPtr(nodemap.GetNode( "ExposureTimeMode" ))->FromString("Standard");
                GenApi::INodeMap& nodemap = getNodemap(camindex);
                GenApi::INode* n = nodemap.GetNode( "ExposureTime" );
                Pylon::CFloatParameter exp_time( n );
                exp_time.SetValue( exposure_time );
                GenApi::CEnumerationPtr(nodemap.GetNode( "ExposureTimeMode" ))->FromString("Timed");;
            } catch (...) {
                qWarning() << "Could not set exposure for cam0";

                qWarning() << "Could not set exposure for cam " << camindex;
            }
            Pylon::CFloatParameter exp_time( n );
            exp_time.SetValue( exposure_time );
        }
    }
    return false;
}
uint32_t DualcamWrapper::sensorWidth(int camindex) {
    qDebug() << "Reading SensorWidth from camera " << camindex;
    assert(camindex >= 0 && camindex < 2);
    uint32_t width = -1;
    if (valid) {
        GenApi::INodeMap &nodemap = getNodemap(camindex);
        Pylon::CIntegerParameter pwidth( nodemap, "SensorWidth" );
        width = (uint32_t)pwidth.GetValue();
    }
    return width;
}

uint32_t DualcamWrapper::sensorHeight(int camindex) {
    qDebug() << "Reading SensorHeight from camera " << camindex;
    assert(camindex >= 0 && camindex < 2);
    uint32_t height = -1;
    if (valid) {
        GenApi::INodeMap &nodemap = getNodemap(camindex);
        Pylon::CIntegerParameter pheight( nodemap, "SensorHeight" );
        height = (uint32_t)pheight.GetValue();
    }
    return height;
}
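
Review note (not part of the diff): width and height are initialised with -1 as a "not available" sentinel, which for uint32_t wraps to 0xFFFFFFFF; callers that want to detect the failure case therefore have to compare against the wrapped maximum rather than a negative number. A minimal sketch of a hypothetical caller-side check:

    #include <cstdint>
    #include <limits>

    // Hypothetical check for the sentinel returned by sensorWidth()/sensorHeight().
    bool dimensionAvailable(uint32_t dim) {
        return dim != std::numeric_limits<uint32_t>::max();  // -1 assigned to uint32_t wraps to this value
    }
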
double DualcamWrapper::gain(int camindex) {
    qDebug() << "Reading Gain from camera " << camindex;
    assert(camindex >= 0 && camindex < 2);
    double gain = -1.;
    if (valid) {
@@ -193,31 +221,35 @@ bool DualcamWrapper::grabFrame(MyImage &img, int camindex) {

void DualcamWrapper::setROI() {
    for (int camindex = 0; camindex < 2; camindex++){
        qDebug() << "Setting ROI: w" << layout.rois[camindex].width << " h: "<< layout.rois[camindex].height << " x " << layout.rois[camindex].x << " y " << layout.rois[camindex].y << std::endl;
        qDebug() << "Setting ROI: w" << layout.rois[camindex].width << " h: "<< layout.rois[camindex].height << " x " << layout.rois[camindex].x << " y " << layout.rois[camindex].y;
        try {
            GenApi::INodeMap &nodemap = getNodemap(camindex);
            Pylon::CIntegerParameter(nodemap, "Width").SetValue(layout.rois[camindex].width);
            Pylon::CIntegerParameter(nodemap, "Height").SetValue(layout.rois[camindex].height);
            Pylon::CIntegerParameter(nodemap, "OffsetX").SetValue(layout.rois[camindex].x);
            Pylon::CIntegerParameter(nodemap, "OffsetY").SetValue(layout.rois[camindex].y);
        } catch (const Pylon::GenericException &e) {
            std::cerr << e.GetDescription() << std::endl;
        }
    }
}


void DualcamWrapper::resetCamera(int camindex) {
    GenApi::INodeMap &nodemap = getNodemap( camindex );
    int64_t dfltWidth = 2048;
    int64_t dfltHeight = 1536;
    qDebug() << "resetting camera to default ROI (" << dfltWidth << ", " << dfltHeight << ")";
    try {
        Pylon::CIntegerParameter(nodemap, "Width").SetValue(dfltWidth, false);
        Pylon::CIntegerParameter(nodemap, "Height").SetValue(dfltHeight, false);
        Pylon::CIntegerParameter(nodemap, "OffsetX").SetValue(0);
        Pylon::CIntegerParameter(nodemap, "OffsetY").SetValue(0);
    } catch (const Pylon::GenericException &e) {
        std::string message = e.GetDescription();
        std::cerr << "An exception occurred." << std::endl << e.GetDescription() << std::endl;
        valid = false;
    }
    GenApi::INodeMap &nodemap = getNodemap( camindex );
    uint32_t width = sensorWidth(camindex);
    uint32_t height = sensorHeight(camindex);
    qDebug() << "resetting camera to default ROI (" << width << ", " << height << ")";
    try {
        Pylon::CIntegerParameter(nodemap, "Width").SetValue(width, false);
        Pylon::CIntegerParameter(nodemap, "Height").SetValue(height, false);
        Pylon::CIntegerParameter(nodemap, "OffsetX").SetValue(0);
        Pylon::CIntegerParameter(nodemap, "OffsetY").SetValue(0);
    } catch (const Pylon::GenericException &e) {
        std::string message = e.GetDescription();
        std::cerr << "An exception occurred." << std::endl << e.GetDescription() << std::endl;
        valid = false;
    }
}
@@ -258,7 +290,7 @@ void DualcamWrapper::closeCameras() {
}

Pylon::CInstantCameraArray &DualcamWrapper::getCameraArray() {
    return cameras;
    return cameras;
}

// Pylon::CInstantCamera DualcamWrapper::getCamera(int camindex) {
@@ -267,6 +299,26 @@ Pylon::CInstantCameraArray &DualcamWrapper::getCameraArray() {


GenApi::INodeMap& DualcamWrapper::getNodemap(int camindex){
    GenApi::INodeMap &nodemap = cameras[camindex].GetNodeMap();
    return nodemap;
    GenApi::INodeMap &nodemap = cameras[camindex].GetNodeMap();
    return nodemap;
}
void DualcamWrapper::enableSoftwareTrigger(int camindex){
    qDebug() << "Enabling software trigger for camera " << camindex;
    GenApi::INodeMap &nodemap = getNodemap( camindex );
    Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("On");
    Pylon::CEnumParameter(nodemap, "TriggerSource").SetValue("Software");
    // Pylon::CEnumParameter(nodemap, "TriggerActivation").SetValue("LevelHigh");
}

void DualcamWrapper::disableSoftwareTrigger(int camindex){
    qDebug() << "Disabling software trigger for camera " << camindex;
    GenApi::INodeMap& nodemap = getNodemap(camindex);
    Pylon::CEnumParameter(nodemap, "TriggerMode").SetValue("Off");
}

bool DualcamWrapper::softwareTriggerEnabeled(int camindex){
    qDebug() << "Checking software trigger for camera " << camindex;
    GenApi::INodeMap& nodemap = getNodemap(camindex);
    return Pylon::CEnumParameter(nodemap, "TriggerMode").GetValue() == "On";
}
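
Review note (not part of the diff): these new helpers put each camera into software-trigger mode; DualcamGrabber::run() then drives both cameras in lock-step by waiting for trigger readiness and firing the triggers explicitly before retrieving the two frames. The identifier softwareTriggerEnabeled is spelled this way in both the source and the header, so it compiles, but it may be worth renaming in a follow-up. A condensed restatement of the per-frame sequence the grabber assumes, using only calls that already appear in this diff (error handling omitted):

    // Per-frame sequence once TriggerMode=On and TriggerSource=Software:
    if (cameras[0].WaitForFrameTriggerReady(1000, Pylon::TimeoutHandling_ThrowException) &&
        cameras[1].WaitForFrameTriggerReady(1000, Pylon::TimeoutHandling_ThrowException)) {
        cameras[0].ExecuteSoftwareTrigger();   // fire both cameras back to back
        cameras[1].ExecuteSoftwareTrigger();
    }
    cameras[0].RetrieveResult(5000, frame0, Pylon::TimeoutHandling_ThrowException);
    cameras[1].RetrieveResult(5000, frame1, Pylon::TimeoutHandling_ThrowException);
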
@@ -16,6 +16,8 @@ public:
    ~DualcamWrapper();

    ImageSettings getImageSettings(int camindex);
    uint32_t sensorWidth(int camindex);
    uint32_t sensorHeight(int camindex);
    bool isOpen();
    void terminate();
    bool openCameras(std::string &message);
@@ -28,6 +30,9 @@ public:
    bool exposureTime(double exposure_time, int camindex=-1);
    double gain(int camindex);
    bool gain(double gain_db, int camindex=-1);
    void enableSoftwareTrigger(int camindex);
    void disableSoftwareTrigger(int camindex);
    bool softwareTriggerEnabeled(int camindex);
    Pylon::CInstantCameraArray &getCameraArray();

private:
@@ -9,11 +9,12 @@ void Grabber::run() {
    camera->frameRate(static_cast<uint>(framerate));
    camera->exposureTime(exposure);
    camera->gain(gain);
    ImageSettings settings = camera->getImageSettings();
    Pylon::CInstantCamera *cam = camera->getCamera();
    Pylon::CGrabResultPtr frame;
    cam->StartGrabbing();
    while (camera->isOpen() && !stop_request) {
        MyImage *img = new MyImage();
        MyImage *img = new MyImage(settings.width, settings.height);
        cam->RetrieveResult( 5000, frame, Pylon::TimeoutHandling_ThrowException);
        img->setFrame(frame);
        buffer->push(img);
myimage.cpp (19 changes)
@@ -1,20 +1,28 @@
#include "myimage.h"
#include <chrono>
#include "mylogger.h"

MyImage::MyImage()
{}
MyImage::MyImage(uint32_t width, uint32_t height): img_width(width), img_height(height)
{
    buffer = new char[width * height];
}

MyImage::MyImage(Pylon::CGrabResultPtr ptr) {
    setFrame(ptr);
}

MyImage::~MyImage() {
    delete[] buffer;
}

bool MyImage::setFrame(Pylon::CGrabResultPtr ptr) {
    qDebug() << "Setting frame from pointer";
    bool valid = ptr.IsValid() && ptr->GetWidth() <= max_width && ptr->GetHeight() <= max_height;
    if (valid) {
        img_index = ptr->GetID();
        img_width = ptr->GetWidth();
        img_height = ptr->GetHeight();
        memcpy(&buffer, ptr->GetBuffer(), ptr->GetImageSize());
        memcpy(buffer, ptr->GetBuffer(), ptr->GetImageSize());
        auto t = std::chrono::system_clock::now();
        img_timestamp = std::chrono::system_clock::to_time_t(t);
    }
@@ -22,11 +30,12 @@ bool MyImage::setFrame(Pylon::CGrabResultPtr ptr) {
}

bool MyImage::setFrame( Pylon::CPylonImage &img) {
    qDebug() << "Setting frame from Pylon image (" << img.GetWidth() << "x" << img.GetHeight() << ")";
    bool valid = img.IsValid() && img.GetWidth() <= max_width && img.GetHeight() <= max_height;
    if (valid) {
        img_width = img.GetWidth();
        img_height = img.GetHeight();
        memcpy(&buffer, img.GetBuffer(), img.GetImageSize());
        memcpy(buffer, img.GetBuffer(), img.GetImageSize());
        auto t = std::chrono::system_clock::now();
        img_timestamp = std::chrono::system_clock::to_time_t(t);
    }
@@ -50,7 +59,7 @@ int64_t MyImage::index() {
}

void *MyImage::data() {
    return &buffer;
    return buffer;
}

time_t MyImage::timestamp() {
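
Review note (not part of the diff): this commit switches MyImage from a fixed in-object array (char buffer[max_width * max_height]) to a heap allocation sized in the new (width, height) constructor. With the old array member, buffer and &buffer referred to the same address, so memcpy(&buffer, ...) happened to work; once buffer became a char *, &buffer is the address of the pointer variable itself, so the changes to memcpy(buffer, ...) and data() returning buffer are required. One thing that may be worth double-checking: the default constructor MyImage() no longer allocates, while the destructor unconditionally runs delete[] buffer, so initialising buffer to nullptr in the header would make that path safe. A minimal sketch of the ownership pattern, with hypothetical names:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Minimal sketch of the heap-buffer ownership MyImage now uses (hypothetical class name).
    class OwnedFrame {
    public:
        OwnedFrame(uint32_t w, uint32_t h) : width_(w), height_(h), buffer_(new char[w * h]) {}
        ~OwnedFrame() { delete[] buffer_; }
        OwnedFrame(const OwnedFrame &) = delete;             // copying would double-free the buffer
        OwnedFrame &operator=(const OwnedFrame &) = delete;
        void setPixels(const void *src, size_t n) { std::memcpy(buffer_, src, n); } // buffer_, not &buffer_
        void *data() { return buffer_; }
    private:
        uint32_t width_, height_;
        char *buffer_ = nullptr;
    };
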
@@ -6,8 +6,9 @@
class MyImage
{
public:
    MyImage();
    MyImage(uint32_t width, uint32_t height);
    MyImage(Pylon::CGrabResultPtr ptr);
    ~MyImage();

    int width();
    int height();
@@ -23,9 +24,9 @@ private:
    uint32_t img_height = 0;
    int64_t img_index = 0;
    time_t img_timestamp;
    static const int max_width = 4096;
    static const int max_height = 1536;
    char buffer[max_width * max_height];
    static const int max_width = 5184;
    static const int max_height = 2048;
    char *buffer; //[max_width * max_height];
};

#endif // MYIMAGE_H
@@ -87,6 +87,7 @@ PylonRecorder::PylonRecorder(QWidget *parent)
    statusHeader->setStyleSheet("QLabel{font-size: 11pt;font-family: Arial; font-weight: Bold}");
    QLabel *fileHeader = new QLabel("Output file:");
    fileHeader->setStyleSheet("QLabel{font-size: 11pt;font-family: Arial; font-weight: Bold}");

    statusBar()->addWidget(camHeader);
    statusBar()->addWidget(cameraConnectedLabel);
    statusBar()->addWidget(pressureLabel);
@@ -98,6 +99,7 @@ PylonRecorder::PylonRecorder(QWidget *parent)
    statusBar()->addWidget(writingLabel);
    statusBar()->addWidget(fileHeader);
    statusBar()->addWidget(fileLabel);

    resize(QGuiApplication::primaryScreen()->availableSize() * 3 / 5);
    detectCameras();
    createActions();
@@ -206,6 +208,7 @@ bool PylonRecorder::loadFile(const QString &fileName) {


void PylonRecorder::setImage(const QImage &newImage) {
    qDebug() << "Setting image";
    //FIXME figure out how to display both images. extract to extra class...
    image = newImage;
    // (image.colorSpace().isValid())
@@ -223,6 +226,7 @@ void PylonRecorder::setImage(const QImage &newImage) {
        applyScaling();
    }
    this->update();
    qDebug() << "Setting image done";
}


@@ -608,16 +612,17 @@ void PylonRecorder::adjustScrollBar(QScrollBar *scrollBar, double factor) {


void PylonRecorder::cameraConfiguration(){
    cameraConfigDialog = new CamConfigurator(deviceList, this);
    connect(cameraConfigDialog, SIGNAL(accepted()), SLOT(cameraConfigurationAccepted()));
    connect(cameraConfigDialog, SIGNAL(rejected()), SLOT(cameraConfigurationAborted()));
    // QObject::connect(&d, SIGNAL(column_visibility_changed(QString, QString,bool)), this, SLOT(visible_columns_update(QString, QString,bool)));
    cameraConfigDialog->exec();
    qDebug() << "Configuring camera(s)";
    cameraConfigDialog = new CamConfigurator(deviceList, this);
    connect(cameraConfigDialog, SIGNAL(accepted()), SLOT(cameraConfigurationAccepted()));
    connect(cameraConfigDialog, SIGNAL(rejected()), SLOT(cameraConfigurationAborted()));
    // QObject::connect(&d, SIGNAL(column_visibility_changed(QString, QString,bool)), this, SLOT(visible_columns_update(QString, QString,bool)));
    cameraConfigDialog->exec();
}


void PylonRecorder::cameraConfigurationAccepted() {
    qDebug() << "Cameras setting " << ((cameraConfigDialog->result()) ? "Accepted" : "Discarded");
    qDebug() << "Camera configuration " << ((cameraConfigDialog->result()) ? "Accepted" : "Discarded");
    this->layout = cameraConfigDialog->layout();
    camsconfigured = true;
    delete cameraConfigDialog;
@@ -627,12 +632,13 @@ void PylonRecorder::cameraConfigurationAccepted() {
void PylonRecorder::cameraConfigurationAborted() {
    qDebug() << "Camera configuration aborted!";
    camsconfigured = false;
    delete cameraConfigDialog;
}


void PylonRecorder::connectCamera() {
    this->layout = CameraLayout();
    qDebug() << "connecting camera(s)";
    this->layout = CameraLayout();
    if (deviceList.size() == 0) {
        detectCameras();
        QMessageBox msgBox;
@@ -647,6 +653,8 @@ void PylonRecorder::connectCamera() {
        qDebug() << "cameras have not been properly configured!";
        return;
    }

    //Single Camera mode
    if (layout.mode == CameraMode::single && layout.devices.size() == 1) {
        qDebug() << "single camera mode";
        std::string cname = layout.devices[0];
@@ -668,12 +676,15 @@ void PylonRecorder::connectCamera() {
        statusBar()->showMessage(QString::fromStdString(message));
        updateActions();
    }

    // Dual Camera mode
    if (layout.mode == CameraMode::dual && layout.devices.size() == 2) {
        qDebug() << "dual camera mode";
        std::string message;
        dualcam = new DualcamWrapper(layout);
        bool success = dualcam->openCameras(message);
        if (success) {
            qDebug() << "Dual cameras connected";
            cameraConnectedLabel->setText("connected");
            cameraConnectedLabel->setStyleSheet("QLabel { font-size: 10px;font-family: Arial;color: green;}");
            cameraOpened = true;
@@ -843,10 +854,10 @@ void PylonRecorder::startDualcamRecording() {
    connect(writer, SIGNAL(writingDone()), this, SLOT(writerDone()));
    writer->setVideoSpecs(specs);

    qDebug() << "push metadata to writer";
    QSettings s;
    this->mdata.read(s);
    writer->setProjectMetadata(mdata);
    // qDebug() << "push metadata to writer";
    // QSettings s;
    // this->mdata.read(s);
    // writer->setProjectMetadata(mdata);

    dryRun = dryRunCheckBox->isChecked();
    buffer->clear();
@@ -895,10 +906,8 @@ void PylonRecorder::stopRecording() {
    stopRequest = true;
    grab_stop_action->setEnabled(false);
    qDebug() << "StopRecording: clear buffer!";
    if(buffer != nullptr) {
        buffer->clear();
        if (dryRun)
            writerDone();
        if (dryRun){
            writerDone();
        }
    }
    qDebug() << "StopRecording done!";
@@ -919,8 +928,12 @@ void PylonRecorder::writerDone() {
    }
    if (writer != nullptr)
        writer->wait(10000);
    if(buffer != nullptr) {
        buffer->clear();
    }
    writing = false;
    updateActions();

    qInfo() << "writer is Done!";
}

@@ -933,6 +946,7 @@ void PylonRecorder::displayActivity() {


void PylonRecorder::displaySingleFrame() {
    qDebug() << "display single frame";
    MyImage *img;
    size_t fc = 0;
    img = buffer->readLast(fc);
@@ -940,7 +954,7 @@ void PylonRecorder::displaySingleFrame() {
        QImage qimg(static_cast<uchar *>(img->data()), img->width(), img->height(), QImage::Format::Format_Grayscale8);
        setImage(qimg);
    }else {
        std::cerr << "Error reading last image" << std::endl;
        qDebug() << "Error reading last image";
    }
}

@@ -986,7 +1000,8 @@ void PylonRecorder::displayBufferPressure() {
void PylonRecorder::grabStillFromPylon() {
    qDebug() << "Grab still image form camera!";
    if (singlecam != nullptr && singlecam->isOpen()) {
        MyImage img;
        ImageSettings s = singlecam->getImageSettings();
        MyImage img(s.width, s.height);
        bool valid = singlecam->grabFrame(img);
        if (valid) {
            QImage qimg(static_cast<uchar *>(img.data()), img.width(), img.height(),
@@ -996,7 +1011,7 @@ void PylonRecorder::grabStillFromPylon() {
    } else {
        statusBar()->showMessage(tr("Camera is not open! Connect to camera first!"));
    }
    //FIXME does not work for single camera mode!
    //FIXME does not work for stereo camera mode!
    qDebug() << "grabbing still image done!";
}
@@ -157,14 +157,43 @@ bool PylonWrapper::grabFrame(MyImage &img) {
        camera->StartGrabbing();
        camera->RetrieveResult( 5000, frame, Pylon::TimeoutHandling_ThrowException);
        camera->StopGrabbing();
        qDebug() << "grabFrame done";
    }

    img.setFrame(frame);
    return frame.IsValid();
}

uint32_t PylonWrapper::sensorWidth() {
    qDebug() << "Reading SensorWidth";
    uint32_t width = -1;
    if (valid) {
        qDebug() << "SensorWidth available";
        GenApi::INodeMap &nodemap = camera->GetNodeMap();
        if (GenApi::IsAvailable(nodemap.GetNode("SensorWidth"))) {
            Pylon::CIntegerParameter pwidth( nodemap, "SensorWidth" );
            width = (uint32_t)pwidth.GetValue();
        }
    }
    return width;
}

uint32_t PylonWrapper::sensorHeight() {
    qDebug() << "Reading SensorHeight";
    uint32_t height = -1;
    if (valid){
        GenApi::INodeMap &nodemap = camera->GetNodeMap();
        if (GenApi::IsAvailable(nodemap.GetNode("SensorHeight"))) {
            Pylon::CIntegerParameter pheight( nodemap, "SensorHeight" );
            height = (uint32_t)pheight.GetValue();
        }
    }
    return height;
}

void PylonWrapper::resetCamera() {
    int64_t dfltWidth = 2048;
    int64_t dfltHeight = 1536;
    uint32_t dfltWidth = sensorWidth();
    uint32_t dfltHeight = sensorHeight();
    qDebug() << "resetting camera to default ROI (" << dfltWidth << ", " << dfltHeight << ")";
    try {
        GenApi::INodeMap& nodemap = camera->GetNodeMap();
@@ -179,6 +208,7 @@ void PylonWrapper::resetCamera() {
        std::cerr << "An exception occurred." << std::endl << e.GetDescription() << std::endl;
        valid = false;
    }
    qDebug() << "resetting camera to default ROI done";
}

bool PylonWrapper::openCamera(std::string &message) {
@@ -236,5 +266,12 @@ Pylon::CInstantCamera *PylonWrapper::getCamera() {
QString PylonWrapper::userName() {
    GenApi::INodeMap& nodemap = camera->GetNodeMap();
    QString username = Pylon::CStringParameter(nodemap, "DeviceUserID").GetValue().c_str();
    if (username.length() == 0) {
        username = Pylon::CStringParameter(nodemap, "DeviceModelName").GetValue().c_str();
    }
    return username;
}

QString PylonWrapper::deviceName() {
    return QString::fromStdString(fullName);
}
@@ -27,9 +27,12 @@ public:
    bool exposureTime(double exposure_time);
    double gain();
    bool gain(double gain_db);
    uint32_t sensorHeight();
    uint32_t sensorWidth();
    QString userName();
    Pylon::CInstantCamera *getCamera();
    void resetCamera();
    QString deviceName();

private:
    Pylon::CInstantCamera *camera;
writer.cpp (106 changes)
@@ -2,6 +2,12 @@
#include <chrono>
#include <fstream>
#include <pylon/VideoWriter.h>
#include <chrono>
using namespace std::chrono;
typedef high_resolution_clock Time;
typedef milliseconds ms;
typedef duration<float> fsec;


void Writer::setVideoSpecs(VideoSpecs specs) {
    videoSpecs = specs;
@@ -41,111 +47,35 @@ void Writer::run() {
    qDebug() << "checks done!";

    Pylon::CVideoWriter videoWriter;
    if (specs_valid) {
    if (specs_valid && videoSpecs.format != VideoFormat::raw) {
        stop_request = false;
        stopNow = false;
        if (videoSpecs.format == VideoFormat::raw) {
            myFile.open(videoSpecs.filename, std::ios::out | std::ios::binary);
            myFile.write((char*)&videoSpecs.width, 4);
            myFile.write((char*)&videoSpecs.height, 4);
        } else {
            qDebug() << "setting parameters for video";
            videoWriter.SetParameter((uint32_t)videoSpecs.width, (uint32_t)videoSpecs.height, videoSpecs.pixelType, (double)videoSpecs.fps, videoSpecs.quality);
            videoWriter.Open(videoSpecs.filename.c_str());
        }

        nix::File nix_file =nix::File::open(videoSpecs.filename + ".nix", nix::FileMode::Overwrite, "hdf5", nix::Compression::DeflateNormal);
        nix::Block b = nix_file.createBlock("Recording", "nix.recording");
        nix::Section s = nix_file.createSection("Recording", "nix.recording");
        b.metadata(s);
        nix::Value v(nix::util::timeToStr(std::chrono::system_clock::to_time_t(std::chrono::system_clock::now())));
        s.createProperty("date", v);

        nix::Value fn(videoSpecs.filename);
        s.createProperty("moviefile", fn);
        nix::Section sw_sec = s.createSection("PylonRecorder", "nix.software");
        sw_sec.createProperty("version", nix::Value(1));

        nix::Section hw_sec = s.createSection("Basler ACA2040-120um", "nix.hardware.camera");
        hw_sec.createProperty("type", nix::Value("monochrome"));
        hw_sec.createProperty("manufacturer", nix::Value("Basler AG"));
        nix::Property p = hw_sec.createProperty("framerate", nix::Value(static_cast<int>(videoSpecs.fps)));
        p.unit("Hz");
        nix::Property p1 = hw_sec.createProperty("exposure time", nix::Value(static_cast<int>(videoSpecs.exposureTime)));
        p1.unit("us");
        nix::Property p2 = hw_sec.createProperty("detector gain", nix::Value(static_cast<int>(videoSpecs.detectorGain)));
        p2.unit("dB");

        if (metadata_valid) {
            writeMetadata(s);
        }

        nix::NDSize initial_shape(1, chunksize);
        nix::DataArray frametimes = b.createDataArray("frametimes", "nix.imaging.frametimes", nix::DataType::String, initial_shape);
        frametimes.label("time");
        frametimes.appendSetDimension();
        nix::DataArray frameindices = b.createDataArray("frameindex", "nix.imaging.frameid", nix::DataType::Int64, initial_shape);
        frameindices.appendSetDimension();

        std::vector<std::string> stamps_buffer(chunksize);
        std::vector<int64_t> ids_buffer(chunksize);

        nix::NDSize offset(1, 0);
        nix::NDSize current_shape(initial_shape);
        nix::NDSize chunk_shape(1, chunksize);

        videoWriter.SetParameter((uint32_t)videoSpecs.width, (uint32_t)videoSpecs.height,
                                 videoSpecs.pixelType, (double)videoSpecs.fps, videoSpecs.quality);
        videoWriter.Open(videoSpecs.filename.c_str());
        qDebug() << "preparations done, starting loop!";
        while ((!stop_request || buffer->bufferLoad() > 0) && !stopNow) {
            if (buffer->bufferLoad() > 0 ) {
                size_t framecount = 0;
                MyImage *img = buffer->read(framecount);
                if (img != nullptr) {
                    if (videoSpecs.format == VideoFormat::raw) {
                        myFile.write((char*)img->data(), img->size());
                    } else {
                        Pylon::CPylonImage pyImage;
                        try {
                            pyImage.AttachUserBuffer(img->data(), videoSpecs.width * videoSpecs.height, videoSpecs.pixelType, videoSpecs.width, videoSpecs.height, 0, videoSpecs.orientation);
                            videoWriter.Add(pyImage);
                        } catch (const Pylon::GenericException &e) {
                            std::cerr << "Writer::run: An exception occurred." << std::endl << e.GetDescription() << std::endl;
                        }
                    }
                    if (count < chunksize) {
                        try {
                            stamps_buffer[count] = nix::util::timeToStr(img->timestamp());
                        } catch (...) {
                            std::cerr << "Bad time to string conversion " << img->timestamp() << std::endl;
                            stamps_buffer[count] = "invalid";
                        }
                        ids_buffer[count] = img->index();
                        count ++;
                    } else {
                        frametimes.setData(nix::DataType::String, stamps_buffer.data(), chunk_shape, offset);
                        frameindices.setData(nix::DataType::Int64, ids_buffer.data(), chunk_shape, offset);
                        current_shape += initial_shape;
                        frametimes.dataExtent(current_shape);
                        frameindices.dataExtent(current_shape);
                        offset[0] += chunksize;
                        count = 0;
                    Pylon::CPylonImage pyImage;
                    try {
                        pyImage.AttachUserBuffer(img->data(), videoSpecs.width * videoSpecs.height, videoSpecs.pixelType, videoSpecs.width, videoSpecs.height, 0, videoSpecs.orientation);
                        videoWriter.Add(pyImage);
                    } catch (const Pylon::GenericException &e) {
                        std::cerr << "Writer::run: An exception occurred." << std::endl << e.GetDescription() << std::endl;
                    }
                }
            } else {
                while (buffer->bufferLoad() < 1 && !stop_request) {
                    msleep(5);
                    msleep(2);
                }
            }
        }
        if (count > 0) {
            chunk_shape[0] = count;
            frametimes.setData(nix::DataType::String, stamps_buffer.data(), chunk_shape, offset);
            frameindices.setData(nix::DataType::Int64, ids_buffer.data(), chunk_shape, offset);
        }
        videoWriter.Close();
        myFile.close();
        nix_file.close();
    } else {
        std::cerr << "Got no video specifications, not writing!" << std::endl;
        qDebug() << "Got no video specifications, not writing!";
    }
    emit writingDone();
}
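
Review note (not part of the diff): the hunk header (-41,111 +47,35) shows Writer::run() shrinking from roughly 111 to 35 lines, and the new guard `if (specs_valid && videoSpecs.format != VideoFormat::raw)` suggests that a large part of what is shown above (the raw-file branch and the NIX frametimes/frameindex bookkeeping) belongs to the removed side; from the flattened compare view alone it is hard to tell exactly which lines are old and which are new. Either way, the function follows the same producer/consumer pattern: drain the ring buffer while data is available, append each frame to the video writer, and back off briefly when the buffer is empty, continuing after a stop request until the buffer has drained. A condensed sketch of that loop, reusing only names that appear in the hunk (error handling shortened):

    // Condensed restatement of the write loop (not a drop-in replacement).
    while ((!stop_request || buffer->bufferLoad() > 0) && !stopNow) {
        if (buffer->bufferLoad() > 0) {
            size_t framecount = 0;
            MyImage *img = buffer->read(framecount);
            if (img != nullptr) {
                Pylon::CPylonImage pyImage;
                pyImage.AttachUserBuffer(img->data(), videoSpecs.width * videoSpecs.height,
                                         videoSpecs.pixelType, videoSpecs.width, videoSpecs.height,
                                         0, videoSpecs.orientation);
                videoWriter.Add(pyImage);   // wrapped in try/catch in the real code
            }
        } else {
            while (buffer->bufferLoad() < 1 && !stop_request) {
                msleep(2);                  // back off while the grabber catches up
            }
        }
    }
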