diff --git a/AcqSchedule.cpp b/AcqSchedule.cpp
new file mode 100644
index 0000000..fadfd77
--- /dev/null
+++ b/AcqSchedule.cpp
@@ -0,0 +1,49 @@
+/*
+ AcqSchedule.cpp
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see <http://www.gnu.org/licenses/>.
+*
+* Last modified: 20/07/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file AcqSchedule.cpp
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 19/06/2014
+* \brief
+*/
+
+#include "AcqSchedule.h"
+
+AcqSchedule::AcqSchedule(int H, int M, int S, int E, int G, int F, int N):
+ mH(H), mM(M), mS(S), mE(E), mG(G), mN(N), mF(F) {
+
+}
+
+AcqSchedule::AcqSchedule():
+ mH(0), mM(0), mS(0), mE(0), mG(0), mN(0), mF(0) {
+
+}
+
+AcqSchedule::~AcqSchedule() {};
+
diff --git a/AcqSchedule.h b/AcqSchedule.h
new file mode 100644
index 0000000..58adc70
--- /dev/null
+++ b/AcqSchedule.h
@@ -0,0 +1,145 @@
+/*
+ AcqSchedule.h
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau -- FRIPON-GEOPS-UPSUD
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see <http://www.gnu.org/licenses/>.
+*
+* Last modified: 20/10/2014
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file AcqSchedule.h
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 19/06/2014
+* \brief
+*/
+
+#pragma once
+
+#include <iostream>
+#include <string>
+
+using namespace std;
+
+class AcqSchedule{
+
+ private:
+
+ int mH; // Hours
+ int mM; // Minutes
+ int mS; // Seconds
+ int mE; // Exposure time
+ int mG; // Gain
+ int mN; // Repetition number
+ int mF; // Format
+ string mDate;
+
+ public:
+
+ /**
+ * Constructor.
+ *
+ * @param H Hour.
+ * @param M Minutes.
+ * @param S Seconds.
+ * @param E Exposure time.
+ * @param G Gain.
+ * @param F Format.
+ * @param N Repetition number.
+ */
+ AcqSchedule(int H, int M, int S, int E, int G, int F, int N);
+
+ /**
+ * Constructor.
+ *
+ */
+ AcqSchedule();
+
+ /**
+ * Destructor.
+ *
+ */
+ ~AcqSchedule();
+
+ /**
+ * Get acquisition hours.
+ *
+ * @return Hours.
+ */
+ int getH() {return mH;};
+
+ /**
+ * Get acquisition minutes.
+ *
+ * @return Minutes.
+ */
+ int getM() {return mM;};
+
+ /**
+ * Get acquisition seconds.
+ *
+ * @return Seconds.
+ */
+ int getS() {return mS;};
+
+ /**
+ * Get acquisition exposure time value.
+ *
+ * @return Exposure time.
+ */
+ int getE() {return mE;};
+
+ /**
+ * Get acquisition gain.
+ *
+ * @return Gain.
+ */
+ int getG() {return mG;};
+
+ /**
+ * Get acquisition format.
+ *
+ * @return Format : 8 or 12.
+ */
+ int getF() {return mF;};
+
+ /**
+ * Get acquisition repetition number.
+ *
+ * @return Repetition number.
+ */
+ int getN() {return mN;};
+
+ /**
+ * Set acquisition date.
+ *
+ * @param Date : YYYY-MM-DDTHH:MM:SS,fffffffff
+ */
+ void setDate(string date) {mDate = date;};
+
+ /**
+ * Get acquisition date.
+ *
+ * @return Date : YYYY-MM-DDTHH:MM:SS,fffffffff
+ */
+ string getDate() {return mDate;};
+
+};
diff --git a/AcqThread.cpp b/AcqThread.cpp
new file mode 100644
index 0000000..696cc02
--- /dev/null
+++ b/AcqThread.cpp
@@ -0,0 +1,1223 @@
+/*
+ AcqThread.cpp
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2016 Yoan Audureau, Chiara Marmo
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see <http://www.gnu.org/licenses/>.
+*
+* Last modified: 03/10/2016
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file AcqThread.cpp
+* \author Yoan Audureau, Chiara Marmo -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 21/01/2015
+* \brief Acquisition thread.
+*/
+
+#include "AcqThread.h"
+
+boost::log::sources::severity_logger< LogSeverityLevel > AcqThread::logger;
+
+AcqThread::Init AcqThread::initializer;
+
+AcqThread::AcqThread( boost::circular_buffer<Frame> *fb,
+// vector *frame_sprite,
+ boost::mutex *fb_m,
+ boost::condition_variable *fb_c,
+ bool *sSignal,
+ boost::mutex *sSignal_m,
+ boost::condition_variable *sSignal_c,
+ bool *dSignal,
+ boost::mutex *dSignal_m,
+ boost::condition_variable *dSignal_c,
+ DetThread *detection,
+ StackThread *stack,
+ int cid,
+ dataParam dp,
+ stackParam sp,
+ stationParam stp,
+ detectionParam dtp,
+ cameraParam acq,
+ framesParam fp,
+ videoParam vp,
+ fitskeysParam fkp) {
+
+ frameBuffer = fb;
+ // frameSprite = frame_sprite;
+ frameBuffer_mutex = fb_m;
+ frameBuffer_condition = fb_c;
+ stackSignal = sSignal;
+ stackSignal_mutex = sSignal_m;
+ stackSignal_condition = sSignal_c;
+ detSignal = dSignal;
+ detSignal_mutex = dSignal_m;
+ detSignal_condition = dSignal_c;
+ pDetection = detection;
+ pStack = stack;
+ mThread = NULL;
+ //sprite = NULL;
+ mMustStop = false;
+ mDevice = NULL;
+ mThreadTerminated = false;
+ mNextAcqIndex = 0;
+ pExpCtrl = NULL;
+ mDeviceID = cid;
+ mdp = dp;
+ msp = sp;
+ mstp = stp;
+ mdtp = dtp;
+ mcp = acq;
+ mvp = vp;
+ mfp = fp;
+
+ //mut_sprite = new mutex();
+ spr_reader;
+
+}
+
+AcqThread::~AcqThread(void){
+
+ if(mDevice != NULL)
+ delete mDevice;
+
+ if(mThread != NULL)
+ delete mThread;
+
+ if(pExpCtrl != NULL)
+ delete pExpCtrl;
+
+ /*if(sprite != NULL)
+ delete sprite;*/
+
+}
+
+void AcqThread::stopThread(){
+
+ mMustStopMutex.lock();
+ mMustStop = true;
+ mMustStopMutex.unlock();
+
+ if(mThread != NULL)
+ while(mThread->timed_join(boost::posix_time::seconds(2)) == false)
+ mThread->interrupt();
+
+}
+
+bool AcqThread::startThread() {
+
+ // Create a device.
+ mDevice = new Device(mcp, mfp, mvp, mDeviceID);
+
+ // Search available devices.
+ mDevice->listDevices(false);
+
+ // CREATE CAMERA
+ if(!mDevice->createCamera())
+ return false;
+
+ // Prepare continuous acquisition.
+ if(!prepareAcquisitionOnDevice())
+ return false;
+
+ // Create acquisition thread.
+ mThread = new boost::thread(boost::ref(*this));
+ //sprite = new SpriteThread(std::ref(mut_sprite),frameSprite, mDevice, spr_reader, frameSprite->capacity());
+
+ return true;
+
+}
+
+bool AcqThread::getThreadStatus(){
+
+ return mThreadTerminated;
+
+}
+
+void AcqThread::operator()(){
+
+ bool stop = false;
+
+ BOOST_LOG_SCOPED_THREAD_TAG("LogName", "ACQ_THREAD");
+ BOOST_LOG_SEV(logger,notification) << "\n";
+ BOOST_LOG_SEV(logger,notification) << "==============================================";
+ BOOST_LOG_SEV(logger,notification) << "========== START ACQUISITION THREAD ==========";
+ BOOST_LOG_SEV(logger,notification) << "==============================================";
+
+ try {
+
+ // Search next acquisition according to the current time.
+ selectNextAcquisitionSchedule(TimeDate::splitIsoExtendedDate(to_iso_extended_string(boost::posix_time::microsec_clock::universal_time())));
+
+ // Exposure adjustment variables.
+ bool exposureControlStatus = false;
+ bool exposureControlActive = false;
+ bool cleanStatus = false;
+
+ // If exposure can be set on the input device.
+ if(mDevice->getExposureStatus()) {
+
+ pExpCtrl = new ExposureControl( mcp.EXPOSURE_CONTROL_FREQUENCY,
+ mcp.EXPOSURE_CONTROL_SAVE_IMAGE,
+ mcp.EXPOSURE_CONTROL_SAVE_INFOS,
+ mdp.DATA_PATH,
+ mstp.STATION_NAME);
+ }
+
+ TimeMode previousTimeMode = NONE;
+
+ /// Acquisition process.
+ do {
+
+ // Location of a video or frames if input type is FRAMES or VIDEO.
+ string location = "";
+
+ // Load videos file or frames directory if input type is FRAMES or VIDEO
+ if(!mDevice->loadNextCameraDataSet(location)) break;
+
+ if(pDetection != NULL) pDetection->setCurrentDataSet(location);
+
+ // Reference time to compute interval between regular captures.
+ string cDate = to_simple_string(boost::posix_time::microsec_clock::universal_time());
+ string refDate = cDate.substr(0, cDate.find("."));
+
+            chrono::duration<double> dur_elapsed;
+            chrono::time_point<chrono::high_resolution_clock> start;
+
+ do {
+ start = chrono::high_resolution_clock::now();
+
+ // Container for the grabbed image.
+ Frame newFrame;
+
+ // Time counter of grabbing a frame.
+ double tacq = (double)getTickCount();
+
+ // Grab a frame.
+ if(mDevice->runContinuousCapture(newFrame)) {
+
+ BOOST_LOG_SEV(logger, normal) << "============= FRAME " << newFrame.mFrameNumber << " ============= ";
+ cout << "============= FRAME " << newFrame.mFrameNumber << " ============= " << endl;
+
+
+ if(spr_reader.extractValueForKeyword("ACQ_SPRITE_ENABLED")=="true")
+ {
+ //***Here, we (Matthieu and Sebastien) add a function to only analyse when the sun is "sleeping"
+ string vec_time = to_simple_string(boost::posix_time::microsec_clock::universal_time());
+ int pos_space = vec_time.find(' ');
+ int time_hour = atoi(vec_time.substr(pos_space+1,3).c_str());
+
+ /*if(time_hour>=21 || time_hour<=2)
+ {
+ sprite->addFrame(newFrame);
+ }*/
+
+ //sprite->addFrame(newFrame);
+
+
+
+ }
+
+
+ // If camera type in input is FRAMES or VIDEO.
+ if(mDevice->mVideoFramesInput) {
+
+ // Push the new frame in the framebuffer.
+ boost::mutex::scoped_lock lock(*frameBuffer_mutex);
+ frameBuffer->push_back(newFrame);
+ lock.unlock();
+
+ // Notify detection thread.
+ if(pDetection != NULL) {
+
+ boost::mutex::scoped_lock lock2(*detSignal_mutex);
+ *detSignal = true;
+ detSignal_condition->notify_one();
+ lock2.unlock();
+
+ }
+
+ // Slow down the time in order to give more time to the detection process.
+ int twait = 100;
+ if(mvp.INPUT_TIME_INTERVAL == 0 && mfp.INPUT_TIME_INTERVAL > 0)
+ twait = mfp.INPUT_TIME_INTERVAL;
+ else if(mvp.INPUT_TIME_INTERVAL > 0 && mfp.INPUT_TIME_INTERVAL == 0)
+ twait = mvp.INPUT_TIME_INTERVAL;
+ #ifdef WINDOWS
+ Sleep(twait);
+ #else
+ #ifdef LINUX
+ usleep(twait * 1000);
+ #endif
+ #endif
+
+
+ }else {
+
+ // Get current time in seconds.
+ int currentTimeInSec = newFrame.mDate.hours * 3600 + newFrame.mDate.minutes * 60 + (int)newFrame.mDate.seconds;
+
+ // Detect day or night.
+ TimeMode currentTimeMode = NONE;
+
+ if((currentTimeInSec > mStopSunsetTime) || (currentTimeInSec < mStartSunriseTime)) {
+ currentTimeMode = NIGHT;
+ }else if((currentTimeInSec > mStartSunriseTime) && (currentTimeInSec < mStopSunsetTime)) {
+ currentTimeMode = DAY;
+ }
+
+ // If exposure control is not active, the new frame can be shared with others threads.
+ if(!exposureControlStatus) {
+
+
+ // Push the new frame in the framebuffer.
+ boost::mutex::scoped_lock lock(*frameBuffer_mutex);
+ frameBuffer->push_back(newFrame);
+ lock.unlock();
+
+ // Notify detection thread.
+ if(pDetection != NULL) {
+
+ if(previousTimeMode != currentTimeMode && mdtp.DET_MODE != DAYNIGHT) {
+
+ BOOST_LOG_SEV(logger, notification) << "TimeMode has changed ! ";
+ boost::mutex::scoped_lock lock(*detSignal_mutex);
+ *detSignal = false;
+ lock.unlock();
+ cout << "Send interruption signal to detection process " << endl;
+ pDetection->interruptThread();
+
+ }else if(mdtp.DET_MODE == currentTimeMode || mdtp.DET_MODE == DAYNIGHT) {
+
+ boost::mutex::scoped_lock lock2(*detSignal_mutex);
+ *detSignal = true;
+ detSignal_condition->notify_one();
+ lock2.unlock();
+
+ }
+ }
+
+ // Notify stack thread.
+ if(pStack != NULL) {
+
+ // TimeMode has changed.
+ if(previousTimeMode != currentTimeMode && msp.STACK_MODE != DAYNIGHT) {
+
+ BOOST_LOG_SEV(logger, notification) << "TimeMode has changed ! ";
+ boost::mutex::scoped_lock lock(*stackSignal_mutex);
+ *stackSignal = false;
+ lock.unlock();
+
+ // Force interruption.
+ cout << "Send interruption signal to stack " << endl;
+ pStack->interruptThread();
+
+ }else if(msp.STACK_MODE == currentTimeMode || msp.STACK_MODE == DAYNIGHT) {
+
+ boost::mutex::scoped_lock lock3(*stackSignal_mutex);
+ *stackSignal = true;
+ stackSignal_condition->notify_one();
+ lock3.unlock();
+
+ }
+ }
+
+ cleanStatus = false;
+
+ }else {
+
+ // Exposure control is active, the new frame can't be shared with others threads.
+ if(!cleanStatus) {
+
+ // If stack process exists.
+ if(pStack != NULL) {
+
+ boost::mutex::scoped_lock lock(*stackSignal_mutex);
+ *stackSignal = false;
+ lock.unlock();
+
+ // Force interruption.
+ cout << "Send interruption signal to stack " << endl;
+ pStack->interruptThread();
+
+ }
+
+ // If detection process exists
+ if(pDetection != NULL) {
+
+ boost::mutex::scoped_lock lock(*detSignal_mutex);
+ *detSignal = false;
+ lock.unlock();
+ cout << "Sending interruption signal to detection process... " << endl;
+ pDetection->interruptThread();
+
+ }
+
+ // Reset framebuffer.
+ cout << "Cleaning frameBuffer..." << endl;
+ boost::mutex::scoped_lock lock(*frameBuffer_mutex);
+ frameBuffer->clear();
+ lock.unlock();
+
+ cleanStatus = true;
+
+ }
+
+ }
+
+ previousTimeMode = currentTimeMode;
+
+ // Adjust exposure time.
+ if(pExpCtrl != NULL && exposureControlActive)
+ {
+ pyrDown(newFrame.mImg,newFrame.mImg, Size(newFrame.mImg.cols / 2, newFrame.mImg.rows / 2));
+ exposureControlStatus = pExpCtrl->controlExposureTime(mDevice, newFrame.mImg, newFrame.mDate, mdtp.MASK, mDevice->mMinExposureTime, mcp.ACQ_FPS);
+ }
+
+ // Get current date YYYYMMDD.
+ string currentFrameDate = TimeDate::getYYYYMMDD(newFrame.mDate);
+
+ // If the date has changed, sun ephemeris must be updated.
+ if(currentFrameDate != mCurrentDate) {
+
+ BOOST_LOG_SEV(logger, notification) << "Date has changed. Former Date is " << mCurrentDate << ". New Date is " << currentFrameDate << "." ;
+ computeSunTimes();
+
+ }
+
+ // Acquisition at regular time interval is enabled.
+ if(mcp.regcap.ACQ_REGULAR_ENABLED && !mDevice->mVideoFramesInput) {
+
+ cDate = to_simple_string(boost::posix_time::microsec_clock::universal_time());
+ string nowDate = cDate.substr(0, cDate.find("."));
+
+ boost::posix_time::ptime t1(boost::posix_time::time_from_string(refDate));
+ boost::posix_time::ptime t2(boost::posix_time::time_from_string(nowDate));
+
+ boost::posix_time::time_duration td = t2 - t1;
+ long secTime = td.total_seconds();
+ cout << "NEXT REGCAP : " << (int)(mcp.regcap.ACQ_REGULAR_CFG.interval - secTime) << "s" << endl;
+
+ // Check it's time to run a regular capture.
+ if(secTime >= mcp.regcap.ACQ_REGULAR_CFG.interval) {
+
+ // Current time is after the sunset stop and before the sunrise start = NIGHT
+ if((currentTimeMode == NIGHT) && (mcp.regcap.ACQ_REGULAR_MODE == NIGHT || mcp.regcap.ACQ_REGULAR_MODE == DAYNIGHT)) {
+
+ BOOST_LOG_SEV(logger, notification) << "Run regular acquisition.";
+
+ runImageCapture( mcp.regcap.ACQ_REGULAR_CFG.rep,
+ mcp.regcap.ACQ_REGULAR_CFG.exp,
+ mcp.regcap.ACQ_REGULAR_CFG.gain,
+ mcp.regcap.ACQ_REGULAR_CFG.fmt,
+ mcp.regcap.ACQ_REGULAR_OUTPUT,
+ mcp.regcap.ACQ_REGULAR_PRFX);
+
+ // Current time is between sunrise start and sunset stop = DAY
+ }else if(currentTimeMode == DAY && (mcp.regcap.ACQ_REGULAR_MODE == DAY || mcp.regcap.ACQ_REGULAR_MODE == DAYNIGHT)) {
+
+ BOOST_LOG_SEV(logger, notification) << "Run regular acquisition.";
+ saveImageCaptured(newFrame, 0, mcp.regcap.ACQ_REGULAR_OUTPUT, mcp.regcap.ACQ_REGULAR_PRFX);
+
+ }
+
+ // Reset reference time in case a long exposure has been done.
+ cDate = to_simple_string(boost::posix_time::microsec_clock::universal_time());
+ refDate = cDate.substr(0, cDate.find("."));
+
+ }
+
+ }
+
+ // Acquisiton at scheduled time is enabled.
+ if(mcp.schcap.ACQ_SCHEDULE.size() != 0 && mcp.schcap.ACQ_SCHEDULE_ENABLED && !mDevice->mVideoFramesInput) {
+
+ int next = (mNextAcq.hours * 3600 + mNextAcq.min * 60 + mNextAcq.sec) - (newFrame.mDate.hours * 3600 + newFrame.mDate.minutes * 60 + newFrame.mDate.seconds);
+
+ if(next < 0) {
+ next = (24 * 3600) - (newFrame.mDate.hours * 3600 + newFrame.mDate.minutes * 60 + newFrame.mDate.seconds) + (mNextAcq.hours * 3600 + mNextAcq.min * 60 + mNextAcq.sec);
+ cout << "next : " << next << endl;
+ }
+
+                                vector<int> tsch = TimeDate::HdecimalToHMS(next/3600.0);
+
+ cout << "NEXT SCHCAP : " << tsch.at(0) << "h" << tsch.at(1) << "m" << tsch.at(2) << "s" << endl;
+
+ // It's time to run scheduled acquisition.
+ if( mNextAcq.hours == newFrame.mDate.hours &&
+ mNextAcq.min == newFrame.mDate.minutes &&
+ (int)newFrame.mDate.seconds == mNextAcq.sec) {
+
+ CamPixFmt format;
+ format = mNextAcq.fmt;
+
+ runImageCapture( mNextAcq.rep,
+ mNextAcq.exp,
+ mNextAcq.gain,
+ format,
+ mcp.schcap.ACQ_SCHEDULE_OUTPUT,
+ "");
+
+ // Update mNextAcq
+ selectNextAcquisitionSchedule(newFrame.mDate);
+
+ }else {
+
+ // The current time has elapsed.
+ if(newFrame.mDate.hours > mNextAcq.hours) {
+
+ selectNextAcquisitionSchedule(newFrame.mDate);
+
+ }else if(newFrame.mDate.hours == mNextAcq.hours) {
+
+ if(newFrame.mDate.minutes > mNextAcq.min) {
+
+ selectNextAcquisitionSchedule(newFrame.mDate);
+
+ }else if(newFrame.mDate.minutes == mNextAcq.min) {
+
+ if(newFrame.mDate.seconds > mNextAcq.sec) {
+
+ selectNextAcquisitionSchedule(newFrame.mDate);
+
+ }
+
+ }
+
+ }
+
+ }
+
+ }
+
+ // Check sunrise and sunset time.
+ if( (((currentTimeInSec > mStartSunriseTime && currentTimeInSec < mStopSunriseTime) ||
+ (currentTimeInSec > mStartSunsetTime && currentTimeInSec < mStopSunsetTime))) && !mDevice->mVideoFramesInput) {
+
+ exposureControlActive = true;
+
+ }else {
+
+ // Print time before sunrise.
+ if(currentTimeInSec < mStartSunriseTime || currentTimeInSec > mStopSunsetTime ) {
+                                vector<int> nextSunrise;
+ if(currentTimeInSec < mStartSunriseTime)
+ nextSunrise = TimeDate::HdecimalToHMS((mStartSunriseTime - currentTimeInSec) / 3600.0);
+ if(currentTimeInSec > mStopSunsetTime)
+ nextSunrise = TimeDate::HdecimalToHMS(((24*3600 - currentTimeInSec) + mStartSunriseTime ) / 3600.0);
+
+ cout << "NEXT SUNRISE : " << nextSunrise.at(0) << "h" << nextSunrise.at(1) << "m" << nextSunrise.at(2) << "s" << endl;
+ }
+
+ // Print time before sunset.
+ if(currentTimeInSec > mStopSunriseTime && currentTimeInSec < mStartSunsetTime){
+                            vector<int> nextSunset;
+ nextSunset = TimeDate::HdecimalToHMS((mStartSunsetTime - currentTimeInSec) / 3600.0);
+ cout << "NEXT SUNSET : " << nextSunset.at(0) << "h" << nextSunset.at(1) << "m" << nextSunset.at(2) << "s" << endl;
+
+ }
+
+ // Reset exposure time when sunrise or sunset is finished.
+ if(exposureControlActive) {
+
+ // In DAYTIME : Apply minimum available exposure time.
+ if((currentTimeInSec >= mStopSunriseTime && currentTimeInSec < mStartSunsetTime)){
+
+ BOOST_LOG_SEV(logger, notification) << "Apply day exposure time : " << mDevice->getDayExposureTime();
+ mDevice->setCameraDayExposureTime();
+ BOOST_LOG_SEV(logger, notification) << "Apply day exposure time : " << mDevice->getDayGain();
+ mDevice->setCameraDayGain();
+
+ // In NIGHTTIME : Apply maximum available exposure time.
+ }else if((currentTimeInSec >= mStopSunsetTime) || (currentTimeInSec < mStartSunriseTime)){
+
+ BOOST_LOG_SEV(logger, notification) << "Apply night exposure time." << mDevice->getNightExposureTime();
+ mDevice->setCameraNightExposureTime();
+ BOOST_LOG_SEV(logger, notification) << "Apply night exposure time." << mDevice->getNightGain();
+ mDevice->setCameraNightGain();
+
+ }
+ }
+
+ exposureControlActive = false;
+ exposureControlStatus = false;
+
+ }
+
+ }
+
+ }
+
+ tacq = (((double)getTickCount() - tacq)/getTickFrequency())*1000;
+ std::cout << " [ TIME ACQ ] : " << tacq << " ms ~cFPS(" << (1.0/(tacq/1000.0)) << ")" << endl;
+ BOOST_LOG_SEV(logger, normal) << " [ TIME ACQ ] : " << tacq << " ms";
+
+ mMustStopMutex.lock();
+ stop = mMustStop;
+ mMustStopMutex.unlock();
+
+ dur_elapsed = chrono::high_resolution_clock::now() - start;
+                    //cerr<<"Ara "<<dur_elapsed.count()<<endl;
+
+            }while(stop == false && mDevice->getCameraStatus());
+
+ // Reset detection process to prepare the analyse of a new data set.
+ if(pDetection != NULL) {
+
+ pDetection->getDetMethod()->resetDetection(true);
+ pDetection->getDetMethod()->resetMask();
+ pDetection->updateDetectionReport();
+ if(!pDetection->getRunStatus())
+ break;
+
+ }
+
+ // Clear framebuffer.
+ boost::mutex::scoped_lock lock(*frameBuffer_mutex);
+ frameBuffer->clear();
+ lock.unlock();
+
+ }while(mDevice->getCameraDataSetStatus() && stop == false);
+
+ }catch(const boost::thread_interrupted&){
+
+ BOOST_LOG_SEV(logger,notification) << "AcqThread ended.";
+        cout << "AcqThread ended." << endl;
+
+    }
+
+    mDevice->stopCamera();
+
+ mThreadTerminated = true;
+
+ std::cout << "Acquisition Thread TERMINATED." << endl;
+ BOOST_LOG_SEV(logger,notification) << "Acquisition Thread TERMINATED";
+
+}
+
+void AcqThread::selectNextAcquisitionSchedule(TimeDate::Date date){
+
+ if(mcp.schcap.ACQ_SCHEDULE.size() != 0){
+
+ // Search next acquisition
+ for(int i = 0; i < mcp.schcap.ACQ_SCHEDULE.size(); i++){
+
+ if(date.hours < mcp.schcap.ACQ_SCHEDULE.at(i).hours){
+
+ mNextAcqIndex = i;
+ break;
+
+ }else if(date.hours == mcp.schcap.ACQ_SCHEDULE.at(i).hours){
+
+ if(date.minutes < mcp.schcap.ACQ_SCHEDULE.at(i).min){
+
+ mNextAcqIndex = i;
+ break;
+
+ }else if(date.minutes == mcp.schcap.ACQ_SCHEDULE.at(i).min){
+
+ if(date.seconds < mcp.schcap.ACQ_SCHEDULE.at(i).sec){
+
+ mNextAcqIndex = i;
+ break;
+
+ }
+ }
+ }
+ }
+
+ mNextAcq = mcp.schcap.ACQ_SCHEDULE.at(mNextAcqIndex);
+
+ }
+
+}
+
+bool AcqThread::buildAcquisitionDirectory(string YYYYMMDD){
+
+ namespace fs = boost::filesystem;
+ string root = mdp.DATA_PATH + mstp.STATION_NAME + "_" + YYYYMMDD +"/";
+
+ string subDir = "captures/";
+ string finalPath = root + subDir;
+
+ mOutputDataPath = finalPath;
+ BOOST_LOG_SEV(logger,notification) << "CompleteDataPath : " << mOutputDataPath;
+
+ path p(mdp.DATA_PATH);
+ path p1(root);
+ path p2(root + subDir);
+
+ // If DATA_PATH exists
+ if(fs::exists(p)){
+
+ // If DATA_PATH/STATI ON_YYYYMMDD/ exists
+ if(fs::exists(p1)){
+
+ // If DATA_PATH/STATION_YYYYMMDD/captures/ doesn't exists
+ if(!fs::exists(p2)){
+
+ // If fail to create DATA_PATH/STATION_YYYYMMDD/captures/
+ if(!fs::create_directory(p2)){
+
+ BOOST_LOG_SEV(logger,critical) << "Unable to create captures directory : " << p2.string();
+ return false;
+
+ // If success to create DATA_PATH/STATION_YYYYMMDD/captures/
+ }else{
+
+ BOOST_LOG_SEV(logger,notification) << "Success to create captures directory : " << p2.string();
+ return true;
+
+ }
+ }
+
+ // If DATA_PATH/STATION_YYYYMMDD/ doesn't exists
+ }else{
+
+ // If fail to create DATA_PATH/STATION_YYYYMMDD/
+ if(!fs::create_directory(p1)){
+
+ BOOST_LOG_SEV(logger,fail) << "Unable to create STATION_YYYYMMDD directory : " << p1.string();
+ return false;
+
+ // If success to create DATA_PATH/STATION_YYYYMMDD/
+ }else{
+
+ BOOST_LOG_SEV(logger,notification) << "Success to create STATION_YYYYMMDD directory : " << p1.string();
+
+ // If fail to create DATA_PATH/STATION_YYYYMMDD/stack/
+ if(!fs::create_directory(p2)){
+
+ BOOST_LOG_SEV(logger,critical) << "Unable to create captures directory : " << p2.string();
+ return false;
+
+ // If success to create DATA_PATH/STATION_YYYYMMDD/stack/
+ }else{
+
+ BOOST_LOG_SEV(logger,notification) << "Success to create captures directory : " << p2.string();
+ return true;
+
+ }
+ }
+ }
+
+ // If DATA_PATH doesn't exists
+ }else{
+
+ // If fail to create DATA_PATH
+ if(!fs::create_directory(p)){
+
+ BOOST_LOG_SEV(logger,fail) << "Unable to create DATA_PATH directory : " << p.string();
+ return false;
+
+ // If success to create DATA_PATH
+ }else{
+
+ BOOST_LOG_SEV(logger,notification) << "Success to create DATA_PATH directory : " << p.string();
+
+ // If fail to create DATA_PATH/STATION_YYYYMMDD/
+ if(!fs::create_directory(p1)){
+
+ BOOST_LOG_SEV(logger,fail) << "Unable to create STATION_YYYYMMDD directory : " << p1.string();
+ return false;
+
+ // If success to create DATA_PATH/STATION_YYYYMMDD/
+ }else{
+
+ BOOST_LOG_SEV(logger,notification) << "Success to create STATION_YYYYMMDD directory : " << p1.string();
+
+ // If fail to create DATA_PATH/STATION_YYYYMMDD/captures/
+ if(!fs::create_directory(p2)){
+
+ BOOST_LOG_SEV(logger,critical) << "Unable to create captures directory : " << p2.string();
+ return false;
+
+ // If success to create DATA_PATH/STATION_YYYYMMDD/captures/
+ }else{
+
+ BOOST_LOG_SEV(logger,notification) << "Success to create captures directory : " << p2.string();
+ return true;
+
+ }
+ }
+ }
+ }
+
+ return true;
+}
+
+void AcqThread::runImageCapture(int imgNumber, int imgExposure, int imgGain, CamPixFmt imgFormat, ImgFormat imgOutput, string imgPrefix) {
+
+ // Stop camera
+ mDevice->stopCamera();
+
+ // Stop stack process.
+ if(pStack != NULL){
+
+ boost::mutex::scoped_lock lock(*stackSignal_mutex);
+ *stackSignal = false;
+ lock.unlock();
+
+ // Force interruption.
+ BOOST_LOG_SEV(logger, notification) << "Send reset signal to stack. ";
+ pStack->interruptThread();
+
+ }
+
+ // Stop detection process.
+ if(pDetection != NULL){
+
+ boost::mutex::scoped_lock lock(*detSignal_mutex);
+ *detSignal = false;
+ lock.unlock();
+ BOOST_LOG_SEV(logger, notification) << "Send reset signal to detection process. ";
+ pDetection->interruptThread();
+
+ }
+
+ // Reset framebuffer.
+ BOOST_LOG_SEV(logger, notification) << "Cleaning frameBuffer...";
+ boost::mutex::scoped_lock lock(*frameBuffer_mutex);
+ frameBuffer->clear();
+ lock.unlock();
+
+ for(int i = 0; i < imgNumber; i++) {
+
+ BOOST_LOG_SEV(logger, notification) << "Prepare capture n° " << i;
+
+ // Configuration for single capture.
+ Frame frame;
+ BOOST_LOG_SEV(logger, notification) << "Exposure time : " << imgExposure;
+ frame.mExposure = imgExposure;
+ BOOST_LOG_SEV(logger, notification) << "Gain : " << imgGain;
+ frame.mGain = imgGain;
+        EParser<CamPixFmt> format;
+ BOOST_LOG_SEV(logger, notification) << "Format : " << format.getStringEnum(imgFormat);
+ frame.mFormat = imgFormat;
+
+ if(mcp.ACQ_RES_CUSTOM_SIZE) {
+ frame.mHeight = mcp.ACQ_HEIGHT;
+ frame.mWidth = mcp.ACQ_WIDTH;
+ }
+
+ // Run single capture.
+ BOOST_LOG_SEV(logger, notification) << "Run single capture.";
+ if(mDevice->runSingleCapture(frame)) {
+
+ BOOST_LOG_SEV(logger, notification) << "Single capture succeed !";
+ cout << "Single capture succeed !" << endl;
+ saveImageCaptured(frame, i, imgOutput, imgPrefix);
+
+ }else{
+
+ BOOST_LOG_SEV(logger, fail) << "Single capture failed !";
+
+ }
+
+ }
+
+ #ifdef WINDOWS
+ Sleep(1000);
+ #else
+ #ifdef LINUX
+ sleep(1);
+ #endif
+ #endif
+
+ BOOST_LOG_SEV(logger, notification) << "Restarting camera in continuous mode...";
+
+ // RECREATE CAMERA
+ if(!mDevice->recreateCamera())
+ throw "Fail to restart camera.";
+
+ prepareAcquisitionOnDevice();
+
+}
+
+void AcqThread::saveImageCaptured(Frame &img, int imgNum, ImgFormat outputType, string imgPrefix) {
+
+ if(img.mImg.data) {
+
+ string YYYYMMDD = TimeDate::getYYYYMMDD(img.mDate);
+
+ if(buildAcquisitionDirectory(YYYYMMDD)) {
+
+ string fileName = imgPrefix + "_" + TimeDate::getYYYYMMDDThhmmss(img.mDate) + "_UT-" + Conversion::intToString(imgNum);
+
+ switch(outputType) {
+
+ case JPEG :
+
+ {
+
+ switch(img.mFormat) {
+
+ case MONO12 :
+
+ {
+
+ Mat temp;
+ img.mImg.copyTo(temp);
+ Mat newMat = ImgProcessing::correctGammaOnMono12(temp, 2.2);
+ Mat newMat2 = Conversion::convertTo8UC1(newMat);
+ SaveImg::saveJPEG(newMat2, mOutputDataPath + fileName);
+
+ }
+
+ break;
+
+ default :
+
+ {
+
+ Mat temp;
+ img.mImg.copyTo(temp);
+ Mat newMat = ImgProcessing::correctGammaOnMono8(temp, 2.2);
+ SaveImg::saveJPEG(newMat, mOutputDataPath + fileName);
+
+ }
+
+ }
+ }
+
+ break;
+
+ case FITS :
+
+ {
+
+ Fits2D newFits(mOutputDataPath);
+ newFits.loadKeys(mfkp, mstp);
+ newFits.kGAINDB = img.mGain;
+ newFits.kEXPOSURE = img.mExposure/1000000.0;
+ newFits.kONTIME = img.mExposure/1000000.0;
+ newFits.kELAPTIME = img.mExposure/1000000.0;
+ newFits.kDATEOBS = TimeDate::getIsoExtendedFormatDate(img.mDate);
+
+ double debObsInSeconds = img.mDate.hours*3600 + img.mDate.minutes*60 + img.mDate.seconds;
+ double julianDate = TimeDate::gregorianToJulian(img.mDate);
+ double julianCentury = TimeDate::julianCentury(julianDate);
+
+ newFits.kCRVAL1 = TimeDate::localSideralTime_2(julianCentury, img.mDate.hours, img.mDate.minutes, (int)img.mDate.seconds, mstp.SITELONG);
+ newFits.kCTYPE1 = "RA---ARC";
+ newFits.kCTYPE2 = "DEC--ARC";
+ newFits.kEQUINOX = 2000.0;
+
+ switch(img.mFormat) {
+
+ case MONO12 :
+
+ {
+
+ // Convert unsigned short type image in short type image.
+ Mat newMat = Mat(img.mImg.rows, img.mImg.cols, CV_16SC1, Scalar(0));
+
+ // Set bzero and bscale for print unsigned short value in soft visualization.
+ newFits.kBZERO = 32768;
+ newFits.kBSCALE = 1;
+
+ unsigned short *ptr = NULL;
+ short *ptr2 = NULL;
+
+ for(int i = 0; i < img.mImg.rows; i++){
+
+                                ptr = img.mImg.ptr<unsigned short>(i);
+                                ptr2 = newMat.ptr<short>(i);
+
+ for(int j = 0; j < img.mImg.cols; j++){
+
+ if(ptr[j] - 32768 > 32767){
+
+ ptr2[j] = 32767;
+
+ }else{
+
+ ptr2[j] = ptr[j] - 32768;
+ }
+ }
+ }
+
+ // Create FITS image with BITPIX = SHORT_IMG (16-bits signed integers), pixel with TSHORT (signed short)
+ if(newFits.writeFits(newMat, S16, fileName))
+ cout << ">> Fits saved in : " << mOutputDataPath << fileName << endl;
+
+ }
+
+ break;
+
+ default :
+
+ {
+
+ if(newFits.writeFits(img.mImg, UC8, fileName))
+ cout << ">> Fits saved in : " << mOutputDataPath << fileName << endl;
+
+ }
+
+ }
+
+ }
+
+ break;
+
+ }
+
+ }
+ }
+
+}
+
+bool AcqThread::computeSunTimes() {
+
+ int sunriseStartH = 0, sunriseStartM = 0, sunriseStopH = 0, sunriseStopM = 0,
+ sunsetStartH = 0, sunsetStartM = 0, sunsetStopH = 0, sunsetStopM = 0;
+
+ boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
+ string date = to_iso_extended_string(time);
+    vector<int> intDate = TimeDate::getIntVectorFromDateString(date);
+
+ string month = Conversion::intToString(intDate.at(1));
+ if(month.size() == 1) month = "0" + month;
+ string day = Conversion::intToString(intDate.at(2));
+ if(day.size() == 1) day = "0" + day;
+ mCurrentDate = Conversion::intToString(intDate.at(0)) + month + day;
+ mCurrentTime = intDate.at(3) * 3600 + intDate.at(4) * 60 + intDate.at(5);
+
+ cout << "LOCAL DATE : " << mCurrentDate << endl;
+
+ if(mcp.ephem.EPHEMERIS_ENABLED) {
+
+ Ephemeris ephem1 = Ephemeris(mCurrentDate, mcp.ephem.SUN_HORIZON_1, mstp.SITELONG, mstp.SITELAT);
+
+ if(!ephem1.computeEphemeris(sunriseStartH, sunriseStartM,sunsetStopH, sunsetStopM)) {
+
+ return false;
+
+ }
+
+ Ephemeris ephem2 = Ephemeris(mCurrentDate, mcp.ephem.SUN_HORIZON_2, mstp.SITELONG, mstp.SITELAT );
+
+ if(!ephem2.computeEphemeris(sunriseStopH, sunriseStopM,sunsetStartH, sunsetStartM)) {
+
+ return false;
+
+ }
+
+ }else {
+
+ sunriseStartH = mcp.ephem.SUNRISE_TIME.at(0);
+ sunriseStartM = mcp.ephem.SUNRISE_TIME.at(1);
+
+ double intpart1 = 0;
+ double fractpart1 = modf((double)mcp.ephem.SUNRISE_DURATION/3600.0 , &intpart1);
+
+ if(intpart1!=0) {
+
+ if(sunriseStartH + intpart1 < 24) {
+
+ sunriseStopH = sunriseStartH + intpart1;
+
+
+ }else {
+
+ sunriseStopH = sunriseStartH + intpart1 - 24;
+
+ }
+
+ }else {
+
+ sunriseStopH = sunriseStartH;
+
+ }
+
+ double intpart2 = 0;
+ double fractpart2 = modf(fractpart1 * 60 , &intpart2);
+
+ if(sunriseStartM + intpart2 < 60) {
+
+ sunriseStopM = sunriseStartM + intpart2;
+
+ }else {
+
+
+ if(sunriseStopH + 1 < 24) {
+
+ sunriseStopH += 1;
+
+ }else {
+
+ sunriseStopH = sunriseStopH + 1 - 24;
+
+ }
+
+
+ sunriseStopM = intpart2;
+
+ }
+
+ sunsetStartH = mcp.ephem.SUNSET_TIME.at(0);
+ sunsetStartM = mcp.ephem.SUNSET_TIME.at(1);
+
+ double intpart3 = 0;
+ double fractpart3 = modf((double)mcp.ephem.SUNSET_DURATION/3600.0 , &intpart3);
+
+ if(intpart3!=0) {
+
+ if(sunsetStartH + intpart3 < 24) {
+
+ sunsetStopH = sunsetStartH + intpart3;
+
+ }else {
+
+ sunsetStopH = sunsetStartH + intpart3 - 24;
+
+ }
+
+ }else {
+
+ sunsetStopH = sunsetStartH;
+
+ }
+
+ double intpart4 = 0;
+ double fractpart4 = modf(fractpart3 * 60 , &intpart4);
+
+ if(sunsetStartM + intpart4 < 60) {
+
+ sunsetStopM = sunsetStartM + intpart4;
+
+ }else {
+
+
+ if(sunsetStopH + 1 < 24) {
+
+ sunsetStopH += 1;
+
+ }else {
+
+ sunsetStopH = sunsetStopH + 1 - 24;
+
+ }
+
+ sunsetStopM = intpart4;
+
+ }
+
+ }
+
+ cout << "SUNRISE : " << sunriseStartH << "H" << sunriseStartM << " - " << sunriseStopH << "H" << sunriseStopM << endl;
+ cout << "SUNSET : " << sunsetStartH << "H" << sunsetStartM << " - " << sunsetStopH << "H" << sunsetStopM << endl;
+
+ mStartSunriseTime = sunriseStartH * 3600 + sunriseStartM * 60;
+ mStopSunriseTime = sunriseStopH * 3600 + sunriseStopM * 60;
+ mStartSunsetTime = sunsetStartH * 3600 + sunsetStartM * 60;
+ mStopSunsetTime = sunsetStopH * 3600 + sunsetStopM * 60;
+
+ return true;
+
+}
+
+// Configure the camera for a continuous acquisition run: apply frame size
+// and pixel format, read the exposure/gain bounds, pick exposure and gain
+// according to the current period (night / day / twilight), set the fps,
+// then initialize and start the camera.
+// Returns false as soon as any device operation fails.
+bool AcqThread::prepareAcquisitionOnDevice() {
+
+    // SET SIZE
+    if(!mDevice->setCameraSize())
+        return false;
+
+    // SET FORMAT
+    if(!mDevice->setCameraPixelFormat())
+        return false;
+
+    // LOAD GET BOUNDS
+    mDevice->getCameraExposureBounds();
+    mDevice->getCameraGainBounds();
+
+    // Get Sunrise start/stop, Sunset start/stop. ---
+    // Bug fix: the return value was previously ignored; on failure the
+    // sun-time members are stale and the day/night decision below would be
+    // meaningless, so abort the preparation instead.
+    if(!computeSunTimes())
+        return false;
+
+    // CHECK SUNRISE AND SUNSET TIMES.
+
+    if((mCurrentTime > mStopSunsetTime) || (mCurrentTime < mStartSunriseTime)) {
+
+        // Night period: fixed night exposure time and gain.
+        BOOST_LOG_SEV(logger, notification) << "DAYTIME : NO";
+        BOOST_LOG_SEV(logger, notification) << "AUTO EXPOSURE : NO";
+        BOOST_LOG_SEV(logger, notification) << "EXPOSURE TIME : " << mDevice->getNightExposureTime();
+        BOOST_LOG_SEV(logger, notification) << "GAIN : " << mDevice->getNightGain();
+
+        if(!mDevice->setCameraNightExposureTime())
+            return false;
+
+        if(!mDevice->setCameraNightGain())
+            return false;
+
+    }else if((mCurrentTime > mStopSunriseTime && mCurrentTime < mStartSunsetTime)) {
+
+        // Day period: fixed day exposure time and gain.
+        BOOST_LOG_SEV(logger, notification) << "DAYTIME : YES";
+        BOOST_LOG_SEV(logger, notification) << "AUTO EXPOSURE : NO";
+        BOOST_LOG_SEV(logger, notification) << "EXPOSURE TIME : " << mDevice->getDayExposureTime();
+        BOOST_LOG_SEV(logger, notification) << "GAIN : " << mDevice->getDayGain();
+
+        if(!mDevice->setCameraDayExposureTime())
+            return false;
+
+        if(!mDevice->setCameraDayGain())
+            return false;
+
+    }else{
+
+        // Twilight (sunrise or sunset in progress): start from the minimum
+        // exposure time and gain; auto-exposure control adjusts from there.
+        // Bug fix: a stray "<< mDevice->getNightExposureTime()" was appended
+        // after the closing parenthesis, garbling this log message.
+        BOOST_LOG_SEV(logger, notification) << "DAYTIME : NO";
+        BOOST_LOG_SEV(logger, notification) << "AUTO EXPOSURE : YES";
+        BOOST_LOG_SEV(logger, notification) << "EXPOSURE TIME : Minimum (" << mDevice->mMinExposureTime << ")";
+        BOOST_LOG_SEV(logger, notification) << "GAIN : Minimum (" << mDevice->mMinGain << ")";
+
+        if(!mDevice->setCameraExposureTime(mDevice->mMinExposureTime))
+            return false;
+
+        if(!mDevice->setCameraGain(mDevice->mMinGain))
+            return false;
+
+    }
+
+    // SET FPS.
+    if(!mDevice->setCameraFPS())
+        return false;
+
+    // INIT CAMERA.
+    if(!mDevice->initializeCamera())
+        return false;
+
+    // START CAMERA.
+    if(!mDevice->startCamera())
+        return false;
+
+    return true;
+
+}
+
diff --git a/AcqThread.h b/AcqThread.h
new file mode 100644
index 0000000..0aa4b64
--- /dev/null
+++ b/AcqThread.h
@@ -0,0 +1,186 @@
+/*
+ AcqThread.h
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2016 Yoan Audureau, Chiara Marmo -- FRIPON-GEOPS-UPSUD
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 03/10/2016
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file AcqThread.h
+* \author Yoan Audureau, Chiara Marmo -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 03/10/2016
+* \brief Acquisition thread.
+*/
+
+#ifndef ACQTHREAD_H
+#define ACQTHREAD_H
+
+#include "config.h"
+
+#ifdef LINUX
+ #define BOOST_LOG_DYN_LINK 1
+#endif
+
+#include "ECamPixFmt.h"
+#include "EImgFormat.h"
+#include "DetThread.h"
+#include "StackThread.h"
+#include "Device.h"
+#include "ExposureControl.h"
+#include "ImgProcessing.h"
+#include "Ephemeris.h"
+#include "Fits2D.h"
+#include "SParam.h"
+//#include "SpriteThread.h"
+//#include "SpriteReader.h"
+
+using namespace cv;
+using namespace std;
+
+// Acquisition thread: continuously grabs frames from the input device,
+// pushes them into the shared frame buffer and signals the detection and
+// stack threads. Also runs scheduled/regular captures and day/night camera
+// reconfiguration.
+// NOTE(review): template arguments (e.g. boost::circular_buffer<Frame>,
+// severity_logger<...>) were stripped by extraction in this patch -- confirm
+// against the original header.
+class AcqThread {
+
+    private :
+
+        static boost::log::sources::severity_logger< LogSeverityLevel > logger;
+
+        // Tags the static logger with a "ClassName" attribute exactly once,
+        // at static-initialization time.
+        static class Init {
+
+            public:
+
+                Init() {
+
+                    logger.add_attribute("ClassName", boost::log::attributes::constant("AcqThread"));
+
+                }
+
+        }initializer;
+
+        bool mMustStop;                 // Signal to stop thread.
+        boost::mutex mMustStopMutex;    // Protects mMustStop.
+        boost::thread *mThread;         // Acquisition thread.
+        bool mThreadTerminated;         // Terminated status of the thread.
+        Device *mDevice;                // Device used for acquisition.
+        int mDeviceID;                  // Index of the device to use.
+        scheduleParam mNextAcq;         // Next scheduled acquisition.
+        int mNextAcqIndex;              // Index of mNextAcq in the schedule list.
+        DetThread *pDetection;          // Pointer on detection thread in order to stop it or reset it when a regular capture occurs.
+        StackThread *pStack;            // Pointer on stack thread in order to save and reset a stack when a regular capture occurs.
+        ExposureControl *pExpCtrl;      // Pointer on exposure time control object while sunrise and sunset.
+        string mOutputDataPath;         // Dynamic location where to save data (regular captures etc...).
+        string mCurrentDate;            // Current date as "YYYYMMDD".
+        int mStartSunriseTime;          // In seconds.
+        int mStopSunriseTime;           // In seconds.
+        int mStartSunsetTime;           // In seconds.
+        int mStopSunsetTime;            // In seconds.
+        int mCurrentTime;               // In seconds.
+
+        // Parameters from configuration file.
+        stackParam msp;
+        stationParam mstp;
+        detectionParam mdtp;
+        cameraParam mcp;
+        dataParam mdp;
+        fitskeysParam mfkp;
+        framesParam mfp;
+        videoParam mvp;
+
+        // Communication with the shared framebuffer.
+        boost::condition_variable *frameBuffer_condition;
+        boost::mutex *frameBuffer_mutex;
+        boost::circular_buffer *frameBuffer;
+        //boost::circular_buffer *frameSprite;
+        //vector *frameSprite;
+
+        // Communication with DetThread.
+        // NOTE(review): this label and the next one look swapped with
+        // respect to the member names (stackSignal vs detSignal) -- confirm.
+        bool *stackSignal;
+        boost::mutex *stackSignal_mutex;
+        boost::condition_variable *stackSignal_condition;
+
+        // Communication with StackThread.
+        bool *detSignal;
+        boost::mutex *detSignal_mutex;
+        boost::condition_variable *detSignal_condition;
+
+        //SPRITE
+        /*SpriteThread *sprite;
+        SpriteReader spr_reader;
+        mutex *mut_sprite;*/
+
+    public :
+
+        // Build the acquisition thread. Shared-state pointers (frame buffer,
+        // signals, mutexes, condition variables) are owned by the caller.
+        AcqThread(  boost::circular_buffer *fb,
+                    //boost::circular_buffer *frame_sprite,
+                    //vector *frame_sprite,
+                    boost::mutex *fb_m,
+                    boost::condition_variable *fb_c,
+                    bool *sSignal,
+                    boost::mutex *sSignal_m,
+                    boost::condition_variable *sSignal_c,
+                    bool *dSignal,
+                    boost::mutex *dSignal_m,
+                    boost::condition_variable *dSignal_c,
+                    DetThread *detection,
+                    StackThread *stack,
+                    int cid,
+                    dataParam dp,
+                    stackParam sp,
+                    stationParam stp,
+                    detectionParam dtp,
+                    cameraParam acq,
+                    framesParam fp,
+                    videoParam vp,
+                    fitskeysParam fkp);
+
+        ~AcqThread(void);
+
+        // Thread entry point.
+        void operator()();
+
+        void stopThread();
+
+        bool startThread();
+
+        // Return activity status.
+        bool getThreadStatus();
+
+    private :
+
+        // Compute in seconds the sunrise start/stop times and the sunset start/stop times.
+        bool computeSunTimes();
+
+        // Build the directory where the data will be saved.
+        bool buildAcquisitionDirectory(string YYYYMMDD);
+
+        // Analyse the scheduled acquisition list to find the next one according to the current time.
+        void selectNextAcquisitionSchedule(TimeDate::Date date);
+
+        // Save a capture on disk.
+        void saveImageCaptured(Frame &img, int imgNum, ImgFormat outputType, string imgPrefix);
+
+        // Run a regular or scheduled acquisition.
+        void runImageCapture(int imgNumber, int imgExposure, int imgGain, CamPixFmt imgFormat, ImgFormat imgOutput, string imgPrefix);
+
+        // Prepare the device for a continuous acquisition.
+        bool prepareAcquisitionOnDevice();
+};
+
+#endif
diff --git a/Base64.cpp b/Base64.cpp
new file mode 100644
index 0000000..42c8b42
--- /dev/null
+++ b/Base64.cpp
@@ -0,0 +1,54 @@
+/*
+ Base64.cpp
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 20/07/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file Base64.cpp
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 26/11/2014
+* \brief Handle Base64 encryption.
+*/
+
+#include "Base64.h"
+
+// Encode `data` to base64 text using Boost archive iterators.
+// NOTE(review): the template arguments of base64_from_binary /
+// transform_width / ostream_iterator were stripped by extraction in this
+// patch (canonically base64_from_binary<transform_width<const char*,6,8>>
+// and ostream_iterator<char>) -- confirm against the original file.
+// NOTE(review): these iterators emit no '=' padding, so inputs whose length
+// is not a multiple of 3 yield unpadded output -- verify consumers accept it.
+string Base64::encodeBase64(string data){
+
+    stringstream os;
+
+    typedef boost::archive::iterators::base64_from_binary<      // Convert binary values to base64 characters.
+            boost::archive::iterators::transform_width          // Retrieve 6 bit integers from a sequence of 8 bit bytes.
+            >base64_text;                                       // Compose all the above operations in to a new iterator.
+
+    // Stream every 6-bit group of `data` through the iterator chain into os.
+    copy(
+        base64_text(data.c_str()),
+        base64_text(data.c_str() + data.size()),
+        boost::archive::iterators::ostream_iterator(os)
+    );
+
+    return os.str();
+
+}
diff --git a/Base64.h b/Base64.h
new file mode 100644
index 0000000..fd01f27
--- /dev/null
+++ b/Base64.h
@@ -0,0 +1,70 @@
+/*
+ Base64.h
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 26/11/2014
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file Base64.h
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 26/11/2014
+* \brief Handle Base64 encryption.
+*/
+
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+
+using namespace std;
+
+// Static helper for base64 *encoding* (despite the file brief saying
+// "encryption", base64 is an encoding, not a cipher).
+class Base64 {
+
+    public :
+
+        /**
+        * Constructor.
+        */
+        Base64() {};
+
+        /**
+        * Destructor.
+        */
+        ~Base64() {};
+
+        /**
+        * Encode string data with base64.
+        *
+        * @param data String to encode.
+        * @return Encoded string.
+        */
+        static string encodeBase64(string data);
+
+};
+
+
diff --git a/Camera.h b/Camera.h
new file mode 100644
index 0000000..604bac3
--- /dev/null
+++ b/Camera.h
@@ -0,0 +1,278 @@
+/*
+ Camera.h
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau -- FRIPON-GEOPS-UPSUD
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 21/01/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file Camera.h
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 13/06/2014
+* \brief
+*/
+
+#pragma once
+
+#include "config.h"
+#include "opencv2/highgui/highgui.hpp"
+#include
+#include "ECamPixFmt.h"
+#include "Frame.h"
+#include "EInputDeviceType.h"
+
+using namespace cv;
+using namespace std;
+
+// Abstract interface for every frame source (GigE cameras, USB cameras,
+// video files, FITS frame directories...). Default implementations are
+// no-ops / failure so concrete subclasses only override what they support.
+// NOTE(review): template arguments (e.g. vector<pair<int,string>>) were
+// stripped by extraction in this patch -- confirm against the original file.
+class Camera {
+
+    public :
+
+        bool mExposureAvailable;            // Device supports exposure control.
+        bool mGainAvailable;                // Device supports gain control.
+        bool mCamSizeToMax;                 // Use the device's maximum frame size.
+        int mCamSizeWidth;                  // Requested frame width.
+        int mCamSizeHeight;                 // Requested frame height.
+        InputDeviceType mInputDeviceType;   // Kind of input (camera, video, frames...).
+        bool mVerbose;                      // Enable console/log output.
+
+    public :
+
+        // Base constructor: unknown device type, verbose enabled.
+        Camera() {
+
+            mInputDeviceType = UNDEFINED_INPUT_TYPE;
+            mVerbose = true;
+
+        }
+
+        virtual ~Camera() {};
+
+        // Enumerate available cameras; default returns an empty list.
+        virtual vector> getCamerasList() {
+
+            vector> v;
+            return v;
+
+        }
+
+        // Print/collect the pixel formats supported by the device.
+        virtual void getAvailablePixelFormats() {};
+
+        /**
+        * List connected GigE devices.
+        *
+        */
+        virtual bool listCameras() {return false;};
+
+        /**
+        * Get informations about a specific device.
+        *
+        */
+        virtual bool getInfos() {return false;};
+
+        /**
+        * Open/create a device.
+        *
+        * @param id Identification number of the camera to create.
+        */
+        virtual bool createDevice(int id) {return false;};
+
+        /**
+        * Get camera name from its ID.
+        *
+        * @param id Identification number of the camera from which the name is required.
+        * @param device The camera's name found.
+        * @return Success status to find camera's name.
+        */
+        virtual bool getDeviceNameById(int id, string &deviceName) {return false;};
+
+        // Print the device's name; default reports failure.
+        virtual bool getCameraName() {return false;};
+
+        // Kind of input this object wraps.
+        InputDeviceType getDeviceType() {return mInputDeviceType;};
+
+        /**
+        * Get device's grabbing status.
+        *
+        * @return Device grabs frames or not.
+        */
+        virtual bool getStopStatus() {return false;};
+
+        /**
+        * Prepare device to grab frames.
+        *
+        * @return Success status to prepare camera.
+        */
+        virtual bool grabInitialization() {return false;};
+
+        /**
+        * Run acquisition.
+        *
+        */
+        virtual bool acqStart() {return false;};
+
+        /**
+        * Stop acquisition.
+        *
+        */
+        virtual void acqStop() {};
+
+        /**
+        * Close a device and clean resources.
+        *
+        */
+        virtual void grabCleanse() {};
+
+        /**
+        * Get a frame from continuous acquisition.
+        *
+        * @param newFrame New frame's container object.
+        * @return Success status to grab a frame.
+        */
+        virtual bool grabImage(Frame &newFrame) {return false;};
+
+        /**
+        * Get a frame from single acquisition.
+        *
+        * @param newFrame Frame's container object.
+        * @param camID Device's identification number from which the single acquisition will be performed.
+        * @return Success status to grab a frame.
+        */
+        virtual bool grabSingleImage(Frame &frame, int camID) {return false;};
+
+        /**
+        * Get device's exposure time bounds.
+        *
+        * @param eMin Return minimum exposure time value.
+        * @param eMax Return maximum exposure time value.
+        */
+        virtual void getExposureBounds(double &eMin, double &eMax) {};
+
+        /**
+        * Get device's gain bounds.
+        *
+        * @param gMin Return minimum gain value.
+        * @param gMax Return maximum gain value.
+        */
+        virtual void getGainBounds(int &gMin, int &gMax) {};
+
+        /**
+        * Get device's image format.
+        *
+        * @param format Return image format.
+        * @return Success status to get format.
+        */
+        virtual bool getPixelFormat(CamPixFmt &format) {return false;};
+
+        /**
+        * Get device's frame size.
+        *
+        * @param frame's width
+        * @param frame's height
+        * @return Success to get frame'size.
+        */
+        virtual bool getFrameSize(int &w, int &h) {return false;};
+
+        /**
+        * Get device's acquisition frequency.
+        *
+        * @return Device's fps.
+        */
+        virtual bool getFPS(double &value) {return false;};
+
+        /**
+        * Get FPS enumeration values.
+        *
+        * @return Possible fps values.
+        */
+        virtual bool getFpsEnum(vector &values) {return false;};
+
+        /**
+        * Get device's model name.
+        *
+        * @return Device's model name.
+        */
+        virtual string getModelName() {return "";};
+
+        /**
+        * Get device's gain value.
+        *
+        * @return Device's gain.
+        */
+        virtual int getGain() {return 0;};
+
+        /**
+        * Get device's exposure time value.
+        *
+        * @return Device's exposure time.
+        */
+        virtual double getExposureTime() {return 0.0;};
+
+        /**
+        * Set device's exposure time value.
+        *
+        * @param value New exposure time value (us).
+        * @return Success status to set new exposure time.
+        */
+        virtual bool setExposureTime(double value) {return false;};
+
+        /**
+        * Set device's gain value.
+        *
+        * @param value New gain value.
+        * @return Success status to set new gain.
+        */
+        virtual bool setGain(int value) {return false;};
+
+        /**
+        * Set device's acquisition frequency.
+        *
+        * @param value New fps value.
+        * @return Success status to set fps.
+        */
+        virtual bool setFPS(double value) {return false;};
+
+        // Set the frame size (or request the maximum size when customSize is false).
+        virtual bool setSize(int width, int height, bool customSize) {return false;};
+
+        /**
+        * Set device's format.
+        *
+        * @param format New format.
+        * @return Success status to set format.
+        */
+        virtual bool setPixelFormat(CamPixFmt format) {return false;};
+
+        /**
+        * Get data status if a set of directories or videos are used in input.
+        *
+        * @return If there is still recorded frames to load in input.
+        */
+        virtual bool getDataSetStatus() {return false;};
+
+        /**
+        * Load next data set of frames.
+        *
+        * @return Success status to load next data set.
+        */
+        virtual bool loadNextDataSet(string &location) {location = ""; return true; };
+
+        // Debug helper.
+        virtual void test() {cout << " in camera.h" << endl;};
+
+};
diff --git a/CameraFrames.cpp b/CameraFrames.cpp
new file mode 100644
index 0000000..f82554d
--- /dev/null
+++ b/CameraFrames.cpp
@@ -0,0 +1,335 @@
+/*
+ CameraFrames.cpp
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 20/07/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraFrames.cpp
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 02/09/2014
+* \brief Fits frames in input of acquisition thread.
+*/
+
+#include "CameraFrames.h"
+
+// Static logger shared by all CameraFrames instances; the Init helper tags
+// its records with the class name once at static-initialization time.
+boost::log::sources::severity_logger< LogSeverityLevel > CameraFrames::logger;
+
+CameraFrames::Init CameraFrames::initializer;
+
+// Build a FITS-frame-directory reader.
+// locationList : directories containing the frames to process (must be non-empty).
+// numPos       : position of the frame-number token in each file name
+//                (tokens separated by '_').
+// verbose      : enable console/log output.
+// NOTE(review): throws a string literal (const char*) when locationList is
+// empty -- callers must catch const char*, not std::exception.
+CameraFrames::CameraFrames(vector locationList, int numPos, bool verbose):
+mNumFramePos(numPos), mReadDataStatus(false), mCurrDirId(0),
+mFirstFrameNum(0), mLastFrameNum(0) {
+
+    if(locationList.size()>0)
+        mFramesDir = locationList;
+    else
+        throw "No frames directory in input.";
+
+    // Frame directories expose no exposure/gain control.
+    mExposureAvailable = false;
+    mGainAvailable = false;
+    mInputDeviceType = SINGLE_FITS_FRAME;
+    mVerbose = verbose;
+
+}
+
+// Destructor: no resources to release.
+CameraFrames::~CameraFrames(void) {
+
+}
+
+// Point the reader at the current directory of mFramesDir: report its path
+// through `location`, reset the end-of-data flag and rescan the directory
+// for the first/last frame numbers.
+// Returns false if the directory cannot be scanned.
+// Cleanup: removed a leftover debug print of mCurrDirId and dead
+// commented-out code.
+bool CameraFrames::loadNextDataSet(string &location) {
+
+    location = mFramesDir.at(mCurrDirId);
+
+    mReadDataStatus = false;
+
+    if(!searchMinMaxFramesNumber(mFramesDir.at(mCurrDirId)))
+        return false;
+
+    return true;
+
+}
+
+// Initial scan of the current frame directory: determine the min/max frame
+// numbers so grabbing can start. Returns false if the directory is invalid.
+bool CameraFrames::grabInitialization() {
+
+    const string &currentDir = mFramesDir.at(mCurrDirId);
+    return searchMinMaxFramesNumber(currentDir);
+
+}
+
+// Advance to the next frame directory. Returns true while another directory
+// remains to be processed, false once the list is exhausted.
+// Note: this "getter" intentionally mutates mCurrDirId, as in the original.
+bool CameraFrames::getDataSetStatus() {
+
+    ++mCurrDirId;
+    return mCurrDirId < mFramesDir.size();
+
+}
+
+// Identify this pseudo-camera on the console. Always succeeds.
+bool CameraFrames::getCameraName() {
+
+    cout << "Fits frames data." << endl;
+    return true;
+
+}
+
+// Scan `location` for regular files, extract the frame number from each file
+// name (token at position mNumFramePos, '_'-separated; when the number is the
+// last token the file extension is stripped with a '.' tokenizer), and store
+// the smallest/largest numbers in mFirstFrameNum/mLastFrameNum.
+// Returns false when the directory does not exist.
+// Cleanup: removed dead locals `i` (incremented but never read) and
+// `filename` (never used).
+// NOTE(review): tokenizer/char_separator template arguments were stripped by
+// extraction in this patch -- confirm against the original file.
+bool CameraFrames::searchMinMaxFramesNumber(string location) {
+
+    namespace fs = boost::filesystem;
+
+    path p(location);
+
+    if(fs::exists(p)){
+
+        if(mVerbose) BOOST_LOG_SEV(logger, normal) << "Frame's directory exists : " << location;
+
+        // firstFrame == -1 means "no frame seen yet".
+        int firstFrame = -1, lastFrame = 0;
+
+        // Search first and last frames numbers in the directory.
+        for(directory_iterator file(p);file!= directory_iterator(); ++file) {
+
+            path curr(file->path());
+
+            if(is_regular_file(curr)) {
+
+                // Get file name.
+                string fname = curr.filename().string();
+
+                // Split file name according to the separator "_".
+                vector output;
+                typedef boost::tokenizer > tokenizer;
+                boost::char_separator sep("_");
+                tokenizer tokens(fname, sep);
+
+                for (tokenizer::iterator tok_iter = tokens.begin();tok_iter != tokens.end(); ++tok_iter) {
+                    output.push_back(*tok_iter);
+                }
+
+                // Search frame number according to the number position known in the file name.
+
+                int number = 0;
+
+                for(int j = 0; j < output.size(); j++) {
+
+                    if(j == mNumFramePos && j != output.size() - 1) {
+
+                        number = atoi(output.at(j).c_str());
+                        break;
+                    }
+
+                    // If the frame number is at the end (before the file extension).
+                    if(j == mNumFramePos && j == output.size() - 1) {
+
+                        vector output2;
+                        typedef boost::tokenizer > tokenizer;
+                        boost::char_separator sep2(".");
+                        tokenizer tokens2(output.back(), sep2);
+
+                        for (tokenizer::iterator tok_iter = tokens2.begin();tok_iter != tokens2.end(); ++tok_iter) {
+                            output2.push_back(*tok_iter);
+                        }
+
+                        number = atoi(output2.front().c_str());
+                        break;
+
+                    }
+
+                }
+
+                // Track the minimum...
+                if(firstFrame == -1) {
+
+                    firstFrame = number;
+
+                }else if(number < firstFrame) {
+
+                    firstFrame = number;
+
+                }
+
+                // ...and the maximum frame number.
+                if(number > lastFrame) {
+
+                    lastFrame = number;
+
+                }
+            }
+
+        }
+
+        if(mVerbose) BOOST_LOG_SEV(logger, normal) << "First frame number in frame's directory : " << firstFrame;
+        if(mVerbose) BOOST_LOG_SEV(logger, normal) << "Last frame number in frame's directory : " << lastFrame;
+
+        mLastFrameNum = lastFrame;
+        mFirstFrameNum = firstFrame;
+
+        return true;
+
+    }else{
+
+        if(mVerbose) BOOST_LOG_SEV(logger, fail) << "Frame's directory not found.";
+        if(mVerbose) cout << "Frame's directory not found." << endl;
+        return false;
+
+    }
+
+}
+
+// True once every frame of the current directory has been read
+// (set by grabImage when it runs out of files).
+bool CameraFrames::getStopStatus() {
+
+    return mReadDataStatus;
+
+}
+
+// Acquisition frequency is meaningless for a frame directory:
+// reports 0 and failure.
+bool CameraFrames::getFPS(double &value) {
+
+    value = 0;
+    return false;
+
+}
+
+// Load the next frame (number mFirstFrameNum) from the current directory
+// into `img`, advancing mFirstFrameNum. Returns false and sets
+// mReadDataStatus once no matching file remains.
+// NOTE(review): template arguments (list<string>, etc.) were stripped by
+// extraction in this patch -- confirm against the original file.
+bool CameraFrames::grabImage(Frame &img) {
+
+    bool fileFound = false;
+
+    string filename = "";
+
+    path p(mFramesDir.at(mCurrDirId));
+
+    /// Search a frame in the directory.
+    for(directory_iterator file(p);file!= directory_iterator(); ++file){
+
+        path curr(file->path());
+
+        if(is_regular_file(curr)){
+
+            // Split the file name on '_' and pick the frame-number token.
+            list ch;
+            string fname = curr.filename().string();
+            Conversion::stringTok(ch, fname.c_str(), "_");
+            list::const_iterator lit(ch.begin()), lend(ch.end());
+            int i = 0;
+            int number = 0;
+
+            for(; lit != lend; ++lit){
+
+                if(i == mNumFramePos && i != ch.size() - 1){
+
+                    number = atoi((*lit).c_str()); break;
+                }
+
+                // Last token: strip the file extension before parsing.
+                // NOTE(review): unlike searchMinMaxFramesNumber, this branch
+                // does not also require i == mNumFramePos -- confirm intended.
+                if(i == ch.size() - 1){
+
+                    list ch_;
+                    Conversion::stringTok(ch_, (*lit).c_str(), ".");
+                    number = atoi(ch_.front().c_str());
+                    break;
+
+                }
+
+                i++;
+
+            }
+
+            // Found the file carrying the expected frame number.
+            if(number == mFirstFrameNum){
+
+                mFirstFrameNum++;
+                fileFound = true;
+
+                cout << "FILE:" << file->path().string() << endl;
+                BOOST_LOG_SEV(logger, normal) << "FILE:" << file->path().string();
+
+                filename = file->path().string() ;
+
+                break;
+
+            }
+        }
+    }
+
+
+    if(mFirstFrameNum > mLastFrameNum || !fileFound){
+
+        // No more frames in this directory.
+        mReadDataStatus = true;
+        BOOST_LOG_SEV(logger, normal) << "End read frames.";
+        return false;
+
+    }else{
+
+        BOOST_LOG_SEV(logger, normal) << "Frame found.";
+
+        Fits2D newFits(filename);
+        int bitpix;
+
+        if(!newFits.readIntKeyword("BITPIX", bitpix)){
+            BOOST_LOG_SEV(logger, fail) << " Fail to read fits keyword : BITPIX";
+
+            return false;
+        }
+
+        /// Read the frame.
+
+        Mat resMat;
+        CamPixFmt frameFormat = MONO8;
+
+        switch(bitpix){
+
+            case 8 :
+
+                frameFormat = MONO8;
+                newFits.readFits8UC(resMat);
+
+                break;
+
+            case 16 :
+
+                // NOTE(review): any 16-bit FITS is labelled MONO12 here --
+                // confirm this assumption holds for all input data sets.
+                frameFormat = MONO12;
+                newFits.readFits16S(resMat);
+
+                break;
+
+        }
+
+        // Timestamp the frame with the current UTC time (not the original
+        // capture time of the file).
+        boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
+
+        Frame f = Frame(resMat, 0, 0, to_iso_extended_string(time));
+
+        img = f;
+
+        // mFirstFrameNum was already advanced above, hence the -1.
+        img.mFrameNumber = mFirstFrameNum -1 ;
+        img.mFrameRemaining = mLastFrameNum - mFirstFrameNum-1;
+        img.mFps = 1;
+        img.mFormat = frameFormat;
+
+        //waitKey(1000);
+
+
+        return true;
+
+    }
+
+}
diff --git a/CameraFrames.h b/CameraFrames.h
new file mode 100644
index 0000000..96f9801
--- /dev/null
+++ b/CameraFrames.h
@@ -0,0 +1,128 @@
+/*
+ CameraFrames.h
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 20/10/2014
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraFrames.h
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 02/09/2014
+* \brief Fits frames in input of acquisition thread.
+*/
+
+#pragma once
+#include "config.h"
+#include "opencv2/highgui/highgui.hpp"
+#include
+
+#ifdef LINUX
+#define BOOST_LOG_DYN_LINK 1
+#endif
+
+#include
+#include "ELogSeverityLevel.h"
+#include "Conversion.h"
+#include "TimeDate.h"
+#include "Frame.h"
+#include "Fits2D.h"
+#include "Fits.h"
+#include
+#include
+#include
+#include
+
+#include "Camera.h"
+
+using namespace boost::posix_time;
+using namespace cv;
+using namespace std;
+
+// Camera implementation that replays FITS frames stored on disk, one or
+// several directories at a time (test/replay input for the acquisition
+// thread). Setters are accepted but ignored.
+// NOTE(review): template arguments (vector<string>, etc.) were stripped by
+// extraction in this patch -- confirm against the original header.
+class CameraFrames: public Camera {
+
+    private:
+
+        static boost::log::sources::severity_logger< LogSeverityLevel > logger;
+
+        // Tags the static logger with a "ClassName" attribute exactly once,
+        // at static-initialization time.
+        static class Init {
+
+            public:
+
+                Init() {
+
+                    logger.add_attribute("ClassName", boost::log::attributes::constant("CameraFrames"));
+
+                }
+
+        } initializer;
+
+        // Scan a directory for the smallest/largest frame numbers.
+        bool searchMinMaxFramesNumber(string location);
+
+        vector mFramesDir;      // List of frames directories to process.
+        int mNumFramePos;       // Position of the frame number in its filename.
+        int mFirstFrameNum;     // First frame number in a directory.
+        int mLastFrameNum;      // Last frame number in a directory.
+        bool mReadDataStatus;   // Signal the end of reading data in a directory.
+        int mCurrDirId;         // Id of the directory to use.
+        string mCurrDir;        // Path of the directory to use.
+
+    public:
+
+        CameraFrames(vector locationList, int numPos, bool verbose);
+
+        ~CameraFrames();
+
+        // Nothing to start for on-disk frames.
+        bool acqStart() {return true;};
+
+        bool createDevice(int id) { return true;};
+
+        bool listCameras() {return true;};
+
+        bool grabInitialization();
+
+        // Read the next frame from disk.
+        bool grabImage(Frame &img);
+
+        bool getStopStatus();
+
+        bool loadNextDataSet(string &location);
+
+        bool getDataSetStatus();
+
+        bool getFPS(double &value);
+
+        // Hardware-style setters: accepted but ignored for on-disk frames.
+        bool setExposureTime(double exp){return true;};
+
+        bool setGain(int gain) {return true;};
+
+        bool setFPS(double fps){return true;};
+
+        bool setPixelFormat(CamPixFmt format){return true;};
+
+        bool setSize(int width, int height, bool customSize) {return true;};
+
+        bool getCameraName();
+
+};
+
diff --git a/CameraGigeAravis.cpp b/CameraGigeAravis.cpp
new file mode 100644
index 0000000..d5129d2
--- /dev/null
+++ b/CameraGigeAravis.cpp
@@ -0,0 +1,1059 @@
+/*
+ CameraGigeAravis.cpp
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2016 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 16/05/2016
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraGigeAravis.cpp
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 16/05/2016
+* \brief Use Aravis library to pilot GigE Cameras.
+* https://wiki.gnome.org/action/show/Projects/Aravis?action=show&redirect=Aravis
+*/
+
+#include "CameraGigeAravis.h"
+
+#ifdef LINUX
+
+ // Out-of-class definitions for the class-level logger and its tagging
+ // initializer (see CameraGigeAravis.h).
+ boost::log::sources::severity_logger< LogSeverityLevel > CameraGigeAravis::logger;
+ CameraGigeAravis::Init CameraGigeAravis::initializer;
+
+ // Constructor selecting whether 12-bit frames must be shifted right by 4
+ // bits on reception (see shiftBitsImage usage in grabImage()).
+ CameraGigeAravis::CameraGigeAravis(bool shift):
+ camera(NULL), mWidth(0), mHeight(0), fps(0), gainMin(0.0), gainMax(0.0),
+ payload(0), exposureMin(0), exposureMax(0), gain(0), exp(0), nbCompletedBuffers(0),
+ nbFailures(0), nbUnderruns(0), frameCounter(0), shiftBitsImage(shift), stream(NULL) {
+ mExposureAvailable = true;
+ mGainAvailable = true;
+ mInputDeviceType = CAMERA;
+ }
+
+ // Default constructor: identical to the (bool) overload except that bit
+ // shifting of 12-bit frames is disabled.
+ CameraGigeAravis::CameraGigeAravis():
+ camera(NULL), mWidth(0), mHeight(0), fps(0), gainMin(0.0), gainMax(0.0),
+ payload(0), exposureMin(0), exposureMax(0), gain(0), exp(0), nbCompletedBuffers(0),
+ nbFailures(0), nbUnderruns(0), frameCounter(0), shiftBitsImage(false), stream(NULL) {
+ mExposureAvailable = true;
+ mGainAvailable = true;
+ mInputDeviceType = CAMERA;
+ }
+
+ // Destructor: drop our GObject references on the stream and the camera if
+ // acqStop() has not already released them (it sets both to NULL).
+ CameraGigeAravis::~CameraGigeAravis(){
+
+ if(stream != NULL)
+ g_object_unref(stream);
+
+ if(camera != NULL)
+ g_object_unref(camera);
+
+ }
+
+ // Enumerates devices reachable through the Aravis "GigEVision" interface
+ // and returns them as (index, description) pairs.
+ // NOTE(review): template arguments below ('vector>', 'pair c') were
+ // stripped by the patch extraction — presumably vector<pair<int,string>>
+ // and pair<int,string>; confirm against the repository copy.
+ vector> CameraGigeAravis::getCamerasList() {
+
+ vector> camerasList;
+
+ ArvInterface *interface;
+
+ //arv_update_device_list();
+
+ int ni = arv_get_n_interfaces();
+
+
+ for (int j = 0; j< ni; j++){
+
+ // Only the GigEVision interface is of interest here.
+ const char* name = arv_get_interface_id (j);
+ if (strcmp(name,"GigEVision") == 0) {
+ interface = arv_gv_interface_get_instance();
+ arv_interface_update_device_list(interface);
+ //int nb = arv_get_n_devices();
+
+ int nb = arv_interface_get_n_devices(interface);
+
+ for(int i = 0; i < nb; i++){
+
+ pair c;
+ c.first = i;
+ //const char* str = arv_get_device_id(i);
+ const char* str = arv_interface_get_device_id(interface,i);
+ const char* addr = arv_interface_get_device_address(interface,i);
+ std::string s = str;
+ c.second = "NAME[" + s + "] SDK[ARAVIS] IP: " + addr;
+ camerasList.push_back(c);
+ }
+ }
+ }
+
+ return camerasList;
+
+ }
+
+ // Prints the devices visible on the Aravis GigEVision interface to stdout.
+ // Returns true when at least one device was found, false otherwise.
+ //
+ // BUGFIX: the previous version returned false from inside the interface
+ // loop as soon as one interface reported zero devices, which aborted the
+ // scan of the remaining interfaces and skipped the closing separator.
+ // It also queried the GigEVision singleton once per interface index with
+ // no name filter (unlike getCamerasList()), printing the same device list
+ // several times. Both issues are fixed; output for the normal single-GV
+ // case is unchanged.
+ bool CameraGigeAravis::listCameras(){
+
+     ArvInterface *interface;
+     //arv_update_device_list();
+
+     int ni = arv_get_n_interfaces ();
+
+     cout << endl << "------------ GIGE CAMERAS WITH ARAVIS ----------" << endl << endl;
+
+     bool found = false;
+
+     for (int j = 0; j< ni; j++){
+
+         // Restrict the scan to the GigEVision interface, consistently
+         // with getCamerasList().
+         if (strcmp(arv_get_interface_id(j), "GigEVision") != 0)
+             continue;
+
+         interface = arv_gv_interface_get_instance();
+         arv_interface_update_device_list(interface);
+
+         int nb = arv_interface_get_n_devices(interface);
+         for(int i = 0; i < nb; i++){
+
+             cout << "-> [" << i << "] " << arv_interface_get_device_id(interface,i)<< endl;
+
+         }
+
+         if(nb == 0)
+             cout << "-> No cameras detected..." << endl;
+         else
+             found = true;
+     }
+     cout << endl << "------------------------------------------------" << endl << endl;
+
+     return found;
+
+ }
+
+ // Opens the Aravis camera whose enumeration index is 'id'.
+ // Returns false when the id is unknown or the connection fails.
+ bool CameraGigeAravis::createDevice(int id){
+
+     // Resolve the id to a device name first; bail out if unknown.
+     string deviceName;
+     if(!getDeviceNameById(id, deviceName))
+         return false;
+
+     // Connect; arv_camera_new() yields NULL on failure.
+     camera = arv_camera_new(deviceName.c_str());
+     if(camera != NULL)
+         return true;
+
+     BOOST_LOG_SEV(logger, fail) << "Fail to connect the camera.";
+     return false;
+ }
+
+ // Configures the camera region of interest. With customSize the requested
+ // width/height are applied; otherwise the full sensor area is used. The
+ // size actually granted by the device is read back into mWidth/mHeight.
+ bool CameraGigeAravis::setSize(int width, int height, bool customSize) {
+
+     if(!customSize) {
+
+         // Default: span the whole sensor.
+         int sensor_width, sensor_height;
+
+         arv_camera_get_sensor_size(camera, &sensor_width, &sensor_height);
+         BOOST_LOG_SEV(logger, notification) << "Camera sensor size : " << sensor_width << "x" << sensor_height;
+
+         arv_camera_set_region(camera, 0, 0,sensor_width,sensor_height);
+         arv_camera_get_region (camera, NULL, NULL, &mWidth, &mHeight);
+
+     }else {
+
+         // Caller-provided region, anchored at the sensor origin.
+         arv_camera_set_region(camera, 0, 0,width,height);
+         arv_camera_get_region (camera, NULL, NULL, &mWidth, &mHeight);
+         BOOST_LOG_SEV(logger, notification) << "Camera region size : " << mWidth << "x" << mHeight;
+
+     }
+
+     return true;
+
+ }
+
+ // Looks up the Aravis device name for enumeration index 'id' and stores it
+ // in 'device'. Returns false (and logs) when no such index exists.
+ //
+ // Simplified: the previous version scanned every index with
+ // 'if(id == i)' — a direct bounds check on the device count is equivalent
+ // and O(1).
+ bool CameraGigeAravis::getDeviceNameById(int id, string &device){
+
+     arv_update_device_list();
+
+     int n_devices = arv_get_n_devices();
+
+     if(id >= 0 && id < n_devices){
+
+         device = arv_get_device_id(id);
+         return true;
+
+     }
+
+     BOOST_LOG_SEV(logger, fail) << "Fail to retrieve camera with this ID.";
+     return false;
+
+ }
+
+ // Prepares the camera for continuous acquisition: caches payload, pixel
+ // format, exposure/gain bounds, sets a fixed frame rate, dumps the device
+ // configuration, creates the stream and tunes its GV socket/resend/timeout
+ // options, then pre-queues 50 receive buffers.
+ // Returns false when the stream cannot be created or is not a GV stream.
+ bool CameraGigeAravis::grabInitialization(){
+
+ frameCounter = 0;
+
+ payload = arv_camera_get_payload (camera);
+ BOOST_LOG_SEV(logger, notification) << "Camera payload : " << payload;
+
+ pixFormat = arv_camera_get_pixel_format(camera);
+
+ arv_camera_get_exposure_time_bounds (camera, &exposureMin, &exposureMax);
+ BOOST_LOG_SEV(logger, notification) << "Camera exposure bound min : " << exposureMin;
+ BOOST_LOG_SEV(logger, notification) << "Camera exposure bound max : " << exposureMax;
+
+ arv_camera_get_gain_bounds (camera, &gainMin, &gainMax);
+ BOOST_LOG_SEV(logger, notification) << "Camera gain bound min : " << gainMin;
+ BOOST_LOG_SEV(logger, notification) << "Camera gain bound max : " << gainMax;
+
+ // NOTE(review): frame rate is hard-coded to 30 here; setFPS() exists for
+ // caller-chosen rates — confirm this default is intentional.
+ arv_camera_set_frame_rate(camera, 30);
+
+ fps = arv_camera_get_frame_rate(camera);
+ BOOST_LOG_SEV(logger, notification) << "Camera frame rate : " << fps;
+
+ capsString = arv_pixel_format_to_gst_caps_string(pixFormat);
+ BOOST_LOG_SEV(logger, notification) << "Camera format : " << capsString;
+
+ gain = arv_camera_get_gain(camera);
+ BOOST_LOG_SEV(logger, notification) << "Camera gain : " << gain;
+
+ exp = arv_camera_get_exposure_time(camera);
+ BOOST_LOG_SEV(logger, notification) << "Camera exposure : " << exp;
+
+ cout << endl;
+
+ cout << "DEVICE SELECTED : " << arv_camera_get_device_id(camera) << endl;
+ cout << "DEVICE NAME : " << arv_camera_get_model_name(camera) << endl;
+ cout << "DEVICE VENDOR : " << arv_camera_get_vendor_name(camera) << endl;
+ cout << "PAYLOAD : " << payload << endl;
+ cout << "Width : " << mWidth << endl
+ << "Height : " << mHeight << endl;
+ cout << "Exp Range : [" << exposureMin << " - " << exposureMax << "]" << endl;
+ cout << "Exp : " << exp << endl;
+ cout << "Gain Range : [" << gainMin << " - " << gainMax << "]" << endl;
+ cout << "Gain : " << gain << endl;
+ cout << "Fps : " << fps << endl;
+ cout << "Type : " << capsString << endl;
+
+ cout << endl;
+
+ // Create a new stream object. Open stream on Camera.
+ stream = arv_camera_create_stream(camera, NULL, NULL);
+
+ if(stream == NULL){
+
+ BOOST_LOG_SEV(logger, critical) << "Fail to create stream with arv_camera_create_stream()";
+ return false;
+
+ }
+
+ if (ARV_IS_GV_STREAM(stream)){
+
+ bool arv_option_auto_socket_buffer = true;
+ bool arv_option_no_packet_resend = true;
+ unsigned int arv_option_packet_timeout = 20;
+ unsigned int arv_option_frame_retention = 100;
+
+ if(arv_option_auto_socket_buffer){
+
+ g_object_set(stream,
+ // ARV_GV_STREAM_SOCKET_BUFFER_FIXED : socket buffer is set to a given fixed value.
+ // ARV_GV_STREAM_SOCKET_BUFFER_AUTO: socket buffer is set with respect to the payload size.
+ "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
+ // Socket buffer size, in bytes.
+ // Allowed values: >= G_MAXULONG
+ // Default value: 0
+ "socket-buffer-size", 0, NULL);
+
+ }
+
+ if(arv_option_no_packet_resend){
+
+ // # packet-resend : Enables or disables the packet resend mechanism
+
+ // If packet resend is disabled and a packet has been lost during transmission,
+ // the grab result for the returned buffer holding the image will indicate that
+ // the grab failed and the image will be incomplete.
+ //
+ // If packet resend is enabled and a packet has been lost during transmission,
+ // a request is sent to the camera. If the camera still has the packet in its
+ // buffer, it will resend the packet. If there are several lost packets in a
+ // row, the resend requests will be combined.
+
+ g_object_set(stream,
+ // ARV_GV_STREAM_PACKET_RESEND_NEVER: never request a packet resend
+ // ARV_GV_STREAM_PACKET_RESEND_ALWAYS: request a packet resend if a packet was missing
+ // Default value: ARV_GV_STREAM_PACKET_RESEND_ALWAYS
+ "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);
+
+ }
+
+ g_object_set(stream,
+ // # packet-timeout
+
+ // The Packet Timeout parameter defines how long (in milliseconds) we will wait for
+ // the next expected packet before it initiates a resend request.
+
+ // Packet timeout, in µs.
+ // Allowed values: [1000,10000000]
+ // Default value: 40000
+ "packet-timeout",/* (unsigned) arv_option_packet_timeout * 1000*/(unsigned)40000,
+ // # frame-retention
+
+ // The Frame Retention parameter sets the timeout (in milliseconds) for the
+ // frame retention timer. Whenever detection of the leader is made for a frame,
+ // the frame retention timer starts. The timer resets after each packet in the
+ // frame is received and will timeout after the last packet is received. If the
+ // timer times out at any time before the last packet is received, the buffer for
+ // the frame will be released and will be indicated as an unsuccessful grab.
+
+ // Packet retention, in µs.
+ // Allowed values: [1000,10000000]
+ // Default value: 200000
+ "frame-retention", /*(unsigned) arv_option_frame_retention * 1000*/(unsigned) 200000,NULL);
+
+ }else
+ return false;
+
+ // Push 50 buffer in the stream input buffer queue.
+ for (int i = 0; i < 50; i++)
+ arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL));
+
+ return true;
+
+ }
+
+ // Intentionally empty: resources are released in acqStop()/the destructor.
+ void CameraGigeAravis::grabCleanse(){}
+
+ // Switches the camera to free-running continuous acquisition (software
+ // trigger disabled) and starts it. Always reports success.
+ bool CameraGigeAravis::acqStart(){
+
+ BOOST_LOG_SEV(logger, notification) << "Set camera to CONTINUOUS MODE";
+ arv_camera_set_acquisition_mode(camera, ARV_ACQUISITION_MODE_CONTINUOUS);
+
+ BOOST_LOG_SEV(logger, notification) << "Set camera TriggerMode to Off";
+ arv_device_set_string_feature_value(arv_camera_get_device (camera), "TriggerMode" , "Off");
+
+ BOOST_LOG_SEV(logger, notification) << "Start acquisition on camera";
+ arv_camera_start_acquisition(camera);
+
+ return true;
+
+ }
+
+ // Logs stream statistics, stops acquisition, and releases both the stream
+ // and the camera (set to NULL so the destructor does not unref them again).
+ void CameraGigeAravis::acqStop(){
+
+ arv_stream_get_statistics(stream, &nbCompletedBuffers, &nbFailures, &nbUnderruns);
+
+ //cout << "Completed buffers = " << (unsigned long long) nbCompletedBuffers << endl;
+ //cout << "Failures = " << (unsigned long long) nbFailures << endl;
+ //cout << "Underruns = " << (unsigned long long) nbUnderruns << endl;
+
+ BOOST_LOG_SEV(logger, notification) << "Completed buffers = " << (unsigned long long) nbCompletedBuffers;
+ BOOST_LOG_SEV(logger, notification) << "Failures = " << (unsigned long long) nbFailures;
+ BOOST_LOG_SEV(logger, notification) << "Underruns = " << (unsigned long long) nbUnderruns;
+
+ BOOST_LOG_SEV(logger, notification) << "Stopping acquisition...";
+ arv_camera_stop_acquisition(camera);
+ BOOST_LOG_SEV(logger, notification) << "Acquisition stopped.";
+
+ BOOST_LOG_SEV(logger, notification) << "Unreferencing stream.";
+ g_object_unref(stream);
+ stream = NULL;
+
+ BOOST_LOG_SEV(logger, notification) << "Unreferencing camera.";
+ g_object_unref(camera);
+ camera = NULL;
+
+ }
+
+ // Pops the next buffer from the stream (2 s timeout), wraps it in a Frame
+ // (MONO8 or MONO12, optionally right-shifting 12-bit data by 4), stamps it
+ // with the current UTC time and running frame counter, and re-queues the
+ // buffer. Throws runtime_error when no buffer arrives before the timeout.
+ bool CameraGigeAravis::grabImage(Frame &newFrame){
+
+ ArvBuffer *arv_buffer;
+ //exp = arv_camera_get_exposure_time(camera);
+
+ arv_buffer = arv_stream_timeout_pop_buffer(stream,2000000); //us
+ char *buffer_data;
+ size_t buffer_size;
+
+ if(arv_buffer == NULL){
+
+ // NOTE(review): the return below is unreachable after the throw.
+ throw runtime_error("arv_buffer is NULL");
+ return false;
+
+ }else{
+
+ try{
+
+ if(arv_buffer_get_status(arv_buffer) == ARV_BUFFER_STATUS_SUCCESS){
+
+ //BOOST_LOG_SEV(logger, normal) << "Success to grab a frame.";
+
+ buffer_data = (char *) arv_buffer_get_data (arv_buffer, &buffer_size);
+
+ //Timestamping.
+ //string acquisitionDate = TimeDate::localDateTime(microsec_clock::universal_time(),"%Y:%m:%d:%H:%M:%S");
+ //BOOST_LOG_SEV(logger, normal) << "Date : " << acquisitionDate;
+ boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
+ string acquisitionDate = to_iso_extended_string(time);
+
+ //BOOST_LOG_SEV(logger, normal) << "Date : " << acqDateInMicrosec;
+
+ Mat image;
+ CamPixFmt imgDepth = MONO8;
+ int saturateVal = 0;
+
+ if(pixFormat == ARV_PIXEL_FORMAT_MONO_8){
+
+ //BOOST_LOG_SEV(logger, normal) << "Creating Mat 8 bits ...";
+ image = Mat(mHeight, mWidth, CV_8UC1, buffer_data);
+ imgDepth = MONO8;
+ saturateVal = 255;
+
+ }else if(pixFormat == ARV_PIXEL_FORMAT_MONO_12){
+
+ //BOOST_LOG_SEV(logger, normal) << "Creating Mat 16 bits ...";
+ image = Mat(mHeight, mWidth, CV_16UC1, buffer_data);
+ imgDepth = MONO12;
+ saturateVal = 4095;
+
+ //double t3 = (double)getTickCount();
+
+ if(shiftBitsImage){
+
+ //BOOST_LOG_SEV(logger, normal) << "Shifting bits ...";
+
+
+ // NOTE(review): 'image.ptr(i)' presumably lost a
+ // <unsigned short> template argument in the patch
+ // extraction — TODO confirm against the repository copy.
+ unsigned short * p;
+
+ for(int i = 0; i < image.rows; i++){
+ p = image.ptr(i);
+ for(int j = 0; j < image.cols; j++)
+ p[j] = p[j] >> 4;
+ }
+
+ //BOOST_LOG_SEV(logger, normal) << "Bits shifted.";
+
+ }
+
+ //t3 = (((double)getTickCount() - t3)/getTickFrequency())*1000;
+ //cout << "> Time shift : " << t3 << endl;
+ }
+
+ //BOOST_LOG_SEV(logger, normal) << "Creating frame object ...";
+ newFrame = Frame(image, gain, exp, acquisitionDate);
+ //BOOST_LOG_SEV(logger, normal) << "Setting date of frame ...";
+ //newFrame.setAcqDateMicro(acqDateInMicrosec);
+ //BOOST_LOG_SEV(logger, normal) << "Setting fps of frame ...";
+ newFrame.mFps = fps;
+ newFrame.mFormat = imgDepth;
+ //BOOST_LOG_SEV(logger, normal) << "Setting saturated value of frame ...";
+ newFrame.mSaturatedValue = saturateVal;
+ newFrame.mFrameNumber = frameCounter;
+ frameCounter++;
+
+ //BOOST_LOG_SEV(logger, normal) << "Re-pushing arv buffer in stream ...";
+ arv_stream_push_buffer(stream, arv_buffer);
+
+ return true;
+
+ }else{
+
+ switch(arv_buffer_get_status(arv_buffer)){
+
+ case 0 :
+ // NOTE(review): the next line is corrupted in this patch — the
+ // remainder of grabImage (this switch, its catch block and
+ // closing braces) and the opening of grabSingleImage (its
+ // signature, 'res' declaration and width/height check) were
+ // merged into a single line by the extraction. Recover the
+ // original text from the repository before applying.
+ cout << "ARV_BUFFER_STATUS_SUCCESS : the buffer contains a valid image"< 0 && frame.mHeight > 0) {
+
+ arv_camera_set_region(camera, 0, 0,frame.mWidth,frame.mHeight);
+ arv_camera_get_region (camera, NULL, NULL, &mWidth, &mHeight);
+
+ }else{
+
+ int sensor_width, sensor_height;
+
+ arv_camera_get_sensor_size(camera, &sensor_width, &sensor_height);
+
+ // Use maximum sensor size.
+ arv_camera_set_region(camera, 0, 0,sensor_width,sensor_height);
+ arv_camera_get_region (camera, NULL, NULL, &mWidth, &mHeight);
+
+ }
+
+ payload = arv_camera_get_payload (camera);
+
+ pixFormat = arv_camera_get_pixel_format (camera);
+
+ arv_camera_get_exposure_time_bounds (camera, &exposureMin, &exposureMax);
+
+ arv_camera_get_gain_bounds (camera, &gainMin, &gainMax);
+
+ arv_camera_set_frame_rate(camera, 1);
+
+ fps = arv_camera_get_frame_rate(camera);
+
+ capsString = arv_pixel_format_to_gst_caps_string(pixFormat);
+
+ gain = arv_camera_get_gain(camera);
+ exp = arv_camera_get_exposure_time(camera);
+
+ cout << endl;
+
+ cout << "DEVICE SELECTED : " << arv_camera_get_device_id(camera) << endl;
+ cout << "DEVICE NAME : " << arv_camera_get_model_name(camera) << endl;
+ cout << "DEVICE VENDOR : " << arv_camera_get_vendor_name(camera) << endl;
+ cout << "PAYLOAD : " << payload << endl;
+ cout << "Width : " << mWidth << endl
+ << "Height : " << mHeight << endl;
+ cout << "Exp Range : [" << exposureMin << " - " << exposureMax << "]" << endl;
+ cout << "Exp : " << exp << endl;
+ cout << "Gain Range : [" << gainMin << " - " << gainMax << "]" << endl;
+ cout << "Gain : " << gain << endl;
+ cout << "Fps : " << fps << endl;
+ cout << "Type : " << capsString << endl;
+
+ cout << endl;
+
+ if(arv_camera_is_gv_device (camera)) {
+
+ // http://www.baslerweb.com/media/documents/AW00064902000%20Control%20Packet%20Timing%20With%20Delays.pdf
+ // https://github.com/GNOME/aravis/blob/06ac777fc6d98783680340f1c3f3ea39d2780974/src/arvcamera.c
+
+ // Configure the inter packet delay to insert between each packet for the current stream
+ // channel. This can be used as a crude flow-control mechanism if the application or the network
+ // infrastructure cannot keep up with the packets coming from the device.
+ arv_camera_gv_set_packet_delay (camera, 4000);
+
+ // Specifies the stream packet size, in bytes, to send on the selected channel for a GVSP transmitter
+ // or specifies the maximum packet size supported by a GVSP receiver.
+ arv_camera_gv_set_packet_size (camera, 1444);
+
+ }
+
+ // Create a new stream object. Open stream on Camera.
+ stream = arv_camera_create_stream(camera, NULL, NULL);
+
+ if(stream != NULL){
+
+ if(ARV_IS_GV_STREAM(stream)){
+
+ bool arv_option_auto_socket_buffer = true;
+ bool arv_option_no_packet_resend = true;
+ unsigned int arv_option_packet_timeout = 20;
+ unsigned int arv_option_frame_retention = 100;
+
+ if(arv_option_auto_socket_buffer){
+
+ g_object_set(stream, "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO, "socket-buffer-size", 0, NULL);
+
+ }
+
+ if(arv_option_no_packet_resend){
+
+ g_object_set(stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);
+
+ }
+
+ g_object_set(stream, "packet-timeout", (unsigned)40000, "frame-retention", (unsigned) 200000,NULL);
+
+ }
+
+ // Push 50 buffer in the stream input buffer queue.
+ arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL));
+
+ // Set acquisition mode to continuous.
+ arv_camera_set_acquisition_mode(camera, ARV_ACQUISITION_MODE_SINGLE_FRAME);
+
+ // Very usefull to avoid arv buffer timeout status
+ sleep(1);
+
+ // Start acquisition.
+ arv_camera_start_acquisition(camera);
+
+ // Get image buffer.
+ ArvBuffer *arv_buffer = arv_stream_timeout_pop_buffer(stream, frame.mExposure + 5000000); //us
+
+ char *buffer_data;
+ size_t buffer_size;
+
+ cout << ">> Acquisition in progress... (Please wait)" << endl;
+
+ if (arv_buffer != NULL){
+
+ if(arv_buffer_get_status(arv_buffer) == ARV_BUFFER_STATUS_SUCCESS){
+
+ buffer_data = (char *) arv_buffer_get_data (arv_buffer, &buffer_size);
+
+ //Timestamping.
+ boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
+
+ if(pixFormat == ARV_PIXEL_FORMAT_MONO_8){
+
+ Mat image = Mat(mHeight, mWidth, CV_8UC1, buffer_data);
+ image.copyTo(frame.mImg);
+
+ }else if(pixFormat == ARV_PIXEL_FORMAT_MONO_12){
+
+ // Unsigned short image.
+ Mat image = Mat(mHeight, mWidth, CV_16UC1, buffer_data);
+
+ // http://www.theimagingsource.com/en_US/support/documentation/icimagingcontrol-class/PixelformatY16.htm
+ // Some sensors only support 10-bit or 12-bit pixel data. In this case, the least significant bits are don't-care values.
+ if(shiftBitsImage){
+ unsigned short * p;
+ for(int i = 0; i < image.rows; i++){
+ p = image.ptr(i);
+ for(int j = 0; j < image.cols; j++) p[j] = p[j] >> 4;
+ }
+ }
+
+ image.copyTo(frame.mImg);
+
+ }
+
+ frame.mDate = TimeDate::splitIsoExtendedDate(to_iso_extended_string(time));
+ frame.mFps = arv_camera_get_frame_rate(camera);
+
+ res = true;
+
+ }else{
+
+ switch(arv_buffer_get_status(arv_buffer)){
+
+ case 0 :
+
+ cout << "ARV_BUFFER_STATUS_SUCCESS : the buffer contains a valid image"<> Completed buffers = " << (unsigned long long) nbCompletedBuffers << endl;
+ cout << ">> Failures = " << (unsigned long long) nbFailures << endl;
+ //cout << ">> Underruns = " << (unsigned long long) nbUnderruns << endl;
+
+ // Stop acquisition.
+ arv_camera_stop_acquisition(camera);
+
+ g_object_unref(stream);
+ stream = NULL;
+ g_object_unref(camera);
+ camera = NULL;
+
+ }
+
+ return res;
+
+ }
+
+ // Dumps the camera's GenICam XML description to "<p>genicam.xml".
+ // Silently does nothing when the device exposes no description.
+ void CameraGigeAravis::saveGenicamXml(string p){
+
+     size_t size;
+     const char *xml = arv_device_get_genicam_xml (arv_camera_get_device(camera), &size);
+
+     if (xml == NULL)
+         return;
+
+     // Write the raw XML blob (not NUL-terminated, hence the explicit size).
+     string outPath = p + "genicam.xml";
+     ofstream outFile(outPath.c_str());
+     outFile << string ( xml, size );
+     outFile.close();
+
+ }
+
+ //https://github.com/GNOME/aravis/blob/b808d34691a18e51eee72d8cac6cfa522a945433/src/arvtool.c
+ void CameraGigeAravis::getAvailablePixelFormats() {
+
+ ArvGc *genicam;
+ ArvDevice *device;
+ ArvGcNode *node;
+
+ if(camera != NULL) {
+
+ device = arv_camera_get_device(camera);
+ genicam = arv_device_get_genicam(device);
+ node = arv_gc_get_node(genicam, "PixelFormat");
+
+ if (ARV_IS_GC_ENUMERATION (node)) {
+
+ const GSList *childs;
+ const GSList *iter;
+ vector pixfmt;
+
+ cout << ">> Device pixel formats :" << endl;
+
+ childs = arv_gc_enumeration_get_entries (ARV_GC_ENUMERATION (node));
+ for (iter = childs; iter != NULL; iter = iter->next) {
+ if (arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (iter->data), NULL)) {
+
+ if(arv_gc_feature_node_is_available (ARV_GC_FEATURE_NODE (iter->data), NULL)) {
+
+ {
+ string fmt = string(arv_gc_feature_node_get_name(ARV_GC_FEATURE_NODE (iter->data)));
+ std::transform(fmt.begin(), fmt.end(),fmt.begin(), ::toupper);
+ pixfmt.push_back(fmt);
+ cout << "- " << fmt << endl;
+
+ }
+ }
+ }
+ }
+
+ // Compare found pixel formats to currently formats supported by freeture
+
+ cout << endl << ">> Available pixel formats :" << endl;
+ EParser fmt;
+
+ for( int i = 0; i != pixfmt.size(); i++ ) {
+
+ if(fmt.isEnumValue(pixfmt.at(i))) {
+
+ cout << "- " << pixfmt.at(i) << " available --> ID : " << fmt.parseEnum(pixfmt.at(i)) << endl;
+
+ }
+
+ }
+
+ }else {
+
+ cout << ">> Available pixel formats not found." << endl;
+
+ }
+
+ g_object_unref(device);
+
+ }
+
+ }
+
+ // Queries the device's exposure-time limits (µs) and publishes them
+ // through the out-parameters; both default to 0.0 if the query writes
+ // nothing.
+ void CameraGigeAravis::getExposureBounds(double &eMin, double &eMax){
+
+     double boundMin = 0.0;
+     double boundMax = 0.0;
+
+     arv_camera_get_exposure_time_bounds(camera, &boundMin, &boundMax);
+
+     eMin = boundMin;
+     eMax = boundMax;
+
+ }
+
+ // Returns the camera's current exposure time in microseconds.
+ double CameraGigeAravis::getExposureTime(){
+
+ return arv_camera_get_exposure_time(camera);
+
+ }
+
+ // Queries the device's gain limits and publishes them through the
+ // int out-parameters (signature fixed by the Camera base class).
+ // NOTE(review): Aravis reports gain bounds as doubles; the assignments
+ // below truncate any fractional part toward zero.
+ void CameraGigeAravis::getGainBounds(int &gMin, int &gMax){
+
+ double gainMin = 0.0;
+ double gainMax = 0.0;
+
+ arv_camera_get_gain_bounds(camera, &gainMin, &gainMax);
+
+ gMin = gainMin;
+ gMax = gainMax;
+
+ }
+
+ // Maps the device's current Aravis pixel format onto freeture's CamPixFmt.
+ // Returns false for any format other than Mono8/Mono12.
+ bool CameraGigeAravis::getPixelFormat(CamPixFmt &format){
+
+     ArvPixelFormat devFormat = arv_camera_get_pixel_format(camera);
+
+     if(devFormat == ARV_PIXEL_FORMAT_MONO_8){
+
+         format = MONO8;
+         return true;
+
+     }
+
+     if(devFormat == ARV_PIXEL_FORMAT_MONO_12){
+
+         format = MONO12;
+         return true;
+
+     }
+
+     // Unsupported device format: 'format' is left untouched.
+     return false;
+
+ }
+
+
+ // Reads the current region size from the camera into 'w'/'h'.
+ // Returns true on success, false when no camera is connected.
+ //
+ // BUGFIX: the previous version read the height directly into 'h' while
+ // leaving the local 'hh' at 0, then executed 'h = hh;' — so the caller
+ // always received height 0. It also returned false unconditionally, even
+ // after a successful query.
+ bool CameraGigeAravis::getFrameSize(int &w, int &h) {
+
+     if(camera != NULL) {
+
+         int ww = 0, hh = 0;
+         arv_camera_get_region(camera, NULL, NULL, &ww, &hh);
+         w = ww;
+         h = hh;
+         return true;
+
+     }
+
+     return false;
+
+ }
+
+ // Reports the camera's current frame rate through 'value'.
+ // Returns false (leaving 'value' untouched) when no camera is connected.
+ bool CameraGigeAravis::getFPS(double &value){
+
+     if(camera == NULL)
+         return false;
+
+     value = arv_camera_get_frame_rate(camera);
+     return true;
+
+ }
+
+ // Returns the device's model name as reported by Aravis.
+ string CameraGigeAravis::getModelName(){
+
+ return arv_camera_get_model_name(camera);
+
+ }
+
+ // Applies exposure time 'val' (µs) if it lies within the device's bounds,
+ // caching it in 'exp'. Returns false for out-of-range values or when no
+ // camera is connected.
+ //
+ // BUGFIX: the bounds were previously queried BEFORE the NULL-camera check,
+ // dereferencing a NULL camera when none is connected.
+ bool CameraGigeAravis::setExposureTime(double val){
+
+     if(camera == NULL)
+         return false;
+
+     double expMin, expMax;
+
+     arv_camera_get_exposure_time_bounds(camera, &expMin, &expMax);
+
+     if(val >= expMin && val <= expMax) {
+
+         exp = val;
+         arv_camera_set_exposure_time(camera, val);
+         return true;
+
+     }
+
+     cout << "> Exposure value (" << val << ") is not in range [ " << expMin << " - " << expMax << " ]" << endl;
+     return false;
+
+ }
+
+ // Applies gain 'val' if it lies within the device's bounds, caching it in
+ // 'gain'. Returns false for out-of-range values or when no camera is
+ // connected.
+ //
+ // BUGFIX: the bounds were previously queried BEFORE the NULL-camera check,
+ // dereferencing a NULL camera when none is connected.
+ bool CameraGigeAravis::setGain(int val){
+
+     if (camera == NULL)
+         return false;
+
+     double gMin, gMax;
+
+     arv_camera_get_gain_bounds (camera, &gMin, &gMax);
+
+     if((double)val >= gMin && (double)val <= gMax){
+
+         gain = val;
+         arv_camera_set_gain (camera, (double)val);
+         return true;
+
+     }
+
+     cout << "> Gain value (" << val << ") is not in range [ " << gMin << " - " << gMax << " ]" << endl;
+     BOOST_LOG_SEV(logger, fail) << "> Gain value (" << val << ") is not in range [ " << gMin << " - " << gMax << " ]";
+     return false;
+
+ }
+
+ // Requests acquisition frequency 'fps' on the device.
+ // Returns false when no camera is connected.
+ bool CameraGigeAravis::setFPS(double fps){
+
+     if (camera == NULL)
+         return false;
+
+     arv_camera_set_frame_rate(camera, fps);
+     return true;
+
+ }
+
+ // Applies the requested freeture pixel format to the device.
+ // Returns false when no camera is connected or the format is unsupported.
+ //
+ // BUGFIX: the switch previously had no default case, so any CamPixFmt
+ // value other than MONO8/MONO12 silently reported success without
+ // touching the device; such values now return false.
+ bool CameraGigeAravis::setPixelFormat(CamPixFmt depth){
+
+     if (camera != NULL){
+
+         switch(depth){
+
+             case MONO8 :
+
+                 arv_camera_set_pixel_format(camera, ARV_PIXEL_FORMAT_MONO_8);
+                 break;
+
+             case MONO12 :
+
+                 arv_camera_set_pixel_format(camera, ARV_PIXEL_FORMAT_MONO_12);
+                 break;
+
+             default :
+
+                 return false;
+
+         }
+
+         return true;
+     }
+
+     return false;
+
+ }
+
+#endif
diff --git a/CameraGigeAravis.h b/CameraGigeAravis.h
new file mode 100644
index 0000000..cf68987
--- /dev/null
+++ b/CameraGigeAravis.h
@@ -0,0 +1,168 @@
+/* CameraGigeAravis.h
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2016 Yoan Audureau -- FRIPON-GEOPS-UPSUD
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 21/01/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraGigeAravis.h
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 21/01/2015
+* \brief Use Aravis library to pilot GigE Cameras.
+* https://wiki.gnome.org/action/show/Projects/Aravis?action=show&redirect=Aravis
+*/
+
+#pragma once
+
+#include "config.h"
+
+#ifdef LINUX
+
+ #include "opencv2/highgui/highgui.hpp"
+ #include
+
+ #include
+ #include
+ #include "Frame.h"
+ #include "TimeDate.h"
+ #include "Camera.h"
+ #include "arv.h"
+ #include "arvinterface.h"
+ #include
+ #include
+ #include "EParser.h"
+
+ #define BOOST_LOG_DYN_LINK 1
+
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include "ELogSeverityLevel.h"
+
+ using namespace cv;
+ using namespace std;
+
+ // GigE camera driver built on the Aravis library; implements the generic
+ // Camera interface (enumeration, configuration, continuous and
+ // single-frame acquisition).
+ class CameraGigeAravis: public Camera{
+
+ private:
+
+ static boost::log::sources::severity_logger< LogSeverityLevel > logger;
+
+ // Static one-shot initializer: tags every log record emitted through
+ // 'logger' with a "ClassName" attribute before first use.
+ static class Init{
+
+ public:
+
+ Init(){
+
+ logger.add_attribute("ClassName", boost::log::attributes::constant("CameraGigeAravis"));
+
+ }
+
+ }initializer;
+
+ ArvCamera *camera; // Camera to control.
+ ArvPixelFormat pixFormat; // Image format.
+ ArvStream *stream; // Object for video stream reception.
+ int mWidth; // Camera region's width.
+ int mHeight; // Camera region's height.
+ double fps; // Camera acquisition frequency.
+ double gainMin; // Camera minimum gain.
+ double gainMax; // Camera maximum gain.
+ unsigned int payload; // Width x height.
+ double exposureMin; // Camera's minimum exposure time.
+ double exposureMax; // Camera's maximum exposure time.
+ const char *capsString; // GStreamer caps string for the pixel format.
+ int gain; // Camera's gain.
+ double exp; // Camera's exposure time.
+ bool shiftBitsImage; // For example : bits are shifted for dmk's frames.
+ guint64 nbCompletedBuffers; // Number of frames successfully received.
+ guint64 nbFailures; // Number of frames failed to be received.
+ guint64 nbUnderruns; // Number of buffer underruns reported by the stream.
+ int frameCounter; // Counter of success received frames.
+
+ public :
+
+ CameraGigeAravis(bool shift);
+
+ CameraGigeAravis();
+
+ ~CameraGigeAravis();
+
+ // NOTE(review): template arguments of the return type ('vector>') were
+ // stripped by the patch extraction — presumably vector<pair<int,string>>.
+ vector> getCamerasList();
+
+ bool listCameras();
+
+ bool createDevice(int id);
+
+ bool grabInitialization();
+
+ void grabCleanse();
+
+ bool acqStart();
+
+ void acqStop();
+
+ bool grabImage(Frame& newFrame);
+
+ bool grabSingleImage(Frame &frame, int camID);
+
+ bool getDeviceNameById(int id, string &device);
+
+ void getExposureBounds(double &eMin, double &eMax);
+
+ void getGainBounds(int &gMin, int &gMax);
+
+ bool getPixelFormat(CamPixFmt &format);
+
+ bool getFrameSize(int &w, int &h);
+
+ bool getFPS(double &value);
+
+ string getModelName();
+
+ double getExposureTime();
+
+ bool setExposureTime(double exp);
+
+ bool setGain(int gain);
+
+ bool setFPS(double fps);
+
+ bool setPixelFormat(CamPixFmt depth);
+
+ void saveGenicamXml(string p);
+
+ bool setSize(int width, int height, bool customSize);
+
+ void getAvailablePixelFormats();
+
+ };
+
+#endif
diff --git a/CameraGigePylon.cpp b/CameraGigePylon.cpp
new file mode 100644
index 0000000..b3f30e1
--- /dev/null
+++ b/CameraGigePylon.cpp
@@ -0,0 +1,952 @@
+/*
+ CameraGigePylon.cpp
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 20/07/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraGigePylon.cpp
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 03/07/2014
+* \brief Use Pylon library to pilot GigE Cameras.
+*/
+
+#include "CameraGigePylon.h"
+
+#ifdef USE_PYLON
+
+boost::log::sources::severity_logger< LogSeverityLevel > CameraGigePylon::logger;
+
+CameraGigePylon::Init CameraGigePylon::initializer;
+
+// Constructor : resets all members, then fetches the GigE transport layer
+// from the Pylon factory and enumerates the reachable devices into 'devices'.
+// pTl must later be given back with ReleaseTl() (destructor / grabCleanse()).
+CameraGigePylon::CameraGigePylon(){
+
+ pCamera = NULL;
+ pStreamGrabber = NULL;
+ connectionStatus = false;
+ mFrameCounter = 0;
+ mExposureAvailable = true;
+ mGainAvailable = true;
+ mInputDeviceType = CAMERA;
+
+ // Enumerate GigE cameras
+ pTlFactory = &CTlFactory::GetInstance();
+ pTl = pTlFactory->CreateTl(CBaslerGigECamera ::DeviceClass());
+ pTl->EnumerateDevices(devices);
+
+}
+
+// Returns the detected GigE cameras as (enumeration id, description) pairs.
+// Only devices whose full name starts with "Basler" or "Prosilica" are kept.
+vector<pair<int,string>> CameraGigePylon::getCamerasList() {
+
+    vector<pair<int,string>> camerasList;
+
+    try {
+
+        for(size_t id = 0; id < devices.size(); id++) {
+
+            // BUGFIX : find_first_of() matches any single character of its
+            // argument, not the whole word, so the old test was true for
+            // almost every name. Use find() == 0 for a real "starts with".
+            string fullname(devices[id].GetFullName().c_str());
+
+            if(fullname.find("Basler") == 0 || fullname.find("Prosilica") == 0) {
+
+                pair<int,string> c;
+                c.first = (int)id;
+                c.second = "NAME[" + string(devices[id].GetModelName().c_str()) + "] S/N[" + string(devices[id].GetSerialNumber().c_str()) + "] SDK[PYLON]";
+                camerasList.push_back(c);
+
+            }
+        }
+
+    }catch (GenICam::GenericException &e){
+
+        BOOST_LOG_SEV(logger,fail) << "An exception occured : " << e.GetDescription() ;
+        cout << "An exception occured : " << e.GetDescription() << endl;
+
+    }
+
+    return camerasList;
+
+}
+
+// Destructor : frees the stream grabber, closes and deletes the camera and
+// returns the transport layer to the factory.
+// NOTE(review): when grabCleanse() already ran, these pointers were reset to
+// NULL there, so this destructor is then a no-op for them.
+CameraGigePylon::~CameraGigePylon(void){
+
+ if(pStreamGrabber != NULL){
+ delete pStreamGrabber;
+ }
+
+ if(pCamera != NULL) {
+ if(pCamera->IsOpen()) pCamera->Close();
+ delete pCamera;
+ }
+
+ if(pTlFactory != NULL)
+ pTlFactory->ReleaseTl(pTl);
+
+}
+
+// Prints the detected Basler / Prosilica GigE cameras on stdout.
+// Returns false only when the Pylon SDK throws while reading a property.
+bool CameraGigePylon::listCameras() {
+
+    try {
+
+        cout << endl << "------------ GIGE CAMERAS WITH PYLON -----------" << endl << endl;
+
+        for(size_t id = 0; id < devices.size(); id++) {
+
+            // BUGFIX : find_first_of() matches any single character of its
+            // argument, not a prefix ; use find() == 0 instead.
+            string fullname(devices[id].GetFullName().c_str());
+
+            if(fullname.find("Basler") == 0 || fullname.find("Prosilica") == 0) {
+                cout << "-> ID[" << id << "] NAME[" << devices[id].GetModelName().c_str() << "] S/N[" << devices[id].GetSerialNumber().c_str() <<"]"<< endl;
+            }
+        }
+
+        cout << endl << "------------------------------------------------" << endl << endl;
+
+    }catch (GenICam::GenericException &e){
+
+        BOOST_LOG_SEV(logger,fail) << "An exception occured : " << e.GetDescription() ;
+        cout << "An exception occured : " << e.GetDescription() << endl;
+        return false;
+    }
+
+    return true;
+
+}
+
+// Creates and opens the Pylon camera object matching the given enumeration id.
+// Returns true on success ; false for an out-of-range id, an empty device
+// list, or an SDK exception.
+bool CameraGigePylon::createDevice(int id){
+
+ try {
+
+ if(!devices.empty()) {
+
+ // Create a camera object
+ if(id >= 0 && id < devices.size()){
+ pCamera = new CBaslerGigECamera( pTl->CreateDevice((devices[id]) ));
+ }else {
+ return false;
+ }
+
+ // Open the camera object
+ pCamera->Open();
+
+ if(pCamera->IsOpen())
+ BOOST_LOG_SEV(logger,notification) << "Success to open the device.";
+
+ return true;
+ }
+
+ }catch (GenICam::GenericException &e){
+
+ std::cout << e.GetDescription() << endl;
+ return false;
+ }
+
+ return false;
+
+}
+
+// Fetches the model name of the enumerated device with the given id into
+// 'device'. Returns false for an empty list or an out-of-range id.
+// BUGFIX : the 'device' output parameter was never assigned, and 'id' was
+// used to index 'devices' without any bounds check.
+bool CameraGigePylon::getDeviceNameById(int id, string &device) {
+
+    if(!devices.empty() && id >= 0 && id < (int)devices.size()) {
+
+        device = devices[id].GetModelName().c_str();
+        cout << " Camera (ID:" << id << ") detected " << endl;
+        cout << " Name : " << device << endl;
+        return true;
+
+    }
+
+    return false;
+
+}
+
+// Prepares the opened camera for continuous acquisition : disables the
+// acquisition-start and frame-start hardware triggers when available, selects
+// continuous acquisition with timed exposure, opens the stream grabber and
+// registers/queues 'nbBuffers' image buffers sized from PayloadSize.
+// Returns true on success ; false when no opened camera exists or the SDK
+// throws.
+bool CameraGigePylon::grabInitialization(){
+
+ if(pCamera){
+
+ if(pCamera->IsOpen()){
+
+ try{
+
+ //Disable acquisition start trigger if available
+ {
+ GenApi::IEnumEntry* acquisitionStart = pCamera->TriggerSelector.GetEntry( TriggerSelector_AcquisitionStart);
+
+ if ( acquisitionStart && GenApi::IsAvailable( acquisitionStart)){
+
+ pCamera->TriggerSelector.SetValue( TriggerSelector_AcquisitionStart);
+ pCamera->TriggerMode.SetValue( TriggerMode_Off);
+
+ }
+ }
+
+ //Disable frame start trigger if available
+ {
+ GenApi::IEnumEntry* frameStart = pCamera->TriggerSelector.GetEntry( TriggerSelector_FrameStart);
+
+ if ( frameStart && GenApi::IsAvailable( frameStart)){
+
+ pCamera->TriggerSelector.SetValue( TriggerSelector_FrameStart);
+ pCamera->TriggerMode.SetValue( TriggerMode_Off);
+
+ }
+ }
+
+ //Set acquisition mode
+ pCamera->AcquisitionMode.SetValue(AcquisitionMode_Continuous);
+
+ //Set exposure settings
+ pCamera->ExposureMode.SetValue(ExposureMode_Timed);
+
+ // Create the stream grabber lazily (kept across re-initializations).
+ if (!pStreamGrabber){
+
+ pStreamGrabber = new (CBaslerGigECamera::StreamGrabber_t)(pCamera->GetStreamGrabber(0));
+
+ }
+
+ pStreamGrabber->Open();
+
+ // Get the image buffer size
+ const size_t ImageSize = (size_t)(pCamera->PayloadSize.GetValue());
+
+ // We won't use image buffers greater than ImageSize
+ pStreamGrabber->MaxBufferSize.SetValue(ImageSize);
+
+ // We won't queue more than nbBuffers image buffers at a time
+ pStreamGrabber->MaxNumBuffer.SetValue(nbBuffers);
+
+ pStreamGrabber->PrepareGrab();
+
+ // Allocate, register and queue one buffer per slot, typed by pixel depth.
+ for (int i = 0; i < nbBuffers; ++i){
+
+ //ppBuffers[i] = new unsigned char[ImageSize];
+ if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono8){
+
+ ppBuffersUC[i] = new uint8_t[ImageSize];
+ handles[i] = pStreamGrabber->RegisterBuffer(ppBuffersUC[i], ImageSize);
+
+ }
+
+ if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono12){
+
+ // NOTE(review): ImageSize is a byte count, so 'ImageSize' uint16_t
+ // elements reserve twice the needed memory — harmless but wasteful ;
+ // confirm before changing.
+ ppBuffersUS[i] = new uint16_t[ImageSize];
+ handles[i] = pStreamGrabber->RegisterBuffer(ppBuffersUS[i], ImageSize);
+
+ }
+
+ pStreamGrabber->QueueBuffer(handles[i], NULL);
+ }
+
+ return true;
+
+ }catch (GenICam::GenericException &e){
+
+ // Error handling.
+ BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription();
+ cout << "An exception occurred." << e.GetDescription() << endl;
+ return false;
+
+ }
+
+
+ }
+ }
+
+ return false;
+
+}
+
+// Prints on stdout the pixel formats (MONO8 / MONO12) exposed by the opened
+// camera, together with the matching CamPixFmt enum id.
+// FIX : the template arguments of 'vector' and 'EParser' were missing
+// (stripped declarations did not compile) ; restored as vector<string> and
+// EParser<CamPixFmt>, and the loop index made unsigned.
+void CameraGigePylon::getAvailablePixelFormats() {
+
+    vector<string> pixfmt;
+
+    if(pCamera != NULL) {
+
+        if(pCamera->IsOpen()) {
+
+            INodeMap *nodemap = pCamera->GetNodeMap();
+            // Access the PixelFormat enumeration type node.
+            CEnumerationPtr pixelFormat( nodemap->GetNode( "PixelFormat"));
+            // Check if the pixel format Mono8 is available.
+            if(IsAvailable(pixelFormat->GetEntryByName( "Mono8")))
+                pixfmt.push_back("MONO8");
+
+            // Check if the pixel format Mono12 is available.
+            if(IsAvailable(pixelFormat->GetEntryByName( "Mono12")))
+                pixfmt.push_back("MONO12");
+
+            std::cout << endl << ">> Available pixel formats :" << endl;
+            EParser<CamPixFmt> fmt;
+
+            for( size_t i = 0; i != pixfmt.size(); i++ ) {
+                if(fmt.isEnumValue(pixfmt.at(i))) {
+                    std::cout << "- " << pixfmt.at(i) << " available --> ID : " << fmt.parseEnum(pixfmt.at(i)) << endl;
+                }
+            }
+        }
+    }
+}
+
+// Stops grabbing and releases everything grabInitialization() acquired :
+// flushes the stream grabber queues, deregisters and frees the image buffers,
+// closes/deletes the camera and returns the transport layer to the factory.
+// BUGFIX : camera and transport-layer cleanup used to be nested inside the
+// 'pStreamGrabber != NULL' test, so they were silently skipped when no grab
+// had been initialized ; the unbraced 'pTlFactory = NULL;' also ran
+// unconditionally despite its indentation. Both are fixed below.
+void CameraGigePylon::grabCleanse(){
+
+    if(pCamera){
+
+        if(pCamera->IsOpen()){
+
+            try{
+
+                // Flush the input queue, grabbing may have failed
+                BOOST_LOG_SEV(logger,notification) << "Flush the input queue.";
+
+                if(pStreamGrabber != NULL) {
+
+                    pStreamGrabber->CancelGrab();
+
+                    // Consume all items from the output queue
+                    GrabResult Result;
+
+                    while (pStreamGrabber->GetWaitObject().Wait(0)){
+
+                        pStreamGrabber->RetrieveResult(Result);
+
+                    }
+
+                    // Deregister and free buffers
+                    for(uint32_t i = 0; i < nbBuffers; ++i){
+
+                        pStreamGrabber->DeregisterBuffer(handles[i]);
+
+                        if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono8){
+
+                            delete [] ppBuffersUC[i];
+
+                        }else if (pCamera->PixelFormat.GetValue() == PixelFormat_Mono12){
+
+                            delete [] ppBuffersUS[i];
+
+                        }
+                    }
+
+                    // Free all resources used for grabbing
+                    pStreamGrabber->FinishGrab();
+                    pStreamGrabber->Close();
+
+                    delete pStreamGrabber;
+                    pStreamGrabber = NULL;
+                }
+
+                // Close and destroy the camera object.
+                pCamera->Close();
+                delete pCamera;
+                pCamera = NULL;
+
+                // Give the transport layer back to the factory.
+                if(pTlFactory != NULL) {
+                    pTlFactory->ReleaseTl(pTl);
+                    pTlFactory = NULL;
+                }
+
+            }catch (GenICam::GenericException &e){
+
+                // Error handling.
+                BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription();
+                cout << "An exception occurred." << e.GetDescription() << endl;
+
+            }
+        }
+    }
+}
+
+// Sends the AcquisitionStart command to the camera.
+// Returns false when no camera object exists.
+bool CameraGigePylon::acqStart(){
+
+    if(pCamera == NULL)
+        return false;
+
+    pCamera->AcquisitionStart.Execute();
+    return true;
+
+}
+
+// Sends the AcquisitionStop command to the camera.
+// BUGFIX : guard against a NULL camera pointer, consistently with acqStart()
+// — the old code dereferenced pCamera unconditionally.
+void CameraGigePylon::acqStop(){
+
+    if(pCamera != NULL)
+        pCamera->AcquisitionStop.Execute();
+
+}
+
+// Waits (up to 3 s) for the next frame from the stream grabber and fills
+// 'newFrame' with the image data, gain, exposure, fps and a UTC timestamp.
+// The grab buffer is always requeued before returning. Returns false on
+// timeout or on a failed grab result.
+// FIX : removed an unused local 'CGrabResultPtr ptrGrabResult' that was
+// declared but never read.
+bool CameraGigePylon::grabImage(Frame &newFrame){
+
+    bool res = true;
+
+    if(pStreamGrabber->GetWaitObject().Wait(3000)){
+
+        // Get an item from the grabber's output queue
+        if(!pStreamGrabber->RetrieveResult(result)){
+
+            BOOST_LOG_SEV(logger,fail) << "Fail to retrieve an item from the output queue.";
+            res = false;
+
+        }
+
+        if(result.Succeeded()){
+
+            //Timestamping.
+            boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
+
+            Mat newImg;
+
+            // Allocate the destination image according to the pixel depth.
+            if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono8){
+
+                newImg = Mat(pCamera->Height.GetValue(), pCamera->Width.GetValue(), CV_8UC1, Scalar(0));
+
+            }else if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono12){
+
+                newImg = Mat(pCamera->Height.GetValue(), pCamera->Width.GetValue(), CV_16UC1, Scalar(0));
+
+            }
+
+            memcpy(newImg.ptr(), result.Buffer(), pCamera->PayloadSize.GetValue());
+
+            newFrame = Frame( newImg,
+                              pCamera->GainRaw.GetValue(),
+                              (double)pCamera->ExposureTimeAbs.GetValue(),
+                              to_iso_extended_string(time));
+
+            newFrame.mFps = pCamera->AcquisitionFrameRateAbs.GetValue();
+            newFrame.mFrameNumber = mFrameCounter;
+            mFrameCounter++;
+
+            // Saturation ceiling depends on the pixel depth.
+            if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono8){
+
+                newFrame.mFormat = MONO8;
+                newFrame.mSaturatedValue = 255;
+
+            }else if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono12){
+
+                newFrame.mFormat = MONO12;
+                newFrame.mSaturatedValue = 4095;
+
+            }
+
+        }else{
+
+            BOOST_LOG_SEV(logger,fail) << "Fail to grab a frame : " << result.GetErrorDescription();
+            res = false;
+
+        }
+
+        // Requeue the buffer so it can receive the next frame.
+        pStreamGrabber->QueueBuffer( result.Handle(), result.Context() );
+
+    }else{
+
+        BOOST_LOG_SEV(logger,fail) <<"Fail to grab a frame (timeout) : " << result.GetErrorDescription();
+        res = false;
+    }
+
+    return res;
+
+}
+
+// Applies the acquisition region size : either the user-supplied
+// width/height (customSize == true) or the sensor's maximum size.
+// Returns true on success ; false when the camera is absent, not
+// opened/attached, or the SDK throws.
+bool CameraGigePylon::setSize(int width, int height, bool customSize) {
+
+    if(!pCamera)
+        return false;
+
+    try{
+
+        if(!pCamera->IsAttached() || !pCamera->IsOpen()){
+
+            BOOST_LOG_SEV(logger,fail) << "Can't access size image. Camera not opened or not attached." << endl;
+            return false;
+
+        }
+
+        if(customSize) {
+
+            BOOST_LOG_SEV(logger,notification) << "Set custom size to : " << width << "x" << height;
+            pCamera->Width.SetValue(width);
+            pCamera->Height.SetValue(height);
+
+        // Default is maximum size
+        }else {
+
+            BOOST_LOG_SEV(logger,notification) << "Set size to : " << pCamera->Width.GetMax() << "x" << pCamera->Height.GetMax();
+            pCamera->Width.SetValue(pCamera->Width.GetMax());
+            pCamera->Height.SetValue(pCamera->Height.GetMax());
+        }
+
+        return true;
+
+    }catch (GenICam::GenericException &e){
+
+        // Error handling
+        BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription();
+        return false;
+
+    }
+
+}
+
+// Grabs a single frame from the camera with enumeration id 'camID', using the
+// width/height, pixel format, exposure and gain requested in 'frame' (maximum
+// sensor size when frame.mWidth/mHeight are 0). On success the image and a
+// UTC timestamp are written back into 'frame' and true is returned.
+// Returns false for a bad id, an unsupported/unspecified pixel format, an
+// invalid exposure node, or a failed grab.
+bool CameraGigePylon::grabSingleImage(Frame &frame, int camID){
+
+ try {
+
+ // Enumerate GigE cameras
+ pTlFactory = &CTlFactory::GetInstance();
+ pTl = pTlFactory->CreateTl(CBaslerGigECamera ::DeviceClass());
+
+ // EnumerateDevices() returns the device count ; reject ids outside it.
+ if (((camID + 1 ) > pTl->EnumerateDevices(devices)) || camID < 0){
+
+ throw "Camera ID not correct. Can't be found.";
+
+ }else {
+
+ cout << ">> Camera (ID:" << camID << ") found. " << endl;
+
+ }
+
+ // Create an instant camera object with the correct id camera device.
+ CInstantCamera camera( CTlFactory::GetInstance().CreateDevice(devices[camID].GetFullName()));
+
+ INodeMap& nodemap = camera.GetNodeMap();
+
+ // Open the camera for accessing the parameters.
+ camera.Open();
+
+ CIntegerPtr width(nodemap.GetNode("Width"));
+ CIntegerPtr height(nodemap.GetNode("Height"));
+
+ // Use the requested region when provided, otherwise full sensor size.
+ if(frame.mWidth > 0 && frame.mHeight > 0) {
+
+ width->SetValue(frame.mWidth);
+ height->SetValue(frame.mHeight);
+
+ }else{
+
+ // Set width and height to the maximum sensor's size.
+ width->SetValue(width->GetMax());
+ height->SetValue(height->GetMax());
+
+ }
+
+ // Set pixel format.
+ // Access the PixelFormat enumeration type node.
+ CEnumerationPtr pixelFormat(nodemap.GetNode("PixelFormat"));
+
+ if(frame.mFormat == MONO8) {
+
+ if(IsAvailable(pixelFormat->GetEntryByName("Mono8"))){
+ pixelFormat->FromString("Mono8");
+
+ }else{
+ cout << ">> Fail to set pixel format to MONO_8" << endl;
+ return false;
+ }
+
+ }else if(frame.mFormat == MONO12){
+
+ if(IsAvailable(pixelFormat->GetEntryByName("Mono12"))){
+ pixelFormat->FromString("Mono12");
+
+ }else{
+ cout << ">> Fail to set pixel format to MONO_12" << endl;
+ return false;
+ }
+
+ }else{
+
+ cout << ">> No depth specified for the frame container." << endl;
+ return false;
+ }
+
+ // Disable auto exposure so the requested value sticks.
+ CEnumerationPtr exposureAuto( nodemap.GetNode( "ExposureAuto"));
+ if ( IsWritable( exposureAuto)){
+ exposureAuto->FromString("Off");
+ cout << ">> Exposure auto disabled." << endl;
+ }
+
+ // Set exposure.
+ CIntegerPtr ExposureTimeRaw(nodemap.GetNode("ExposureTimeRaw"));
+
+ if(ExposureTimeRaw.IsValid()) {
+
+ if(frame.mExposure >= ExposureTimeRaw->GetMin() && frame.mExposure <= ExposureTimeRaw->GetMax()) {
+
+ ExposureTimeRaw->SetValue(frame.mExposure);
+
+ }else {
+
+ // Out-of-range request : clamp to the minimum and tell the user.
+ ExposureTimeRaw->SetValue(ExposureTimeRaw->GetMin());
+ cout << ">> Exposure has been setted with the minimum available value." << endl;
+ cout << ">> The available exposure range is [" << ExposureTimeRaw->GetMin() << "-" << ExposureTimeRaw->GetMax() << "] (us)" << endl;
+ }
+
+ }else {
+
+ cout << ">> Fail to set exposure value." << endl;
+ return false;
+ }
+
+ // Disable auto gain.
+
+ CEnumerationPtr gainAuto( nodemap.GetNode( "GainAuto"));
+ if ( IsWritable( gainAuto)){
+ gainAuto->FromString("Off");
+ cout << ">> Gain auto disabled." << endl;
+ }
+
+ // Set gain.
+ // Access the GainRaw integer type node. This node is available for Firewire and GigE Devices.
+ CIntegerPtr gainRaw(nodemap.GetNode("GainRaw"));
+ if(gainRaw.IsValid()) {
+
+ if(frame.mGain >= gainRaw->GetMin() && frame.mGain <= gainRaw->GetMax()) {
+
+ gainRaw->SetValue(frame.mGain);
+
+ }else {
+
+ // Out-of-range request : clamp to the minimum and tell the user.
+ gainRaw->SetValue(gainRaw->GetMin());
+ cout << ">> Gain has been setted to the minimum available value." << endl;
+ cout << ">> The available gain range is [" << gainRaw->GetMin() << "-" << gainRaw->GetMax() << "]" << endl;
+ }
+ }
+
+ // NOTE(review): the camera is closed before GrabOne() — presumably
+ // GrabOne() reopens the device internally ; confirm against the Pylon
+ // CInstantCamera documentation before changing.
+ camera.Close();
+
+ // This smart pointer will receive the grab result data.
+ CGrabResultPtr ptrGrabResult;
+
+ cout << ">> Acquisition in progress... (Please wait)" << endl;
+
+ // Timeout in ms : 1 s margin plus the exposure time (us -> ms).
+ int timeout = 1000 + frame.mExposure/1000;
+
+ camera.GrabOne(timeout , ptrGrabResult);
+
+ Mat newImg;
+
+ // Image grabbed successfully ?
+ if(ptrGrabResult->GrabSucceeded()){
+
+ //Timestamping.
+ boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
+ string acqDateInMicrosec = to_iso_extended_string(time);
+
+ frame.mDate = TimeDate::splitIsoExtendedDate(to_iso_extended_string(time));
+ frame.mFps = 0;
+
+ // Allocate the destination image according to the returned pixel type.
+ if(ptrGrabResult->GetPixelType()== PixelType_Mono8) {
+
+ newImg = Mat(ptrGrabResult->GetHeight(), ptrGrabResult->GetWidth(), CV_8UC1, Scalar(0));
+
+ }else if(ptrGrabResult->GetPixelType()== PixelType_Mono12) {
+
+ newImg = Mat(ptrGrabResult->GetHeight(), ptrGrabResult->GetWidth(), CV_16UC1, Scalar(0));
+
+ }
+
+ memcpy(newImg.ptr(), ptrGrabResult->GetBuffer(), ptrGrabResult->GetPayloadSize());
+
+ newImg.copyTo(frame.mImg);
+
+ return true;
+
+ }
+
+ }catch(GenICam::GenericException &e) {
+
+ BOOST_LOG_SEV(logger,fail) << e.GetDescription();
+
+ }catch(exception& e) {
+
+ BOOST_LOG_SEV(logger,fail) << e.what();
+
+ }catch(const char * msg) {
+
+ cout << msg << endl;
+ BOOST_LOG_SEV(logger,fail) << msg;
+
+ }
+
+ // Failure path : give the transport layer back to the factory.
+ if(pTlFactory != NULL) {
+ pTlFactory->ReleaseTl(pTl);
+ pTlFactory = NULL;
+ }
+
+ return false;
+}
+
+// Retrieves the camera's exposure bounds through the ExposureTimeRaw node.
+// The outputs are left untouched when the node is unavailable.
+// BUGFIX : guard against a NULL camera pointer before using the node map —
+// the old code dereferenced pCamera unconditionally.
+void CameraGigePylon::getExposureBounds(double &eMin, double &eMax){
+
+    if(pCamera == NULL)
+        return;
+
+    INodeMap *nodemap = pCamera->GetNodeMap();
+
+    CIntegerPtr exposureTimeRaw(nodemap->GetNode("ExposureTimeRaw"));
+
+    if(exposureTimeRaw.IsValid()) {
+
+        eMin = exposureTimeRaw->GetMin();
+        eMax = exposureTimeRaw->GetMax();
+
+    }
+
+}
+
+// Retrieves the camera's gain bounds through the GainRaw node.
+// The outputs are left untouched when the node is unavailable.
+// BUGFIX : guard against a NULL camera pointer before using the node map —
+// the old code dereferenced pCamera unconditionally.
+void CameraGigePylon::getGainBounds(int &gMin, int &gMax){
+
+    if(pCamera == NULL)
+        return;
+
+    INodeMap *nodemap = pCamera->GetNodeMap();
+
+    CIntegerPtr gainRaw(nodemap->GetNode("GainRaw"));
+
+    if(gainRaw.IsValid()) {
+
+        gMin = gainRaw->GetMin();
+        gMax = gainRaw->GetMax();
+
+    }
+
+}
+
+// Maps the camera's current Pylon pixel format onto the CamPixFmt enum.
+// Returns false for any format other than Mono8 / Mono12.
+bool CameraGigePylon::getPixelFormat(CamPixFmt &format){
+
+    switch(pCamera->PixelFormat.GetValue()) {
+
+        case PixelFormat_Mono8 :
+
+            format = MONO8;
+            return true;
+
+        case PixelFormat_Mono12 :
+
+            format = MONO12;
+            return true;
+
+        default :
+
+            return false;
+
+    }
+
+}
+
+// Reads the current acquisition width/height from the camera into w and h.
+// Returns false when the camera is absent, not opened/attached, or the SDK
+// throws ; the outputs are untouched in that case.
+bool CameraGigePylon::getFrameSize(int &w , int &h) {
+
+    if(!pCamera)
+        return false;
+
+    try{
+
+        if(!pCamera->IsAttached() || !pCamera->IsOpen()){
+
+            BOOST_LOG_SEV(logger,fail) << "Can't access width image. Camera not opened or not attached." << endl;
+            return false;
+
+        }
+
+        w = pCamera->Width.GetValue();
+        h = pCamera->Height.GetValue();
+
+        return true;
+
+    }catch (GenICam::GenericException &e){
+
+        // Error handling
+        BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription();
+        return false;
+
+    }
+
+}
+
+// Reads the camera's resulting frame rate. When no camera object exists the
+// output is set to 0 and false is returned.
+bool CameraGigePylon::getFPS(double &value) {
+
+    if(pCamera == NULL) {
+
+        value = 0;
+        return false;
+
+    }
+
+    value = pCamera->ResultingFrameRateAbs.GetValue();
+    return true;
+
+}
+
+// Not implemented for the Pylon interface : always returns an empty string.
+// NOTE(review): the model name is available from the enumerated device list
+// (devices[id].GetModelName()) — confirm whether callers depend on "".
+string CameraGigePylon::getModelName(){
+ return "";
+}
+
+// Disables auto-exposure (when writable) and applies the given absolute
+// exposure time. Returns true on success.
+// BUGFIX : the function used to return true even when the camera was not
+// opened/attached ; that branch now returns false.
+bool CameraGigePylon::setExposureTime(double exposition) {
+
+    if(pCamera){
+
+        try{
+
+            if( pCamera->IsAttached() && pCamera->IsOpen() ){
+
+                // Check whether auto exposure is available
+                if (IsWritable( pCamera->ExposureAuto)){
+
+                    // Disable auto exposure.
+                    cout << "Disable ExposureAuto." << endl;
+                    pCamera->ExposureAuto.SetValue(ExposureAuto_Off);
+
+                }
+
+                pCamera->ExposureTimeAbs = exposition;
+
+                return true;
+
+            }else{
+
+                std::cout << "Camera not opened or not attached" << endl;
+                return false;
+            }
+
+        }catch (GenICam::GenericException &e){
+
+            // Error handling
+            BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription();
+            cout << endl << ">> " << e.GetDescription() << endl;
+            return false;
+        }
+    }
+
+    return false;
+
+}
+
+// Disables auto-gain (when writable) and applies the given raw gain value.
+// Returns true on success.
+// BUGFIX : the function used to return true even when the camera was not
+// opened/attached ; that branch now returns false.
+bool CameraGigePylon::setGain(int gain){
+
+    if(pCamera){
+
+        try{
+
+            if( pCamera->IsAttached() && pCamera->IsOpen() ){
+
+                // Check whether auto gain is available
+                if (IsWritable( pCamera->GainAuto)){
+
+                    // Disable auto gain.
+                    cout << "Disable GainAuto." << endl;
+                    pCamera->GainAuto.SetValue(GainAuto_Off);
+
+                }
+
+                pCamera->GainRaw = gain;
+
+                return true;
+
+            }else{
+
+                BOOST_LOG_SEV(logger,fail) << "Camera not opened or not attached";
+                return false;
+
+            }
+
+        }catch (GenICam::GenericException &e){
+
+            // Error handling
+            BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription();
+            cout << endl << ">> " << e.GetDescription() << endl;
+            return false;
+        }
+    }
+
+    return false;
+
+}
+
+// Applies the acquisition frame rate.
+// BUGFIX : guard against a NULL camera pointer instead of dereferencing it
+// unconditionally, consistently with the other setters.
+bool CameraGigePylon::setFPS(double fps){
+
+    if(pCamera == NULL)
+        return false;
+
+    pCamera->AcquisitionFrameRateAbs = fps;
+    return true;
+}
+
+// Applies the requested pixel format (MONO8 or MONO12) on the camera.
+// BUGFIX : 'fpix' was left uninitialized (undefined behaviour) for any other
+// CamPixFmt value, and true was returned even when the camera was not
+// opened/attached or the SDK threw. Unknown formats and all failure paths
+// now return false.
+bool CameraGigePylon::setPixelFormat(CamPixFmt format){
+
+    Basler_GigECamera::PixelFormatEnums fpix;
+
+    if(format == MONO8 ){
+
+        fpix = PixelFormat_Mono8;
+
+    }
+    else if (format == MONO12 ){
+
+        fpix = PixelFormat_Mono12;
+
+    }
+    else {
+
+        BOOST_LOG_SEV(logger,fail) << "Unsupported pixel format.";
+        return false;
+
+    }
+
+    if (pCamera){
+
+        try{
+            if(pCamera->IsAttached() && pCamera->IsOpen()){
+
+                pCamera->PixelFormat.SetValue(fpix);
+
+                return true;
+
+            }else{
+
+                BOOST_LOG_SEV(logger,fail) << "Camera not opened or not attached";
+
+            }
+        }
+        catch (GenICam::GenericException &e){
+
+            // Error handling
+            BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription();
+            cout << endl << ">> " << e.GetDescription() << endl;
+
+        }
+    }
+
+    return false;
+
+}
+
+// Returns the camera's current absolute exposure time, or 0 when no camera
+// object exists.
+double CameraGigePylon::getExposureTime(){
+
+    return (pCamera != NULL) ? pCamera->ExposureTimeAbs.GetValue() : 0;
+
+}
+
+/*
+int CameraGigePylon::getGain(){
+
+ return (int)(pCamera->GainRaw.GetValue());
+
+}*/
+
+
+#endif
diff --git a/CameraGigePylon.h b/CameraGigePylon.h
new file mode 100644
index 0000000..27b75b7
--- /dev/null
+++ b/CameraGigePylon.h
@@ -0,0 +1,155 @@
+/*
+ CameraGigePylon.h
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 20/10/2014
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+ * \file CameraGigePylon.cpp
+ * \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+ * \version 1.0
+ * \date 03/07/2014
+ * \brief Use Pylon library to pilot GigE Cameras.
+ */
+
+#pragma once
+
+#include "config.h"
+
+#ifdef USE_PYLON
+
+ #include "Frame.h"
+ #include "TimeDate.h"
+ #include "Conversion.h"
+ #include "SaveImg.h"
+ #include "Camera.h"
+ #include "ECamPixFmt.h"
+ #include "EParser.h"
+ #include
+ #include
+ #include
+ #include
+ #include "ELogSeverityLevel.h"
+
+ #include
+ #include
+ #include
+
+ using namespace Pylon;
+ using namespace GenApi;
+ using namespace cv;
+ using namespace std;
+ using namespace Basler_GigECameraParams;
+
+ static const uint32_t nbBuffers = 20; // Buffer's number used for grabbing
+
+ // GigE camera driver backed by the Basler Pylon SDK. Implements the Camera
+ // interface : enumeration, device creation, continuous grabbing with a
+ // fixed pool of 'nbBuffers' registered buffers, single-shot capture and
+ // gain/exposure/size/pixel-format control.
+ // NOTE(review): several template arguments appear to have been stripped
+ // during extraction (e.g. 'vector>' below was presumably
+ // vector<pair<int,string>>) — restore before compiling.
+ class CameraGigePylon : public Camera {
+
+ private :
+
+ static boost::log::sources::severity_logger< LogSeverityLevel > logger;
+
+ // One-time logger setup executed via the static 'initializer' member.
+ static class Init {
+
+ public :
+
+ Init() {
+
+ logger.add_attribute("ClassName", boost::log::attributes::constant("CameraGigePylon"));
+
+ }
+
+ } initializer;
+
+ // Automagically call PylonInitialize and PylonTerminate to ensure the pylon runtime system
+ // is initialized during the lifetime of this object.
+ Pylon::PylonAutoInitTerm autoInitTerm;
+
+ uint8_t* ppBuffersUC[nbBuffers]; // Buffer for the grabbed images in 8 bits format.
+ uint16_t* ppBuffersUS[nbBuffers]; // Buffer for the grabbed images in 16 bits format.
+ StreamBufferHandle handles[nbBuffers]; // Grabber handles of the registered buffers.
+ CTlFactory *pTlFactory; // Pylon transport layer factory (singleton).
+ ITransportLayer *pTl; // Pointer on the transport layer.
+ CBaslerGigECamera *pCamera; // Pointer on basler camera.
+ CBaslerGigECamera::StreamGrabber_t *pStreamGrabber; // Stream grabber of channel 0.
+ DeviceInfoList_t devices; // Devices found at construction time.
+ GrabResult result; // Last grab result (reused by grabImage()).
+ bool connectionStatus;
+ int mFrameCounter; // Number of frames grabbed so far.
+
+ public:
+
+ CameraGigePylon();
+
+ ~CameraGigePylon(void);
+
+ vector> getCamerasList();
+
+ bool listCameras();
+
+ bool createDevice(int id);
+
+ bool getDeviceNameById(int id, string &device);
+
+ bool grabInitialization();
+
+ void grabCleanse();
+
+ bool acqStart();
+
+ void acqStop();
+
+ bool grabImage(Frame& newFrame);
+
+ bool grabSingleImage(Frame &frame, int camID);
+
+ void getExposureBounds(double &eMin, double &eMax);
+
+ void getGainBounds(int &gMin, int &gMax);
+
+ bool getPixelFormat(CamPixFmt &format);
+
+ bool getFrameSize(int &w, int &h);
+
+ bool getFPS(double &value);
+
+ string getModelName();
+
+ bool setExposureTime(double exp);
+
+ bool setGain(int gain);
+
+ bool setFPS(double fps);
+
+ bool setPixelFormat(CamPixFmt format);
+
+ double getExposureTime();
+
+ bool setSize(int width, int height, bool customSize);
+
+ void getAvailablePixelFormats();
+
+ };
+
+#endif
diff --git a/CameraGigeTis.cpp b/CameraGigeTis.cpp
new file mode 100644
index 0000000..add4d5e
--- /dev/null
+++ b/CameraGigeTis.cpp
@@ -0,0 +1,892 @@
+/*
+ CameraGigeTis.cpp
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau -- FRIPON-GEOPS-UPSUD
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 21/01/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraGigeTis.cpp
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 21/01/2015
+* \brief Use Imaging source sdk to pilot GigE Cameras.
+* https://wiki.gnome.org/action/show/Projects/Aravis?action=show&redirect=Aravis
+*/
+
+#include "CameraGigeTis.h"
+
+#ifdef WINDOWS
+
+    // Class-wide logger; tagged with ClassName="CameraGigeTis" by 'initializer'.
+    boost::log::sources::severity_logger< LogSeverityLevel > CameraGigeTis::logger;
+
+    // Static instance whose constructor attaches the ClassName attribute.
+    CameraGigeTis::Init CameraGigeTis::initializer;
+
+    CameraGigeTis::CameraGigeTis(){
+
+        // Initialise the TIS DirectShow library; every other SDK call depends on it.
+        // NOTE(review): throws a raw string literal -- callers must catch const char*.
+        if(!DShowLib::InitLibrary())
+            throw "Fail DShowLib::InitLibrary().";
+
+        // SDK entry point, released in the destructor.
+        m_pGrabber = new DShowLib::Grabber();
+        mFrameCounter = 0;
+        mGain = 0;
+        mExposure = 0;
+        mFPS = 30;
+        mImgDepth = MONO8;      // Default pixel depth until setPixelFormat() runs.
+        mSaturateVal = 0;
+        mGainMin = -1;          // -1 = bounds not queried from the device yet.
+        mGainMax = -1;
+        mExposureMin = -1;
+        mExposureMax = -1;
+
+        // Capability flags read through the base Camera interface.
+        mExposureAvailable = true;
+        mGainAvailable = true;
+        mInputDeviceType = CAMERA;
+
+    }
+
+    // Enumerate the capture devices currently visible to the grabber and
+    // describe each one as "NAME[...] S/N[...] SDK[TIS]" keyed by its index.
+    // Returns an empty list when no device is available.
+    vector<pair<int,string>> CameraGigeTis::getCamerasList() {
+
+        vector<pair<int,string>> camerasList;
+
+        // Retrieve a list with the video capture devices connected to the computer.
+        pVidCapDevList = m_pGrabber->getAvailableVideoCaptureDevices();
+
+        // BUGFIX: guard against a null list before dereferencing it.
+        if(pVidCapDevList != 0) {
+
+            for(size_t i = 0; i < pVidCapDevList->size(); i++) {
+
+                // The serial number is a 64 bit integer; render it in hexadecimal.
+                LARGE_INTEGER iSerNum;
+                if(pVidCapDevList->at(i).getSerialNumber(iSerNum.QuadPart) == false) iSerNum.QuadPart = 0;
+                std::ostringstream ossSerNum;
+                ossSerNum << std::hex << iSerNum.QuadPart;
+                string SerNum = ossSerNum.str();
+
+                pair<int,string> c;
+                c.first = (int)i;
+                c.second = "NAME[" + pVidCapDevList->at(i).getName() + "] S/N[" + SerNum + "] SDK[TIS]";
+                camerasList.push_back(c);
+
+            }
+        }
+
+        return camerasList;
+
+    }
+
+    // Adapted from micromanager's TISCam SimplePropertyAccess helper.
+    // Returns the range interface of the "value" element of property 'id',
+    // or a null pointer when the element or interface does not exist.
+    DShowLib::tIVCDRangePropertyPtr CameraGigeTis::getPropertyRangeInterface( _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr& pItems, const GUID& id ){
+
+        DShowLib::tIVCDPropertyElementPtr pValueElem = pItems->findElement( id, DShowLib::VCDElement_Value );
+
+        if( pValueElem == 0 )
+            return 0;
+
+        DShowLib::tIVCDRangePropertyPtr pRangeItf;
+
+        if( pValueElem->getInterfacePtr( pRangeItf ) == 0 )
+            return 0;
+
+        return pRangeItf;
+    }
+
+    // A property is available when the container knows an item with this GUID.
+    bool CameraGigeTis::propertyIsAvailable( const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer ){
+
+        DShowLib::tIVCDPropertyItemPtr pItem = m_pItemContainer->findItem( id );
+        return pItem != 0;
+
+    }
+
+    // Current value of property 'id', or 0 when it has no range interface.
+    long CameraGigeTis::getPropertyValue( const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer ){
+
+        DShowLib::tIVCDRangePropertyPtr pRangeItf = getPropertyRangeInterface( m_pItemContainer, id );
+        return ( pRangeItf != 0 ) ? pRangeItf->getValue() : 0;
+
+    }
+
+    // Set property 'id' to 'val'; silently ignored when the property
+    // exposes no range interface.
+    void CameraGigeTis::setPropertyValue( const GUID& id, long val, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer ){
+
+        DShowLib::tIVCDRangePropertyPtr pRangeItf = getPropertyRangeInterface( m_pItemContainer, id );
+
+        if( pRangeItf == 0 )
+            return;
+
+        pRangeItf->setValue( val );
+    }
+
+    // Lower bound of property 'id', or 0 when it has no range interface.
+    long CameraGigeTis::getPropertyRangeMin( const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer ){
+
+        DShowLib::tIVCDRangePropertyPtr pRangeItf = getPropertyRangeInterface( m_pItemContainer, id );
+        return ( pRangeItf != 0 ) ? pRangeItf->getRangeMin() : 0;
+    }
+
+    // Upper bound of property 'id', or 0 when it has no range interface.
+    long CameraGigeTis::getPropertyRangeMax(const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer){
+
+        DShowLib::tIVCDRangePropertyPtr pRangeItf = getPropertyRangeInterface( m_pItemContainer, id );
+        return ( pRangeItf != 0 ) ? pRangeItf->getRangeMax() : 0;
+    }
+
+    // Select the slowest frame rate the device offers (used for long single
+    // exposures) and apply it. Returns false when no rate is available or on
+    // an SDK exception.
+    bool CameraGigeTis::setFpsToLowerValue() {
+
+        try {
+
+            // The SDK reports frame intervals in milliseconds; convert each to
+            // fps, round to the nearest 0.25 and keep the smallest one.
+            DShowLib::Grabber::tFrameRateListPtr pRates = m_pGrabber->getAvailableFrameRates();
+            double lowest = 0.0;
+            cout << "Available FPS : | " ;
+
+            for(int k = 0; k < pRates->size(); k++) {
+
+                double fps = Conversion::roundToNearest((1.0/((float)pRates->at(k) / 1000.0)), 0.25);
+                cout << fps << " | ";
+
+                if(lowest == 0.0 || fps < lowest)
+                    lowest = fps;
+
+            }
+            cout << endl;
+
+            if(lowest != 0.0) {
+                mFPS = lowest;
+                cout << ">> Fps setted to the lower value : " << lowest << endl;
+                m_pGrabber->setFPS(lowest);
+                return true;
+            }
+
+        }catch(exception& e) {
+
+            BOOST_LOG_SEV(logger,critical) << "An error occured on set lower fps operation.";
+            BOOST_LOG_SEV(logger,critical) << e.what();
+
+        }
+
+        return false;
+
+    }
+
+    // Apply the device frame rate closest to the requested 'value'.
+    // Returns false when no rate is available or on an SDK exception.
+    bool CameraGigeTis::setFPS(double value) {
+
+        try {
+
+            // Convert each reported interval (ms) to a rounded fps figure and
+            // keep the one whose distance to the target is smallest.
+            DShowLib::Grabber::tFrameRateListPtr pRates = m_pGrabber->getAvailableFrameRates();
+            double best = 0.0;
+            double bestDist = 0.0;
+            cout << "Available FPS : | " ;
+
+            for(int k = 0; k < pRates->size(); k++) {
+
+                double fps = Conversion::roundToNearest((1.0/((float)pRates->at(k) / 1000.0)), 0.25);
+                cout << fps << " | ";
+
+                double dist = abs(fps - value);
+
+                if(best == 0.0 && bestDist == 0.0) {
+                    best = fps;
+                    bestDist = dist;
+                }else if(dist < bestDist) {
+                    best = fps;
+                    bestDist = dist;
+                }
+
+            }
+            cout << endl;
+
+            if(best != 0.0) {
+                mFPS = best;
+                cout << ">> Set fps to : " << best << endl;
+                m_pGrabber->setFPS(best);
+                return true;
+            }
+
+        }catch(exception& e) {
+
+            BOOST_LOG_SEV(logger,critical) << "An error occured on set fps operation.";
+            BOOST_LOG_SEV(logger,critical) << e.what();
+
+        }
+
+        return false;
+
+    }
+
+    // Open capture device number 'id'. Returns false when no device exists
+    // or the index is out of range.
+    bool CameraGigeTis::createDevice(int id){
+
+        // Refresh the list of capture devices seen by the grabber.
+        pVidCapDevList = m_pGrabber->getAvailableVideoCaptureDevices();
+
+        if(pVidCapDevList == 0 || pVidCapDevList->empty()){
+            BOOST_LOG_SEV(logger,fail) << "No device available.";
+            return false;
+        }
+
+        // Reject indices outside [0, size-1].
+        if(id < 0 || ((id+1) > pVidCapDevList->size())) {
+            BOOST_LOG_SEV(logger,fail) << "Camera ID not correct. Can't be found.";
+            return false;
+        }
+
+        // Open the selected video capture device.
+        m_pGrabber->openDev(pVidCapDevList->at(id));
+        return true;
+
+    }
+
+    // Select a device video format matching the requested pixel depth and
+    // create the corresponding frame-handler sink (consumed later by
+    // grabInitialization / grabSingleImage). Returns false when the device
+    // offers no format for that depth.
+    bool CameraGigeTis::setPixelFormat(CamPixFmt format) {
+
+        mImgDepth = format;
+
+        vector mono12, mono8;
+
+        // Get list of possible format.
+        DShowLib::Grabber::tVidFmtListPtr VidFmtListPtr = m_pGrabber->getAvailableVideoFormats();
+        string dateDelimiter = " ";
+        cout << "Available Format : " << endl;
+        for(int i = 0; i < VidFmtListPtr->size(); i++) {
+
+            // Keep only the first token of the format name (e.g. "Y800" from
+            // "Y800 (640x480)") to classify it by pixel depth.
+            string s = VidFmtListPtr->at(i).c_str();
+            string s1 = s.substr(0, s.find(dateDelimiter));
+            cout << "-> (" << Conversion::intToString(i) << ") " << VidFmtListPtr->at(i).c_str() << endl;
+
+            if(s1 == "Y8" || s1 == "Y800"){
+
+                mono8.push_back(VidFmtListPtr->at(i).c_str());
+
+            }else if(s1 == "Y12" || s1 == "Y16"){
+
+                mono12.push_back(VidFmtListPtr->at(i).c_str());
+
+            }
+        }
+
+        cout << endl;
+
+        switch(format){
+
+            case MONO8 :
+
+                if(mono8.size() == 0)
+                    return false;
+
+                m_pGrabber->setVideoFormat(mono8.front());//"Y8 (1280x960-1280x960)");
+
+                // Set the image buffer format to eY800. eY800 means monochrome, 8 bits (1 byte) per pixel.
+                // Let the sink create a matching MemBufferCollection with 1 buffer.
+                pSink = DShowLib::FrameHandlerSink::create( DShowLib::eY800, NUMBER_OF_BUFFERS );
+
+                break;
+
+            case MONO12 :
+
+                if(mono12.size() == 0)
+                    return false;
+
+                m_pGrabber->setVideoFormat(mono12.front());//"Y16 (1280x960-1280x960)");
+
+                // Disable overlay.
+                // http://www.theimagingsourceforums.com/archive/index.php/t-319880.html
+                m_pGrabber->setOverlayBitmapPathPosition(DShowLib::ePP_NONE);
+
+                // Set the image buffer format to eY16. eY16 means monochrome, 16 bits (2 byte) per pixel.
+                // Let the sink create a matching MemBufferCollection with 1 buffer.
+                pSink = DShowLib::FrameHandlerSink::create( DShowLib::eY16, NUMBER_OF_BUFFERS );
+
+                break;
+
+            default:
+
+                return false;
+
+                break;
+        }
+
+        return true;
+
+    }
+
+    // Report the grabber's currently configured frame rate. Always succeeds.
+    bool CameraGigeTis::getFPS(double &value){
+
+        value = m_pGrabber->getFPS();
+        return true;
+    }
+
+    // Query the absolute-value interface of the exposure property and report
+    // its range converted from seconds to microseconds.
+    // NOTE: eMin/eMax are left untouched when the property is unavailable.
+    void CameraGigeTis::getExposureBounds(double &eMin, double &eMax) {
+
+        DShowLib::tIVCDAbsoluteValuePropertyPtr pExposureRange;
+
+        pExposureRange = NULL;
+
+        DShowLib::tIVCDPropertyItemsPtr pItems = m_pGrabber->getAvailableVCDProperties();
+
+        if( pItems != 0 ) {
+
+            // Try to find the exposure item.
+            DShowLib::tIVCDPropertyItemPtr pExposureItem = pItems->findItem( DShowLib::VCDID_Exposure );
+
+            if( pExposureItem != 0 ) {
+
+                // Try to find the value element.
+                DShowLib::tIVCDPropertyElementPtr pExposureValueElement = pExposureItem->findElement( DShowLib::VCDElement_Value );
+
+                // If a value element exists, try to acquire a range interface.
+                if( pExposureValueElement != 0 ) {
+
+                    pExposureValueElement->getInterfacePtr( pExposureRange );
+
+                    // BUGFIX: getInterfacePtr can leave the pointer null; guard
+                    // before dereferencing instead of crashing.
+                    if( pExposureRange != 0 ) {
+
+                        eMin = pExposureRange->getRangeMin() * 1000000.0; // in us
+                        eMax = pExposureRange->getRangeMax() * 1000000.0; // in us
+
+                    }
+                }
+            }
+        }
+    }
+
+    // Read the device gain range through the generic VCD range helpers.
+    void CameraGigeTis::getGainBounds(int &gMin, int &gMax) {
+
+        _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr pProps = m_pGrabber->getAvailableVCDProperties();
+
+        gMin = (int)getPropertyRangeMin(DShowLib::VCDID_Gain, pProps);
+        gMax = (int)getPropertyRangeMax(DShowLib::VCDID_Gain, pProps);
+
+    }
+
+ // http://www.theimagingsourceforums.com/faq.php?faq=ic_programming
+ bool CameraGigeTis::setExposureTime(double value) {
+
+ // Conversion in seconds
+ value = value / 1000000.0;
+
+ bool bOK = false;
+
+ DShowLib::tIVCDAbsoluteValuePropertyPtr pExposureRange;
+ DShowLib::tIVCDSwitchPropertyPtr pExposureAuto;
+
+ pExposureRange = NULL;
+ pExposureAuto = NULL;
+
+ DShowLib::tIVCDPropertyItemsPtr pItems = m_pGrabber->getAvailableVCDProperties();
+
+ if( pItems != 0 ) {
+ // Try to find the exposure item.
+ DShowLib::tIVCDPropertyItemPtr pExposureItem = pItems->findItem( DShowLib::VCDID_Exposure );
+ if( pExposureItem != 0 ) {
+ // Try to find the value and auto elements
+ DShowLib::tIVCDPropertyElementPtr pExposureValueElement = pExposureItem->findElement( DShowLib::VCDElement_Value );
+ DShowLib::tIVCDPropertyElementPtr pExposureAutoElement = pExposureItem->findElement( DShowLib::VCDElement_Auto );
+
+ // If an auto element exists, try to acquire a switch interface
+ if( pExposureAutoElement != 0 ) {
+ pExposureAutoElement->getInterfacePtr( pExposureAuto );
+ pExposureAuto->setSwitch(false); // Disable auto, otherwise we can not set exposure.
+ }
+
+ // If a value element exists, try to acquire a range interface
+ if( pExposureValueElement != 0 ) {
+
+ pExposureValueElement->getInterfacePtr( pExposureRange );
+
+ mExposureMin = pExposureRange->getRangeMin();
+ mExposureMax = pExposureRange->getRangeMax();
+
+ cout << "Available exposure range : [ " << mExposureMin << " - "<< mExposureMax << " ]" << endl;
+
+ if ( value <= mExposureMin ) {
+ value = mExposureMin + 0.000010;
+ BOOST_LOG_SEV(logger,warning) << "EXPOSURE TIME setted to " << value << ". Available range [" << mExposureMin << " - " << mExposureMax<< "]";
+ } else if( value >= mExposureMax ) {
+ value = mExposureMax;
+ BOOST_LOG_SEV(logger,warning) << "EXPOSURE TIME setted to " << value << ". Available range [" << mExposureMin << " - " << mExposureMax<< "]";
+ }
+
+ // Here we set the the exposure value.
+ cout << ">> Set exposure time to : " << value << endl;
+ pExposureRange->setValue( value);
+ mExposure = value * 1000000.0;
+ bOK = true;
+ }
+ }
+ }
+
+ return bOK;
+ }
+
+    // Print which of the device's video formats map onto freeture's
+    // MONO8 / MONO12 pixel formats. Informational only; no state is changed.
+    void CameraGigeTis::getAvailablePixelFormats() {
+
+        if(m_pGrabber != NULL) {
+
+            vector<string> pixfmt;
+            EParser<CamPixFmt> fmt;
+            DShowLib::Grabber::tVidFmtListPtr pVidFmtList = m_pGrabber->getAvailableVideoFormats();
+
+            // List the available video formats and classify them by name.
+            for(DShowLib::Grabber::tVidFmtListPtr::value_type::iterator it = pVidFmtList->begin(); it != pVidFmtList->end(); ++it)
+            {
+                string pf = it->c_str();
+
+                if(pf.find("Y8") != std::string::npos) {
+                    pixfmt.push_back("MONO8");
+                }
+
+                if(pf.find("Y16") != std::string::npos) {
+                    pixfmt.push_back("MONO12");
+                }
+
+            }
+
+            std::cout << endl << ">> Available pixel formats :" << endl;
+
+            // size_t avoids the signed/unsigned comparison of the original loop.
+            for( size_t i = 0; i != pixfmt.size(); i++ ) {
+                if(fmt.isEnumValue(pixfmt.at(i))) {
+                    std::cout << "- " << pixfmt.at(i) << " available --> ID : " << fmt.parseEnum(pixfmt.at(i)) << endl;
+                }
+            }
+        }
+
+    }
+
+    // Set the device gain. Disables auto-gain first, clamps an out-of-range
+    // request to the minimum (as the original warning text documents), and
+    // caches the applied value in mGain. Returns true on success.
+    bool CameraGigeTis::setGain(int value) {
+
+        bool bOK = false;
+        DShowLib::tIVCDSwitchPropertyPtr pGainAuto;
+
+        pGainAuto = NULL;
+
+        DShowLib::tIVCDPropertyItemsPtr pItems = m_pGrabber->getAvailableVCDProperties();
+
+        if( pItems != 0 ) {
+
+            // Try to find the gain item.
+            DShowLib::tIVCDPropertyItemPtr pGainItem = pItems->findItem( DShowLib::VCDID_Gain );
+
+            if( pGainItem != 0 ) {
+
+                // Try to find the auto element.
+                DShowLib::tIVCDPropertyElementPtr pGainAutoElement = pGainItem->findElement( DShowLib::VCDElement_Auto );
+
+                // Disable auto gain, otherwise we can not set the gain.
+                if( pGainAutoElement != 0 ) {
+                    pGainAutoElement->getInterfacePtr( pGainAuto );
+                    // BUGFIX: getInterfacePtr can leave the pointer null.
+                    if( pGainAuto != 0 )
+                        pGainAuto->setSwitch(false);
+                }
+
+                mGainMin = (int)getPropertyRangeMin(DShowLib::VCDID_Gain, pItems);
+                mGainMax = (int)getPropertyRangeMax(DShowLib::VCDID_Gain, pItems);
+
+                cout << "Available gain range : [ " << mGainMin << " - "<< mGainMax << " ]" << endl;
+
+                if(value > mGainMax || value < mGainMin){
+
+                    BOOST_LOG_SEV(logger,warning) << "Fail to set GAIN. Available range value is " << mGainMin << " to " << mGainMax;
+                    cout << endl << ">> Fail to set GAIN. Available range value is " << mGainMin << " to " << mGainMax << endl;
+                    value = mGainMin;
+                }
+
+                setPropertyValue(DShowLib::VCDID_Gain, (long)value, pItems);
+                cout << ">> Set gain to : " << value << endl;
+                mGain = value;
+                bOK = true;
+
+            }
+        }
+        return bOK;
+
+    }
+
+    // Bind the sink built by setPixelFormat() to the grabber, allocate the raw
+    // image buffers and start the live stream (without a display window).
+    // Returns false on any SDK failure.
+    bool CameraGigeTis::grabInitialization() {
+
+        // Set the sink.
+        m_pGrabber->setSinkType(pSink);
+
+        // We use snap mode.
+        pSink->setSnapMode(true);
+
+        // Prepare the live mode, to get the output size of the sink.
+        if(!m_pGrabber->prepareLive(false)){
+
+            std::cerr << "Could not render the VideoFormat into a eY800 sink.";
+            return false;
+        }
+
+        // Retrieve the output type and dimension of the handler sink.
+        // The dimension of the sink could be different from the VideoFormat, when
+        // you use filters.
+        DShowLib::FrameTypeInfo info;
+        pSink->getOutputFrameType(info);
+
+        // Allocate NUMBER_OF_BUFFERS image buffers of the above (info) buffer size.
+        // NOTE(review): these buffers are never delete[]d here or in the
+        // destructor -- apparent leak, confirm before releasing.
+        for (int ii = 0; ii < NUMBER_OF_BUFFERS; ++ii) {
+            pBuf[ii] = new BYTE[info.buffersize];
+            assert(pBuf[ii]);
+        }
+
+        // Create a new MemBuffer collection that uses our own image buffers.
+        pCollection = DShowLib::MemBufferCollection::create(info, NUMBER_OF_BUFFERS, pBuf);
+        if (pCollection == 0) return false;
+        if (!pSink->setMemBufferCollection(pCollection)) return false;
+
+        // Start streaming; 'false' suppresses the SDK's live display window.
+        if (!m_pGrabber->startLive(false)) return false;
+
+        return true;
+
+    }
+
+    // Ensure the live stream is running (no display window) and snap a first
+    // image so the sink's buffers are primed. Always reports success.
+    bool CameraGigeTis::acqStart() {
+
+        if(!m_pGrabber->isLive())
+            m_pGrabber->startLive(false);
+
+        pSink->snapImages(1,(DWORD)-1);
+
+        return true;
+
+    }
+
+    // Snap one image from the sink into 'newFrame', stamping it with the
+    // current gain/exposure/fps state and an incrementing frame number.
+    // Returns false for unsupported bit depths or an empty image.
+    bool CameraGigeTis::grabImage(Frame &newFrame) {
+
+        Mat newImg;
+
+        // Retrieve the output type and dimension of the handler sink.
+        // The dimension of the sink could be different from the VideoFormat, when
+        // you use filters.
+        DShowLib::FrameTypeInfo info;
+        pSink->getOutputFrameType(info);
+
+        // Timestamping.
+        boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
+
+        switch(info.getBitsPerPixel()){
+
+            case 8 :
+
+                {
+                    newImg = Mat(info.dim.cy, info.dim.cx, CV_8UC1, Scalar(0));
+                    pSink->snapImages(1,(DWORD)-1);
+                    memcpy(newImg.ptr(), pBuf[0], info.buffersize);
+                }
+
+                break;
+
+            case 16 :
+
+                {
+                    newImg = Mat(info.dim.cy, info.dim.cx, CV_16UC1, Scalar(0));
+
+                    pSink->snapImages(1,(DWORD)-1);
+
+                    memcpy(newImg.ptr(), pBuf[0], info.buffersize);
+
+                    // Right-align the pixel data (data appears to arrive
+                    // left-aligned in 16 bit words -- TODO confirm with sensor doc).
+                    // BUGFIX: removed an unused getTickCount() timing variable.
+                    unsigned short * ptr;
+
+                    for(int i = 0; i < newImg.rows; i++){
+
+                        ptr = newImg.ptr<unsigned short>(i);
+
+                        for(int j = 0; j < newImg.cols; j++){
+
+                            ptr[j] = ptr[j] >> 4;
+
+                        }
+                    }
+                }
+
+                break;
+
+            default:
+
+                return false;
+
+                break;
+        }
+
+        if(newImg.data) {
+
+            // Package the pixels with the acquisition context.
+            newFrame = Frame(newImg, mGain, mExposure, to_iso_extended_string(time));
+
+            newFrame.mFps = mFPS;
+            newFrame.mFormat = mImgDepth;
+            newFrame.mSaturatedValue = mSaturateVal;
+
+            newFrame.mFrameNumber = mFrameCounter;
+            mFrameCounter++;
+
+            return true;
+
+        }
+
+        return false;
+
+    }
+
+    // Not implemented for this SDK: both branches are empty, the parameters
+    // are ignored and the call always reports success. The frame size comes
+    // from the video format chosen in setPixelFormat().
+    bool CameraGigeTis::setSize(int width, int height, bool customSize) {
+
+        if(customSize){
+
+        }else{
+
+        }
+
+        return true;
+
+    }
+
+    // Stop the live stream and release the capture device.
+    void CameraGigeTis::acqStop() {
+
+        m_pGrabber->stopLive();
+        m_pGrabber->closeDev();
+
+    }
+
+    // Release the capture device if a grabber exists; safe to call repeatedly.
+    void CameraGigeTis::grabCleanse() {
+
+        if(m_pGrabber!=NULL)
+            m_pGrabber->closeDev();
+
+    }
+
+    // Read the current exposure value from the device, 0.0 when unavailable.
+    // NOTE(review): setExposureTime() works in seconds via the same interface,
+    // so the /1000000.0 here may double-convert -- confirm the expected unit.
+    double CameraGigeTis::getExposureTime() {
+
+        DShowLib::tIVCDAbsoluteValuePropertyPtr pExposureRange;
+
+        pExposureRange = NULL;
+
+        DShowLib::tIVCDPropertyItemsPtr pItems = m_pGrabber->getAvailableVCDProperties();
+
+        if( pItems != 0 ) {
+
+            // Try to find the exposure item.
+            DShowLib::tIVCDPropertyItemPtr pExposureItem = pItems->findItem( DShowLib::VCDID_Exposure );
+            if( pExposureItem != 0 ) {
+
+                // Try to find the value element.
+                DShowLib::tIVCDPropertyElementPtr pExposureValueElement = pExposureItem->findElement( DShowLib::VCDElement_Value );
+
+                // If a value element exists, try to acquire a range interface.
+                if( pExposureValueElement != 0 ) {
+
+                    pExposureValueElement->getInterfacePtr( pExposureRange );
+
+                    // BUGFIX: getInterfacePtr can leave the pointer null; guard
+                    // before dereferencing instead of crashing.
+                    if( pExposureRange != 0 )
+                        return (pExposureRange->getValue()/1000000.0);
+
+                }
+            }
+        }
+
+        return 0.0;
+
+    }
+
+    // Map the current video format's bit depth onto freeture's pixel formats.
+    // Returns false for depths other than 8, 12 or 16 bits.
+    bool CameraGigeTis::getPixelFormat(CamPixFmt &format) {
+
+        const int bpp = m_pGrabber->getVideoFormat().getBitsPerPixel();
+
+        if(bpp == 8) {
+            format = MONO8;
+            return true;
+        }
+
+        if(bpp == 16 || bpp == 12) {
+            format = MONO12;
+            return true;
+        }
+
+        return false;
+
+    }
+
+    // Configure the device with the parameters carried by 'frame' (format,
+    // exposure, gain), snap one image at the lowest available frame rate and
+    // copy it (timestamped) back into 'frame'. Returns false on any failure.
+    bool CameraGigeTis::grabSingleImage(Frame &frame, int camID) {
+
+        if(!createDevice(camID))
+            return false;
+
+        if(!setPixelFormat(frame.mFormat))
+            return false;
+
+        // Set lower fps value.
+        if(!setFpsToLowerValue())
+            return false;
+
+        if(!setExposureTime(frame.mExposure))
+            return false;
+
+        if(!setGain(frame.mGain))
+            return false;
+
+        cout << ">> Acquisition in progress... (Please wait)" << endl;
+
+        // We use snap mode.
+        pSink->setSnapMode(true);
+
+        // Set the sink.
+        m_pGrabber->setSinkType(pSink);
+
+        // Disable live mode.
+        m_pGrabber->prepareLive(false);
+
+        // Retrieve the output type and dimension of the handler sink.
+        DShowLib::FrameTypeInfo info;
+        pSink->getOutputFrameType(info);
+
+        Mat newImg;
+        DShowLib::Error e;
+
+        // BUGFIX: the snap buffer was allocated with new[] inside each case and
+        // never released (leak). It is hoisted here and freed after closeDev().
+        BYTE* pSnapBuf[1];
+        pSnapBuf[0] = NULL;
+
+        //Timestamping.
+        boost::posix_time::ptime time;
+
+        switch(info.getBitsPerPixel()){
+
+            case 8 :
+
+                {
+                    newImg = Mat(info.dim.cy, info.dim.cx, CV_8UC1, Scalar(0));
+
+                    // Allocate one image buffer of the computed buffer size.
+                    pSnapBuf[0] = new BYTE[info.buffersize];
+
+                    // Create a new MemBuffer collection that uses our own image buffer.
+                    pCollection = DShowLib::MemBufferCollection::create( info, 1, pSnapBuf );
+
+                    if( pCollection == 0 || !pSink->setMemBufferCollection(pCollection)){
+
+                        BOOST_LOG_SEV(logger,critical) << "Could not set the new MemBufferCollection.";
+
+                    }else {
+
+                        // BUGFIX: start without the SDK display window, matching
+                        // the 16 bit path (original used the defaulted overload).
+                        m_pGrabber->startLive(false);
+
+                        e = pSink->snapImages(1);
+
+                        if( !e.isError()) {
+                            time = boost::posix_time::microsec_clock::universal_time();
+                            memcpy(newImg.ptr(), pSnapBuf[0], info.buffersize);
+                        }
+                    }
+                }
+
+                break;
+
+            case 16 :
+
+                {
+                    newImg = Mat(info.dim.cy, info.dim.cx, CV_16UC1, Scalar(0));
+
+                    // Allocate one image buffer of the computed buffer size.
+                    pSnapBuf[0] = new BYTE[info.buffersize];
+
+                    // Create a new MemBuffer collection that uses our own image buffer.
+                    pCollection = DShowLib::MemBufferCollection::create(info, 1, pSnapBuf);
+
+                    if(pCollection == 0 || !pSink->setMemBufferCollection(pCollection)){
+
+                        BOOST_LOG_SEV(logger,critical) << "Could not set the new MemBufferCollection.";
+
+                    }else {
+
+                        m_pGrabber->startLive(false);
+
+                        e = pSink->snapImages(1);
+
+                        if( !e.isError()) {
+
+                            time = boost::posix_time::microsec_clock::universal_time();
+                            memcpy(newImg.ptr(), pSnapBuf[0], info.buffersize);
+
+                            // Right-align the pixel data (data appears to arrive
+                            // left-aligned in 16 bit words -- TODO confirm).
+                            unsigned short * ptr;
+                            for(int i = 0; i < newImg.rows; i++){
+                                ptr = newImg.ptr<unsigned short>(i);
+                                for(int j = 0; j < newImg.cols; j++){
+                                    ptr[j] = ptr[j] >> 4;
+                                }
+                            }
+                        }
+                    }
+                }
+
+                break;
+
+            default:
+
+                return false;
+
+                break;
+        }
+
+        m_pGrabber->stopLive();
+
+        m_pGrabber->closeDev();
+
+        // BUGFIX: drop our reference to the collection, then release the snap
+        // buffer now that the device and sink no longer use it.
+        if(pSnapBuf[0] != NULL) {
+            pCollection = NULL;
+            delete [] pSnapBuf[0];
+        }
+
+        if( !e.isError()) {
+
+            newImg.copyTo(frame.mImg);
+            frame.mDate = TimeDate::splitIsoExtendedDate(to_iso_extended_string(time));
+            frame.mFps = 0;
+
+            return true;
+        }
+
+        return false;
+    }
+
+    // Release the grabber and shut the TIS library down.
+    CameraGigeTis::~CameraGigeTis(){
+
+        // BUGFIX: destroy the SDK object BEFORE DShowLib::ExitLibrary();
+        // releasing a Grabber after the library has been shut down is unsafe.
+        if(m_pGrabber != NULL)
+            delete m_pGrabber;
+
+        DShowLib::ExitLibrary();
+
+    }
+
+#endif
diff --git a/CameraGigeTis.h b/CameraGigeTis.h
new file mode 100644
index 0000000..9b3a20f
--- /dev/null
+++ b/CameraGigeTis.h
@@ -0,0 +1,158 @@
+/* CameraGigeTis.h
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau -- FRIPON-GEOPS-UPSUD
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 21/01/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraGigeTis.h
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 21/01/2015
+* \brief Use Imaging source sdk to pilot GigE Cameras.
+*/
+
+#pragma once
+
+#include "config.h"
+
+#ifdef WINDOWS
+
+ #include "opencv2/highgui/highgui.hpp"
+ #include
+ #include
+ #include
+ #include "Frame.h"
+ #include "TimeDate.h"
+ #include "Camera.h"
+ #include "EParser.h"
+ #include "ECamPixFmt.h"
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include "ELogSeverityLevel.h"
+ #include "tisudshl.h"
+ #include
+
+ #define NUMBER_OF_BUFFERS 1
+
+ using namespace cv;
+ using namespace std;
+
+    // GigE camera driver backed by The Imaging Source (TIS) DirectShow SDK.
+    class CameraGigeTis: public Camera {
+
+        private:
+
+            static boost::log::sources::severity_logger< LogSeverityLevel > logger;
+
+            // Adds a "ClassName" attribute so log records can be filtered per class.
+            static class Init {
+
+                public:
+
+                    Init() {
+
+                        logger.add_attribute("ClassName", boost::log::attributes::constant("CameraGigeTis"));
+
+                    }
+
+            } initializer;
+
+            // Device list as last enumerated by the grabber.
+            DShowLib::Grabber::tVidCapDevListPtr pVidCapDevList;
+
+            // Helpers around the SDK's VCD property interfaces.
+            DShowLib::tIVCDRangePropertyPtr getPropertyRangeInterface(_DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr& pItems, const GUID& id);
+            bool propertyIsAvailable(const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer);
+            long getPropertyValue(const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer);
+            void setPropertyValue(const GUID& id, long val, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer);
+            long getPropertyRangeMin(const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer);
+            long getPropertyRangeMax(const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer);
+
+            DShowLib::Grabber* m_pGrabber;                          // SDK entry point (owned, freed in dtor).
+            DShowLib::tFrameHandlerSinkPtr pSink;                   // Snap-mode sink created by setPixelFormat().
+            DShowLib::Grabber::tMemBufferCollectionPtr pCollection; // Buffer collection bound to the sink.
+            BYTE* pBuf[NUMBER_OF_BUFFERS];                          // Raw image buffers used by the collection.
+
+            int mFrameCounter;          // Frames grabbed since acquisition start.
+            int mGain;                  // Last gain applied by setGain().
+            double mExposure;           // Last exposure applied, in microseconds.
+            double mFPS;                // Last frame rate applied.
+            CamPixFmt mImgDepth;        // Pixel format selected by setPixelFormat().
+            int mSaturateVal;           // Saturation value stamped on grabbed frames.
+            int mGainMin;               // Gain bounds (-1 until queried).
+            int mGainMax;
+            // BUGFIX: the exposure range is reported by the SDK as a double in
+            // seconds; storing it in an int truncated sub-second bounds to 0.
+            double mExposureMin;        // Exposure bounds in seconds (-1 until queried).
+            double mExposureMax;
+
+        public:
+
+            CameraGigeTis();
+
+            ~CameraGigeTis();
+
+            vector<pair<int,string>> getCamerasList();
+
+            bool grabSingleImage(Frame &frame, int camID);
+
+            bool createDevice(int id);
+
+            bool setPixelFormat(CamPixFmt format);
+
+            void getExposureBounds(double &eMin, double &eMax);
+
+            void getGainBounds(int &gMin, int &gMax);
+
+            bool getFPS(double &value);
+
+            bool setExposureTime(double value);
+
+            bool setGain(int value);
+
+            bool setFPS(double value);
+
+            bool setFpsToLowerValue();
+
+            bool grabInitialization();
+
+            bool acqStart();
+
+            bool grabImage(Frame &newFrame);
+
+            void acqStop();
+
+            void grabCleanse();
+
+            bool getPixelFormat(CamPixFmt &format);
+
+            double getExposureTime();
+
+            bool setSize(int width, int height, bool customSize);
+
+            void getAvailablePixelFormats();
+
+    };
+
+#endif
diff --git a/CameraV4l2.cpp b/CameraV4l2.cpp
new file mode 100644
index 0000000..4a67bed
--- /dev/null
+++ b/CameraV4l2.cpp
@@ -0,0 +1,1942 @@
+/*
+ CameraV4l2.cpp
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see <http://www.gnu.org/licenses/>.
+*
+* Last modified: 17/08/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraV4l2.cpp
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 17/08/2015
+*/
+
+#include "CameraV4l2.h"
+
+#ifdef LINUX
+
+    // Buffer I/O strategies supported by V4L2 : blocking read(), memory
+    // mapping, or user-space pointers handed to the driver.
+    enum io_method {
+        IO_METHOD_READ,
+        IO_METHOD_MMAP,
+        IO_METHOD_USERPTR,
+    };
+
+    // One capture buffer : start address and length in bytes.
+    struct buffer
+    {
+        void *start;
+        size_t length;
+    };
+
+    // Most recently dequeued V4L2 buffer descriptor (index is used by
+    // grabImage()/grabSingleImage() to locate the frame data in 'buffers').
+    struct v4l2_buffer buf;
+
+    // Active I/O strategy; memory mapping is the default.
+    enum io_method io = IO_METHOD_MMAP;
+
+    // Array of 'n_buffers' capture buffers, allocated when acquisition starts.
+    // NOTE(review): these are file-scope globals, so two CameraV4l2 instances
+    // would share them — confirm only one camera is driven at a time.
+    struct buffer *buffers = NULL;
+    unsigned int n_buffers;
+    int out_buf = 1;
+    int frame_count = 10;
+    int frame_number = 0;
+
+
+    boost::log::sources::severity_logger< LogSeverityLevel > CameraV4l2::logger;
+    CameraV4l2::Init CameraV4l2::initializer;
+
+    // Default constructor : initializes the device descriptor, the cached
+    // exposure/gain bookkeeping and the default capture size (640x480).
+    // No device access happens here; createDevice() opens the video node.
+    CameraV4l2::CameraV4l2(){
+
+        // The file-scope 'io' already defaults to IO_METHOD_MMAP; the former
+        // local 'io_method io' declaration only shadowed it without any
+        // effect and has been removed.
+        fd = -1;
+        out_buf = 1;
+        frame_count = 10;
+        frame_number = 0;
+        expMin = 0;
+        expMax = 0;
+        exp = 0;
+        gain = 0;
+        gainMin = 0;
+        gainMax = 0;
+        mFrameCounter = 0;
+        mWidth = 640;
+        mHeight = 480;
+        n_buffers = 3;
+
+        mExposureAvailable = true;
+        mGainAvailable = true;
+        mCustomSize = false;
+        mInputDeviceType = CAMERA;
+
+    }
+
+    // Destructor : nothing to release here; device and buffer teardown is
+    // performed explicitly by grabCleanse().
+    CameraV4l2::~CameraV4l2() {}
+
+    // Prints an identification/capability summary of the opened device to
+    // stdout : driver name, card name, bus location, driver version, crop
+    // capabilities, the list of supported pixel formats and the exposure
+    // and gain bounds. Returns false as soon as one of the queries fails.
+    bool CameraV4l2::getInfos() {
+
+        struct v4l2_capability caps = {};
+
+        // http://linuxtv.org/downloads/v4l-dvb-apis/vidioc-querycap.html
+
+        if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &caps)) {
+            perror("Querying Capabilities");
+            return false;
+        }
+
+        cout << "Driver name : " << caps.driver << endl;
+        cout << "Device name : " << caps.card << endl;
+        cout << "Device location : " << caps.bus_info << endl;
+        printf ("Driver version : %u.%u.%u\n",(caps.version >> 16) & 0xFF, (caps.version >> 8) & 0xFF, caps.version & 0xFF);
+        cout << "Capabilities : " << caps.capabilities << endl;
+
+        // Cropping bounds, default rectangle and pixel aspect ratio.
+        struct v4l2_cropcap cropcap;
+        memset(&cropcap, 0, sizeof(cropcap));
+        cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        if (-1 == xioctl (fd, VIDIOC_CROPCAP, &cropcap)) {
+            perror("Querying Cropping Capabilities");
+            return false;
+        }
+
+        printf( "Camera Cropping :\n"
+                " Bounds : %dx%d+%d+%d\n"
+                " Default : %dx%d+%d+%d\n"
+                " Aspect : %d/%d\n",
+                cropcap.bounds.width, cropcap.bounds.height, cropcap.bounds.left, cropcap.bounds.top,
+                cropcap.defrect.width, cropcap.defrect.height, cropcap.defrect.left, cropcap.defrect.top,
+                cropcap.pixelaspect.numerator, cropcap.pixelaspect.denominator);
+
+        // Enumerate the pixel formats exposed by the driver.
+        int support_grbg10 = 0;
+
+        struct v4l2_fmtdesc fmtdesc = {0};
+        fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        char fourcc[5] = {0};
+        char c, e;
+        printf( " FORMAT : CE Desc\n");
+        while (0 == xioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) {
+            strncpy(fourcc, (char *)&fmtdesc.pixelformat, 4);
+            if (fmtdesc.pixelformat == V4L2_PIX_FMT_SGRBG10)
+                support_grbg10 = 1;
+            // C = compressed format, E = emulated format (v4l2_fmtdesc flag bits).
+            c = fmtdesc.flags & 1? 'C' : ' ';
+            e = fmtdesc.flags & 2? 'E' : ' ';
+            printf(" %s : %c%c %s\n", fourcc, c, e, fmtdesc.description);
+            fmtdesc.index++;
+        }
+
+        /*struct v4l2_format fmt = {0};
+        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        fmt.fmt.pix.width = 640;
+        fmt.fmt.pix.height = 480;
+        //fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_BGR24;
+        //fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_GREY;
+        fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
+        fmt.fmt.pix.field = V4L2_FIELD_NONE;
+
+        if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt)) {
+            perror("Setting Pixel Format");
+            return false;
+        }
+
+        strncpy(fourcc, (char *)&fmt.fmt.pix.pixelformat, 4);
+        printf( "Selected mode :\n"
+                " Width : %d\n"
+                " Height : %d\n"
+                " PixFmt : %s\n"
+                " Field : %d\n",
+                fmt.fmt.pix.width,
+                fmt.fmt.pix.height,
+                fourcc,
+                fmt.fmt.pix.field);*/
+
+        // Control bounds : -1 means "unsupported".
+        // NOTE(review): getGainBounds() may leave gMin/gMax unset on failure
+        // paths (it writes the members instead of the parameters) — the
+        // values printed below could then be uninitialized; confirm.
+        double eMin, eMax; int gMin, gMax;
+        getExposureBounds(eMin, eMax);
+        cout << "Min exposure : " << eMin << endl;
+        cout << "Max exposure : " << eMax << endl;
+
+        getGainBounds(gMin, gMax);
+        cout << "Min gain : " << gMin << endl;
+        cout << "Max gain : " << gMax << endl;
+
+        return true;
+
+    };
+
+    // Enumerates /dev/videoN nodes sequentially (stopping at the first
+    // missing one) and returns, for each device that can be opened and
+    // queried, a pair of (device number, readable description).
+    vector<pair<int,string>> CameraV4l2::getCamerasList() {
+
+        vector<pair<int,string>> camerasList;
+
+        bool loop = true;
+        int deviceNumber = 0;
+
+        do {
+
+            string devicePathStr = "/dev/video" + Conversion::intToString(deviceNumber);
+
+            // http://stackoverflow.com/questions/230062/whats-the-best-way-to-check-if-a-file-exists-in-c-cross-platform
+
+            if(access(devicePathStr.c_str(), F_OK) != -1 ) {
+
+                // The node exists : open it and query its card name.
+                // http://stackoverflow.com/questions/4290834/how-to-get-a-list-of-video-capture-devices-web-cameras-on-linux-ubuntu-c
+
+                int fd;
+
+                if((fd = open(devicePathStr.c_str(), O_RDONLY)) == -1){
+                    perror("Can't open device");
+                }else {
+
+                    struct v4l2_capability caps = {};
+
+                    if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &caps)) {
+                        cout << "Fail Querying Capabilities." << endl;
+                        perror("Querying Capabilities");
+                    }else {
+
+                        pair<int,string> c;
+                        c.first = deviceNumber;
+                        std::string s( reinterpret_cast< char const* >(caps.card) ) ;
+                        c.second = "NAME[" + s + "] SDK[V4L2]";
+                        camerasList.push_back(c);
+
+                    }
+
+                    // BUGFIX: only close a descriptor that was actually
+                    // opened; the previous code also called close() with the
+                    // invalid descriptor when open() failed.
+                    close(fd);
+                }
+
+                deviceNumber++;
+
+            } else {
+
+                // No more /dev/videoN nodes : stop scanning.
+                loop = false;
+
+            }
+
+        }while(loop);
+
+        return camerasList;
+
+    }
+
+    // Prints the list of V4L2 devices (/dev/videoN) to stdout.
+    // Returns false if any detected device could not be opened or queried.
+    bool CameraV4l2::listCameras() {
+
+        bool loop = true;
+        bool res = true;
+        int deviceNumber = 0;
+
+        cout << endl << "------------ USB2 CAMERAS WITH V4L2 ----------" << endl << endl;
+
+        do {
+
+            string devicePathStr = "/dev/video" + Conversion::intToString(deviceNumber);
+
+            // http://stackoverflow.com/questions/230062/whats-the-best-way-to-check-if-a-file-exists-in-c-cross-platform
+
+            if(access(devicePathStr.c_str(), F_OK) != -1 ) {
+
+                // The node exists : open it and print its card name.
+                // http://stackoverflow.com/questions/4290834/how-to-get-a-list-of-video-capture-devices-web-cameras-on-linux-ubuntu-c
+
+                int fd;
+
+                if((fd = open(devicePathStr.c_str(), O_RDONLY)) == -1){
+                    perror("Can't open device");
+                    res = false;
+                }else {
+
+                    struct v4l2_capability caps = {};
+
+                    if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &caps)) {
+                        perror("Querying Capabilities");
+                        res = false;
+                    }else {
+
+                        cout << "-> [" << deviceNumber << "] " << caps.card << endl;
+
+                    }
+
+                    // BUGFIX: close only descriptors that were successfully
+                    // opened (previously close(-1) was possible).
+                    close(fd);
+                }
+
+                deviceNumber++;
+
+            } else {
+
+                // file doesn't exist
+                if(deviceNumber == 0)
+                    cout << "-> No cameras detected ..." << endl;
+                loop = false;
+
+            }
+
+        }while(loop);
+
+        cout << endl << "------------------------------------------------" << endl << endl;
+
+        return res;
+
+    }
+
+    // Opens /dev/video<id> (non-blocking), caches the exposure and gain
+    // bounds and reads the current capture format back into mFormat.
+    // Returns false when the node is missing, is not a character device,
+    // or cannot be opened/queried.
+    bool CameraV4l2::createDevice(int id){
+
+        string deviceNameStr = "/dev/video" + Conversion::intToString(id);
+        // NOTE(review): mDeviceName stores the c_str() of a local string; the
+        // pointer dangles as soon as this function returns, yet it is used
+        // later in error messages (e.g. grabInitialization()). It should be
+        // an owned std::string member — confirm and fix in the header.
+        mDeviceName = deviceNameStr.c_str();
+
+        struct stat st;
+
+        // The node must exist and be a character device.
+        if (-1 == stat(mDeviceName, &st)) {
+            fprintf(stderr, "Cannot identify '%s': %d, %s\n", mDeviceName, errno, strerror(errno));
+            return false;
+        }
+
+        if (!S_ISCHR(st.st_mode)) {
+            fprintf(stderr, "%s is no device\n", mDeviceName);
+            return false;
+        }
+
+        // O_NONBLOCK : frame waits are done with select() in grabImage().
+        fd = open(mDeviceName, O_RDWR /* required */ | O_NONBLOCK, 0);
+
+        if (-1 == fd) {
+            fprintf(stderr, "Cannot open '%s': %d, %s\n", mDeviceName, errno, strerror(errno));
+            return false;
+        }
+
+        // Cache control bounds; -1 means "unsupported".
+        getExposureBounds(expMin, expMax);
+        getGainBounds(gainMin, gainMax);
+
+        memset(&mFormat, 0, sizeof(mFormat));
+        mFormat.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        // Preserve original settings as set by v4l2-ctl for example
+        if (-1 == xioctl(fd, VIDIOC_G_FMT, &mFormat)){
+            return false;
+        }
+
+        return true;
+
+    }
+
+    // Records the capture size requested by the caller. The values are only
+    // applied to the driver later, by the parameterless setSize() invoked
+    // from grabInitialization().
+    bool CameraV4l2::setSize(int width, int height, bool customSize) {
+
+        mCustomSize = customSize;
+        mWidth = width;
+        mHeight = height;
+
+        return true;
+
+    }
+
+    // Applies the frame size recorded by setSize(w,h,customSize) to mFormat:
+    // - customSize = true  --> the requested width/height (clamped/nearest)
+    // - customSize = false --> the maximum supported size
+    // For drivers exposing a discrete size list the supported size closest
+    // to the request is chosen; for stepwise ranges the request is clamped
+    // into the supported interval. Returns false if the driver enumerates
+    // no usable frame size.
+    bool CameraV4l2::setSize() {
+
+        int chooseWidth = 0;
+        int chooseHeight = 0;
+        bool discreteSize = false;
+
+        bool res = false;
+
+        struct v4l2_frmsizeenum frmsize;
+        memset(&frmsize, 0, sizeof(frmsize));
+        frmsize.pixel_format = mFormat.fmt.pix.pixelformat; // Necessary to set size.
+
+        while(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0) {
+
+            switch(frmsize.type) {
+
+                case V4L2_FRMSIZE_TYPE_DISCRETE :
+
+                    if(chooseHeight == 0 && chooseWidth == 0) {
+
+                        chooseHeight = frmsize.discrete.height;
+                        chooseWidth = frmsize.discrete.width;
+
+                    }else {
+
+                        // BUGFIX: frmsize.discrete.* are unsigned, so the
+                        // former 'mWidth - frmsize.discrete.width' wrapped
+                        // around for sizes larger than the request and broke
+                        // the nearest-size comparison. Compute distances in
+                        // signed arithmetic.
+                        if((abs(mWidth - chooseWidth) > abs(mWidth - (int)frmsize.discrete.width)) && (abs(mHeight - chooseHeight) > abs(mHeight - (int)frmsize.discrete.height))) {
+                            chooseWidth = frmsize.discrete.width;
+                            chooseHeight = frmsize.discrete.height;
+                        }
+
+                    }
+
+                    discreteSize = true;
+                    res = true;
+
+                    break;
+
+                case V4L2_FRMSIZE_TYPE_CONTINUOUS :
+
+                    break;
+
+                case V4L2_FRMSIZE_TYPE_STEPWISE :
+
+                    // Clamp the request into [min, max]; without a custom
+                    // size, use the maximum the driver supports.
+                    if(mCustomSize) {
+
+                        if(mWidth >= frmsize.stepwise.min_width && mWidth <=frmsize.stepwise.max_width) {
+
+                            mFormat.fmt.pix.width = mWidth;
+
+                        }else {
+
+                            mFormat.fmt.pix.width = frmsize.stepwise.max_width;
+
+                        }
+
+                        if(mHeight >= frmsize.stepwise.min_height && mHeight <=frmsize.stepwise.max_height) {
+
+                            mFormat.fmt.pix.height = mHeight;
+
+                        }else {
+
+                            mFormat.fmt.pix.height = frmsize.stepwise.max_height;
+
+                        }
+
+                    }else {
+
+                        mFormat.fmt.pix.height = frmsize.stepwise.max_height;
+                        mFormat.fmt.pix.width = frmsize.stepwise.max_width;
+
+                    }
+
+                    res = true;
+
+                    break;
+
+            }
+
+            frmsize.index++;
+
+        }
+
+        // A discrete list was enumerated : apply the closest match.
+        if(discreteSize && res) {
+
+            mFormat.fmt.pix.height = chooseHeight;
+            mFormat.fmt.pix.width = chooseWidth;
+
+        }
+
+        return res;
+
+    }
+
+
+    // Not implemented for V4L2 : devices are addressed by index
+    // (/dev/videoN), so there is no separate name lookup. Always returns
+    // false and leaves 'device' untouched.
+    bool CameraV4l2::getDeviceNameById(int id, string &device){
+
+        return false;
+
+    }
+
+    // Prints driver/device identification of the currently opened device to
+    // stdout. Returns false when no device is open or the query fails.
+    bool CameraV4l2::getCameraName() {
+
+        // Guard clause : nothing to query without an open descriptor.
+        if(fd == -1)
+            return false;
+
+        struct v4l2_capability caps = {};
+
+        // http://linuxtv.org/downloads/v4l-dvb-apis/vidioc-querycap.html
+
+        if (xioctl(fd, VIDIOC_QUERYCAP, &caps) == -1) {
+            perror("Querying Capabilities");
+            return false;
+        }
+
+        cout << "Driver name : " << caps.driver << endl;
+        cout << "Device name : " << caps.card << endl;
+        cout << "Device location : " << caps.bus_info << endl;
+        printf ("Driver version : %u.%u.%u\n",(caps.version >> 16) & 0xFF, (caps.version >> 8) & 0xFF, caps.version & 0xFF);
+        cout << "Capabilities : " << caps.capabilities << endl;
+
+        return true;
+
+    }
+
+    // Verifies the device supports the selected I/O method, resets cropping
+    // to the driver default, applies the requested frame size and works
+    // around drivers reporting inconsistent line/image sizes.
+    // BUGFIX: failures now return false instead of killing the whole
+    // process with exit()/errno_exit(), so the caller can report the error
+    // (the function already had a bool return that callers check).
+    bool CameraV4l2::grabInitialization(){
+
+        struct v4l2_capability cap;
+        struct v4l2_cropcap cropcap;
+        struct v4l2_crop crop;
+
+        unsigned int min;
+
+        if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap))
+        {
+            if (EINVAL == errno)
+                fprintf(stderr, "%s is no V4L2 device\n", mDeviceName);
+            else
+                perror("VIDIOC_QUERYCAP");
+            return false;
+        }
+
+        if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
+        {
+            fprintf(stderr, "%s is no video capture device\n", mDeviceName);
+            return false;
+        }
+
+        // The capability required depends on the I/O method in use.
+        switch (io)
+        {
+            case IO_METHOD_READ:
+
+                if (!(cap.capabilities & V4L2_CAP_READWRITE))
+                {
+                    fprintf(stderr, "%s does not support read i/o\n", mDeviceName);
+                    return false;
+                }
+                break;
+
+            case IO_METHOD_MMAP:
+            case IO_METHOD_USERPTR:
+
+                if (!(cap.capabilities & V4L2_CAP_STREAMING))
+                {
+                    fprintf(stderr, "%s does not support streaming i/o\n", mDeviceName);
+                    return false;
+                }
+                break;
+        }
+
+        // Select video input, video standard and tune here.
+
+        memset(&cropcap, 0, sizeof(cropcap));
+
+        cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+        if (0 == xioctl(fd, VIDIOC_CROPCAP, &cropcap)) {
+            crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+            crop.c = cropcap.defrect; // reset to default
+
+            if (-1 == xioctl(fd, VIDIOC_S_CROP, &crop)) {
+                switch (errno) {
+                    case EINVAL:
+                        // Cropping not supported.
+                        break;
+                    default:
+                        // Errors ignored.
+                        break;
+                }
+            }
+        } else {
+            // Errors ignored : VIDIOC_CROPCAP is optional.
+        }
+
+        // Apply the requested capture size (nearest supported size).
+
+        if(!setSize())
+            return false;
+
+        if(-1 == xioctl(fd, VIDIOC_S_FMT, &mFormat)) {
+            cout << "Fail to set fmt." << endl;
+            return false;
+        }
+
+        /* Buggy driver paranoia. */
+        min = mFormat.fmt.pix.width * 2;
+        if (mFormat.fmt.pix.bytesperline < min)
+            mFormat.fmt.pix.bytesperline = min;
+        min = mFormat.fmt.pix.bytesperline * mFormat.fmt.pix.height;
+        if (mFormat.fmt.pix.sizeimage < min)
+            mFormat.fmt.pix.sizeimage = min;
+
+        return true;
+
+    }
+
+    // Releases the capture buffers (according to the active I/O method) and
+    // closes the device. Safe to call more than once : buffer and
+    // descriptor state is reset after release.
+    void CameraV4l2::grabCleanse(){
+
+        // Uninit device
+
+        unsigned int i;
+
+        if(buffers != NULL) {
+            switch (io) {
+
+                case IO_METHOD_READ:
+                    free(buffers[0].start);
+                    break;
+
+                case IO_METHOD_MMAP:
+
+                    for (i = 0; i < n_buffers; ++i)
+                        if (-1 == munmap(buffers[i].start, buffers[i].length))
+                            errno_exit("munmap");
+                    break;
+
+                case IO_METHOD_USERPTR:
+                    for (i = 0; i < n_buffers; ++i)
+                        free(buffers[i].start);
+                    break;
+            }
+
+            free(buffers);
+            // BUGFIX: reset the pointer so a second call does not double-free.
+            buffers = NULL;
+        }
+
+        // Close device
+
+        // BUGFIX: only close when a device is actually open; the previous
+        // code called close(-1) (and then aborted via errno_exit) when no
+        // device had been opened.
+        if (fd != -1 && -1 == close(fd))
+            errno_exit("close");
+
+        fd = -1;
+
+    }
+
+    // Allocates the capture buffers for the selected I/O method, queues
+    // them all with the driver, then switches streaming on.
+    // NOTE(review): errno_exit() presumably terminates the process, so the
+    // 'return false' statements following it look unreachable — confirm.
+    bool CameraV4l2::acqStart(){
+
+        // INIT DEVICE
+
+        unsigned int i;
+        enum v4l2_buf_type type;
+
+        switch (io) {
+            case IO_METHOD_READ:
+                init_read(mFormat.fmt.pix.sizeimage);
+                break;
+
+            case IO_METHOD_MMAP:
+                init_mmap();
+                break;
+
+            case IO_METHOD_USERPTR:
+                init_userp(mFormat.fmt.pix.sizeimage);
+                break;
+        }
+
+        // START CAPTURING
+
+        switch (io) {
+            case IO_METHOD_READ:
+            {
+                /* Nothing to do. */
+                break;
+            }
+            case IO_METHOD_MMAP:
+            {
+                // Queue every mmap'ed buffer, then start the stream.
+                for (i = 0; i < n_buffers; ++i)
+                {
+                    struct v4l2_buffer buf;
+
+                    memset(&buf, 0, sizeof(buf));
+                    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+                    buf.memory = V4L2_MEMORY_MMAP;
+                    buf.index = i;
+
+                    if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
+                    {
+                        errno_exit("VIDIOC_QBUF");
+                        return false;
+                    }
+                }
+                type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+                if (-1 == xioctl(fd, VIDIOC_STREAMON, &type))
+                {
+                    errno_exit("VIDIOC_STREAMON");
+                    return false;
+                }
+                break;
+            }
+            case IO_METHOD_USERPTR:
+            {
+                // Same as mmap, but the driver is handed user-space pointers.
+                for (i = 0; i < n_buffers; ++i)
+                {
+                    struct v4l2_buffer buf;
+
+                    memset(&buf, 0, sizeof(buf));
+                    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+                    buf.memory = V4L2_MEMORY_USERPTR;
+                    buf.index = i;
+                    buf.m.userptr = (unsigned long)buffers[i].start;
+                    buf.length = buffers[i].length;
+
+                    if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
+                    {
+                        errno_exit("VIDIOC_QBUF");
+                        return false;
+                    }
+                }
+                type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+                if (-1 == xioctl(fd, VIDIOC_STREAMON, &type))
+                {
+                    errno_exit("VIDIOC_STREAMON");
+                    return false;
+                }
+                break;
+            }
+        }
+
+        return true;
+    }
+
+    // Stops streaming on the device. Nothing has to be done for the
+    // blocking-read method; for mmap/userptr the stream is switched off.
+    void CameraV4l2::acqStop(){
+
+        enum v4l2_buf_type type;
+
+        switch (io)
+        {
+            case IO_METHOD_READ:
+                /* Nothing to do. */
+                break;
+
+            case IO_METHOD_MMAP:
+            case IO_METHOD_USERPTR:
+
+                type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+                if (xioctl(fd, VIDIOC_STREAMOFF, &type) == -1)
+                    errno_exit("VIDIOC_STREAMOFF");
+
+                break;
+        }
+
+    }
+
+    // Grabs the next frame from the running stream : waits (select, 2 s
+    // timeout) for the descriptor to become readable, dequeues a buffer via
+    // read_frame(), timestamps the frame (UTC) and converts the raw buffer
+    // into newFrame.mImg. Returns false if the conversion fails.
+    bool CameraV4l2::grabImage(Frame &newFrame) {
+
+        // CLEANUP: the previous version allocated an unused temporary Mat
+        // and an unused size variable on every call; both removed.
+        unsigned char* ImageBuffer = NULL;
+
+        bool grabSuccess = false;
+
+        for(;;) {
+
+            fd_set fds;
+            struct timeval tv;
+            int r;
+
+            FD_ZERO(&fds);
+            FD_SET(fd, &fds);
+
+            /* Timeout. */
+            tv.tv_sec = 2;
+            tv.tv_usec = 0;
+
+            r = select(fd + 1, &fds, NULL, NULL, &tv);
+
+            if(-1 == r) {
+                // Retry when interrupted by a signal.
+                if (EINTR == errno)
+                    continue;
+                errno_exit("select");
+            }
+
+            if(0 == r) {
+                fprintf(stderr, "select timeout\n");
+                BOOST_LOG_SEV(logger, warning) << "Select timeout !";
+                //exit(EXIT_FAILURE);
+            }
+
+            if(read_frame()) {
+                grabSuccess = true;
+                break;
+            }
+            /* EAGAIN - continue select loop. */
+        }
+
+        if(grabSuccess) {
+
+            ImageBuffer = (unsigned char*)buffers[buf.index].start;
+
+            // Timestamp the frame with the acquisition date (UTC).
+            boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
+            newFrame.mDate = TimeDate::splitIsoExtendedDate(to_iso_extended_string(time));
+
+            double fps = 0;
+            if(getFPS(fps))
+                newFrame.mFps = fps;
+            newFrame.mFormat = MONO8;
+            newFrame.mSaturatedValue = 255;
+            newFrame.mFrameNumber = mFrameCounter;
+            newFrame.mExposure = exp;
+            newFrame.mGain = gain;
+            mFrameCounter++;
+
+            if(!convertImage(ImageBuffer, newFrame.mImg))
+                grabSuccess = false;
+
+        }
+
+        return grabSuccess;
+
+    }
+
+    // Performs a one-shot acquisition on camera 'camID' : opens the device,
+    // applies size/format/exposure/gain from 'frame', captures one image
+    // (cycling through every buffer so the settings take effect), fills
+    // 'frame' and tears the device down again.
+    bool CameraV4l2::grabSingleImage(Frame &frame, int camID){
+
+        createDevice(camID);
+
+        if(frame.mHeight > 0 && frame.mWidth > 0) {
+
+            cout << "Setting size to : " << frame.mWidth << "x" << frame.mHeight << endl;
+            mWidth = frame.mWidth;
+            mHeight = frame.mHeight;
+            mCustomSize = true;
+
+        }
+
+        grabInitialization();
+
+        acqStart();
+
+        cout << ">> Height : " << mFormat.fmt.pix.height << endl;
+        cout << ">> Width : " << mFormat.fmt.pix.width << endl;
+
+        if(!setPixelFormat(frame.mFormat))
+            return false;
+
+        if(expMin != -1 && expMax != -1)
+            setExposureTime(frame.mExposure);
+        // BUGFIX: the gain setting was guarded by the *exposure* bounds
+        // (copy-paste error); it must be guarded by the gain bounds.
+        if(gainMin != -1 && gainMax != -1)
+            setGain(frame.mGain);
+
+        // CLEANUP: removed the unused temporary Mat and size variable that
+        // were allocated here.
+        unsigned char* ImageBuffer = NULL;
+
+        bool grabSuccess = false;
+
+        for(unsigned int i = 0; i < n_buffers; i++) {
+
+            for(;;) {
+
+                fd_set fds;
+                struct timeval tv;
+                int r;
+
+                FD_ZERO(&fds);
+                FD_SET(fd, &fds);
+
+                /* Timeout : extended when a long exposure is requested. */
+                int timeout = 2;
+
+                if(frame.mExposure/1000000 > 1)
+                    timeout = timeout + (int)(frame.mExposure/1000000);
+
+                tv.tv_sec = timeout;
+                tv.tv_usec = 0;
+
+                r = select(fd + 1, &fds, NULL, NULL, &tv);
+
+                if(-1 == r) {
+                    if (EINTR == errno)
+                        continue;
+                    errno_exit("select");
+                }
+
+                if(0 == r) {
+                    fprintf(stderr, "select timeout\n");
+                    exit(EXIT_FAILURE);
+                }
+
+                if(read_frame()) {
+                    grabSuccess = true;
+                    break;
+                }
+                /* EAGAIN - continue select loop. */
+            }
+        }
+
+        if(grabSuccess) {
+
+            ImageBuffer = (unsigned char*)buffers[buf.index].start;
+
+            // Timestamp the frame with the acquisition date (UTC).
+            boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
+            frame.mDate = TimeDate::splitIsoExtendedDate(to_iso_extended_string(time));
+
+            double fps = 0;
+            if(getFPS(fps))
+                frame.mFps = fps;
+            frame.mSaturatedValue = 255;
+            frame.mFrameNumber = mFrameCounter;
+
+            // BUGFIX: print the buffer's real length; sizeof() on the
+            // 'start' member only reported the size of a pointer.
+            cout << "size image buffer : " << buffers[buf.index].length << endl;
+            if(!convertImage(ImageBuffer, frame.mImg))
+                grabSuccess = false;
+
+        }
+
+        acqStop();
+        grabCleanse();
+
+        return grabSuccess;
+
+    }
+
+    // Converts the raw V4L2 buffer to an 8-bit grey Mat according to the
+    // negotiated pixel format. Returns false for a NULL buffer or an
+    // unsupported format.
+    // ROBUSTNESS: intermediate BGR buffers now use std::vector instead of
+    // raw malloc (no leak on exception) and instead of variable-length
+    // arrays (non-standard C++, stack-overflow risk for large frames).
+    bool CameraV4l2::convertImage(unsigned char* buffer, Mat &image) {
+
+        bool res = false;
+
+        if(buffer != NULL) {
+
+            switch(mFormat.fmt.pix.pixelformat) {
+
+                case V4L2_PIX_FMT_GREY :
+
+                    {
+                        // Already 8-bit grey : plain copy.
+                        image = Mat(mFormat.fmt.pix.height, mFormat.fmt.pix.width, CV_8UC1, Scalar(0));
+                        memcpy(image.ptr(), buffer, mFormat.fmt.pix.width*mFormat.fmt.pix.height);
+                        res = true;
+                    }
+
+                    break;
+
+                case V4L2_PIX_FMT_YUYV :
+
+                    {
+                        vector<unsigned char> bigbuffer(mFormat.fmt.pix.height * mFormat.fmt.pix.width * 3);
+                        Mat dispimg(mFormat.fmt.pix.height, mFormat.fmt.pix.width, CV_8UC3, bigbuffer.data());
+                        PixFmtConv::YUYV_to_BGR24(buffer, bigbuffer.data(), mFormat.fmt.pix.width, mFormat.fmt.pix.height, mFormat.fmt.pix.bytesperline);
+                        cvtColor(dispimg,image,CV_BGR2GRAY);
+                        res = true;
+                    }
+
+                    break;
+
+                case V4L2_PIX_FMT_UYVY :
+
+                    {
+                        vector<unsigned char> bigbuffer(mFormat.fmt.pix.height * mFormat.fmt.pix.width * 3);
+                        PixFmtConv::UYVY_to_BGR24(buffer, bigbuffer.data(), mFormat.fmt.pix.width, mFormat.fmt.pix.height, mFormat.fmt.pix.bytesperline);
+                        Mat dispimg(mFormat.fmt.pix.height, mFormat.fmt.pix.width, CV_8UC3, bigbuffer.data());
+                        cvtColor(dispimg,image,CV_BGR2GRAY);
+                        res = true;
+                    }
+
+                    break;
+
+                case V4L2_PIX_FMT_RGB565 :
+
+                    {
+                        vector<unsigned char> bigbuffer(mFormat.fmt.pix.height * mFormat.fmt.pix.width * 3);
+                        PixFmtConv::RGB565_to_BGR24(buffer, bigbuffer.data(), mFormat.fmt.pix.width, mFormat.fmt.pix.height);
+                        Mat dispimg(mFormat.fmt.pix.height, mFormat.fmt.pix.width, CV_8UC3, bigbuffer.data());
+                        cvtColor(dispimg,image,CV_BGR2GRAY);
+                        res = true;
+                    }
+
+                    break;
+
+                case V4L2_PIX_FMT_BGR24 :
+
+                    {
+                        Mat dispimg = Mat(mFormat.fmt.pix.height, mFormat.fmt.pix.width, CV_8UC3, buffer);
+                        cvtColor(dispimg,image,CV_BGR2GRAY);
+                        res = true;
+                    }
+
+                    break;
+
+                case V4L2_PIX_FMT_RGB24 :
+
+                    {
+                        // NOTE(review): converted with CV_BGR2GRAY like BGR24,
+                        // so the R and B weights are swapped for RGB data —
+                        // confirm whether this is intentional.
+                        Mat dispimg = Mat(mFormat.fmt.pix.height, mFormat.fmt.pix.width, CV_8UC3, buffer);
+                        cvtColor(dispimg,image,CV_BGR2GRAY);
+                        res = true;
+                    }
+
+                    break;
+
+                default :
+
+                    // Unsupported pixel format : leave res = false.
+                    break;
+
+            }
+
+        }
+
+        return res;
+
+    }
+
+    // Queries the supported range of V4L2_CID_EXPOSURE_ABSOLUTE.
+    // eMin/eMax are set to -1 when the control is unsupported or cannot be
+    // queried. BUGFIX: a query error used to exit() the whole process and
+    // left the outputs uninitialized; it is now reported and treated as
+    // "unsupported".
+    void CameraV4l2::getExposureBounds(double &eMin, double &eMax){
+
+        struct v4l2_queryctrl queryctrl;
+        memset(&queryctrl, 0, sizeof(queryctrl));
+        queryctrl.id = V4L2_CID_EXPOSURE_ABSOLUTE;
+
+        if (-1 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl)) {
+
+            if (errno != EINVAL)
+                perror("VIDIOC_QUERYCTRL");
+            else
+                printf(">> V4L2_CID_EXPOSURE_ABSOLUTE is not supported\n");
+
+            eMin = -1;
+            eMax = -1;
+
+        } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+
+            printf(">> V4L2_CID_EXPOSURE_ABSOLUTE is not supported\n");
+            eMin = -1;
+            eMax = -1;
+
+        } else {
+
+            eMin = queryctrl.minimum;
+            eMax = queryctrl.maximum;
+
+        }
+
+    }
+
+    // Returns the current absolute exposure time. V4L2 exposes
+    // V4L2_CID_EXPOSURE_ABSOLUTE in 100 µs units, hence the * 100 below
+    // (presumably yielding µs overall — see setExposureTime(), which
+    // divides by 100 symmetrically). Returns 0 when the control cannot be
+    // read.
+    double CameraV4l2::getExposureTime(){
+
+        struct v4l2_control control;
+        memset(&control, 0, sizeof(control));
+        control.id = V4L2_CID_EXPOSURE_ABSOLUTE;
+
+        if(0 == ioctl(fd, VIDIOC_G_CTRL, &control)) {
+
+            return control.value * 100;
+
+        // EINVAL means the control is unsupported : ignore it silently.
+        // (The previous comment mentioned V4L2_CID_CONTRAST — a copy-paste
+        // leftover; the control queried here is EXPOSURE_ABSOLUTE.)
+        } else if (errno != EINVAL) {
+
+            perror("VIDIOC_G_CTRL");
+
+        }
+
+        return 0;
+
+    }
+
+    // Queries the supported range of V4L2_CID_GAIN. gMin/gMax are set to -1
+    // when the control is unsupported or cannot be queried.
+    // BUGFIX: the failure branches used to write the *members*
+    // gainMin/gainMax instead of the output parameters, leaving the
+    // caller's variables uninitialized (see getInfos()). They now set the
+    // parameters; callers such as createDevice() pass the members in, so
+    // the member-caching behavior there is unchanged. A query error no
+    // longer exit()s the process.
+    void CameraV4l2::getGainBounds(int &gMin, int &gMax){
+
+        struct v4l2_queryctrl queryctrl;
+        memset(&queryctrl, 0, sizeof(queryctrl));
+        queryctrl.id = V4L2_CID_GAIN;
+
+        if (-1 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl)) {
+
+            if (errno != EINVAL)
+                perror("VIDIOC_QUERYCTRL");
+            else
+                printf(">> V4L2_CID_GAIN is not supported\n");
+
+            gMin = -1;
+            gMax = -1;
+
+        } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+
+            printf(">> V4L2_CID_GAIN is not supported\n");
+            gMin = -1;
+            gMax = -1;
+
+        } else {
+
+            gMin = queryctrl.minimum;
+            gMax = queryctrl.maximum;
+
+        }
+
+    }
+
+    // Stub : pixel-format read-back is currently disabled (the previous
+    // implementation is kept below, commented out). Always reports success
+    // without touching 'format'.
+    bool CameraV4l2::getPixelFormat(CamPixFmt &format){
+
+        /*char fourcc[5] = {0};
+
+        struct v4l2_format fmt;
+        memset(&fmt, 0, sizeof(fmt));
+        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        fmt.fmt.pix.width = mWidth;
+        fmt.fmt.pix.height = mHeight;
+        fmt.fmt.pix.field = V4L2_FIELD_NONE;
+
+        if (-1 == xioctl(fd, VIDIOC_G_FMT, &fmt)) {
+            perror("Getting Pixel Format");
+            return false;
+        }
+
+        // http://linuxtv.org/downloads/v4l-dvb-apis/V4L2-PIX-FMT-GREY.html
+        if(fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_GREY) {
+
+            strncpy(fourcc, (char *)&fmt.fmt.pix.pixelformat, 4);
+            cout << "Pixel format : V4L2_PIX_FMT_GREY" << endl;
+            format = MONO_8;
+
+        // http://linuxtv.org/downloads/v4l-dvb-apis/V4L2-PIX-FMT-Y12.html
+        }else if(fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_Y12) {
+
+            strncpy(fourcc, (char *)&fmt.fmt.pix.pixelformat, 4);
+            cout << "Pixel format : V4L2_PIX_FMT_Y12" << endl;
+            format = MONO_12;
+
+        }*/
+
+        return true;
+    }
+
+    // Prints every frame size supported for the current pixel format : a
+    // discrete list, or a stepwise range. Returns true only when at least
+    // one discrete size was enumerated.
+    bool CameraV4l2::getFrameSizeEnum() {
+
+        bool res = false;
+
+        struct v4l2_frmsizeenum frmsize;
+        memset(&frmsize, 0, sizeof(frmsize));
+        frmsize.pixel_format = mFormat.fmt.pix.pixelformat;
+
+        while(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0) {
+
+            if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+
+                cout << "[" << frmsize.index << "] : " << frmsize.discrete.width << "x" << frmsize.discrete.height << endl;
+                res = true;
+
+            }else if(frmsize.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
+
+                cout << "Min width : " << frmsize.stepwise.min_width << endl;
+                cout << "Max width : " << frmsize.stepwise.max_width << endl;
+                cout << "Step width : " << frmsize.stepwise.step_width << endl;
+
+                cout << "Min height : " << frmsize.stepwise.min_height << endl;
+                cout << "Max height : " << frmsize.stepwise.max_height << endl;
+                cout << "Step height : " << frmsize.stepwise.step_height << endl;
+
+            }
+            // V4L2_FRMSIZE_TYPE_CONTINUOUS : nothing to print.
+
+            frmsize.index++;
+
+        }
+
+        return res;
+
+    }
+
+    // Reads back the current capture size from the driver into w/h.
+    // Returns false (with w = h = 0) when the format query fails.
+    bool CameraV4l2::getFrameSize(int &w, int &h) {
+
+        struct v4l2_format fmt;
+        memset(&fmt, 0, sizeof(fmt));
+        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        fmt.fmt.pix.field = V4L2_FIELD_NONE;
+
+        w = 0;
+        h = 0;
+
+        if (xioctl(fd, VIDIOC_G_FMT, &fmt) == -1) {
+            perror("Getting Pixel Format");
+            return false;
+        }
+
+        w = fmt.fmt.pix.width;
+        h = fmt.fmt.pix.height;
+
+        return true;
+
+    }
+
+    // Enumerates the frame intervals supported for the current format and
+    // size, appending the corresponding rates (fps = 1 / interval) to
+    // 'values' and printing each one. Handles both a discrete interval
+    // list and a stepwise/continuous range. Returns true if at least one
+    // rate was found.
+    bool CameraV4l2::getFpsEnum(vector &values){
+
+        bool res = false;
+
+        struct v4l2_frmivalenum temp;
+        memset(&temp, 0, sizeof(temp));
+        temp.pixel_format = mFormat.fmt.pix.pixelformat;
+        temp.width = mFormat.fmt.pix.width;
+        temp.height = mFormat.fmt.pix.height;
+
+        // First call fills temp.type, which selects the branch below.
+        ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &temp);
+        if (temp.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
+            while (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &temp) != -1) {
+                // Interval is numerator/denominator seconds; fps is the inverse.
+                values.push_back(float(temp.discrete.denominator)/temp.discrete.numerator);
+                cout << values.back() << " fps" << endl;
+                temp.index += 1;
+                res = true;
+            }
+        }
+        float stepval = 0;
+        if (temp.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
+            // Continuous range : sample it in 1-second interval steps.
+            stepval = 1;
+        }
+        if (temp.type == V4L2_FRMIVAL_TYPE_STEPWISE || temp.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
+            float minval = float(temp.stepwise.min.numerator)/temp.stepwise.min.denominator;
+            float maxval = float(temp.stepwise.max.numerator)/temp.stepwise.max.denominator;
+            if (stepval == 0) {
+                stepval = float(temp.stepwise.step.numerator)/temp.stepwise.step.denominator;
+            }
+            // cval iterates over frame *intervals* in seconds.
+            for (float cval = minval; cval <= maxval; cval += stepval) {
+                cout << 1/cval << " fps" << endl;
+                values.push_back(1.0/cval);
+                res = true;
+            }
+        }
+
+        return res;
+
+    }
+
+    // Reads the current frame rate (VIDIOC_G_PARM) and returns it as frames
+    // per second. Returns false when the ioctl fails or the driver reports
+    // an invalid interval.
+    bool CameraV4l2::getFPS(double &value) {
+
+        struct v4l2_streamparm streamparm;
+        struct v4l2_fract *tpf;
+
+        // BUGFIX: zero-initialize the struct; it was previously passed to
+        // the ioctl with indeterminate contents apart from 'type'.
+        memset(&streamparm, 0, sizeof(streamparm));
+        streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        if (-1 == ioctl(fd, VIDIOC_G_PARM, &streamparm)) {
+            cout << "Fail to read fps value." << endl;
+            return false;
+        }
+
+        tpf = &streamparm.parm.capture.timeperframe;
+
+        // ROBUSTNESS: guard the division against a zero numerator.
+        if (tpf->numerator == 0) {
+            cout << "Fail to read fps value." << endl;
+            return false;
+        }
+
+        // timeperframe is an interval (numerator/denominator seconds);
+        // fps is its inverse.
+        value = (double)tpf->denominator / (double)tpf->numerator;
+
+        return true;
+    }
+
+    // Returns the device's card name as reported by VIDIOC_QUERYCAP, or an
+    // empty string when the query fails.
+    string CameraV4l2::getModelName(){
+
+        struct v4l2_capability caps = {};
+
+        // http://linuxtv.org/downloads/v4l-dvb-apis/vidioc-querycap.html
+
+        if (xioctl(fd, VIDIOC_QUERYCAP, &caps) == -1) {
+            perror("Querying device's name");
+            return "";
+        }
+
+        return (char*)caps.card;
+
+    }
+
+    // Sets the absolute exposure time. V4L2_CID_EXPOSURE_ABSOLUTE works in
+    // 100 µs units, hence the division by 100 below (so 'val' is presumably
+    // expressed in µs — confirm against callers). Auto-exposure is first
+    // switched to manual. Returns true on success or when exposure is
+    // unsupported (expMin == expMax == -1), false when 'val' is out of
+    // bounds or an ioctl fails.
+    // NOTE(review): the 'expMin > 0' guard rejects a legitimate minimum
+    // bound of 0 — confirm against the drivers in use.
+    bool CameraV4l2::setExposureTime(double val){
+
+        if(expMax > 0 && expMin > 0 && val >= expMin && val <= expMax) {
+
+            // ************************ DISABLE AUTO EXPOSURE *****************************
+
+            struct v4l2_queryctrl queryctrl1;
+            struct v4l2_control control1;
+            memset(&queryctrl1, 0, sizeof(queryctrl1));
+            queryctrl1.id = V4L2_CID_EXPOSURE_AUTO;
+
+            if(-1 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl1)) {
+
+                if(errno != EINVAL) {
+
+                    perror("VIDIOC_QUERYCTRL");
+                    return false;
+
+                }else {
+
+                    printf(">> V4L2_CID_EXPOSURE_AUTO is not supported\n");
+
+                }
+
+            }else if (queryctrl1.flags & V4L2_CTRL_FLAG_DISABLED) {
+
+                printf(">> V4L2_CID_EXPOSURE_AUTO is not supported\n");
+
+            }else {
+
+                // Switch the camera to manual exposure mode.
+                memset(&control1, 0, sizeof (control1));
+                control1.id = V4L2_CID_EXPOSURE_AUTO;
+                control1.value = V4L2_EXPOSURE_MANUAL;
+
+                if (-1 == ioctl(fd, VIDIOC_S_CTRL, &control1)) {
+                    perror("VIDIOC_S_CTRL");
+                    return false;
+                }
+
+                cout << ">> Manual exposure setted." << endl;
+
+            }
+
+            // ************************ SET AUTO EXPOSURE *****************************
+
+            struct v4l2_queryctrl queryctrl;
+            struct v4l2_control control;
+            memset(&queryctrl, 0, sizeof(queryctrl));
+            queryctrl.id = V4L2_CID_EXPOSURE_ABSOLUTE;
+
+            if(-1 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl)) {
+
+                if(errno != EINVAL) {
+
+                    perror("VIDIOC_QUERYCTRL");
+                    return false;
+
+                }else {
+
+                    printf(">> V4L2_CID_EXPOSURE_ABSOLUTE is not supported\n");
+
+                }
+
+            }else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+
+                printf(">> V4L2_CID_EXPOSURE_ABSOLUTE is not supported\n");
+
+            }else {
+
+                memset(&control, 0, sizeof (control));
+                control.id = V4L2_CID_EXPOSURE_ABSOLUTE;
+
+                /*
+                V4L2_CID_EXPOSURE_ABSOLUTE integer
+                Determines the exposure time of the camera sensor.
+                The exposure time is limited by the frame interval.
+                Drivers should interpret the values as 100 µs units, w
+                here the value 1 stands for 1/10000th of a second, 10000
+                for 1 second and 100000 for 10 seconds.
+                */
+
+                control.value = val/100;
+                exp = val;
+                printf(">> V4L2_CID_EXPOSURE_ABSOLUTE setted to %f (%f with V4L2)\n", val, val/100);
+
+                if (-1 == ioctl(fd, VIDIOC_S_CTRL, &control)) {
+                    perror("VIDIOC_S_CTRL");
+                    return false;
+                }
+
+            }
+
+            return true;
+
+        }else {
+
+            // Bounds of -1 mean the control is unavailable : treat the
+            // request as a harmless no-op.
+            if(expMin == -1 && expMax == -1) {
+
+                cout << "Exposure time not supported." << endl;
+                return true;
+
+            }
+
+            cout << "> Exposure value (" << val << ") is not in range [ " << expMin << " - " << expMax << " ]" << endl;
+
+        }
+
+        return false;
+    }
+
+    // Sets the analog gain (V4L2_CID_GAIN) after validating it against the
+    // bounds cached by createDevice(). Returns true on success or when the
+    // control is unsupported (bounds == -1), false otherwise.
+    bool CameraV4l2::setGain(int val){
+
+        // BUGFIX: many drivers report a minimum gain of 0; the previous
+        // 'gainMin > 0' test rejected every value on such devices.
+        if(gainMax > 0 && gainMin >= 0 && val >= gainMin && val <= gainMax) {
+
+            struct v4l2_queryctrl queryctrl;
+            struct v4l2_control control;
+            memset(&queryctrl, 0, sizeof(queryctrl));
+            queryctrl.id = V4L2_CID_GAIN;
+
+            if(-1 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl)) {
+
+                if(errno != EINVAL) {
+
+                    perror("VIDIOC_QUERYCTRL");
+                    return false;
+
+                }else {
+
+                    printf(">> V4L2_CID_GAIN is not supported\n");
+
+                }
+
+            }else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+
+                printf(">> V4L2_CID_GAIN is not supported\n");
+
+            }else {
+
+                memset(&control, 0, sizeof (control));
+                control.id = V4L2_CID_GAIN;
+                control.value = val;
+                gain = val;
+
+                if (-1 == ioctl(fd, VIDIOC_S_CTRL, &control)) {
+                    perror("VIDIOC_S_CTRL");
+                    return false;
+                }
+
+            }
+
+            return true;
+
+        }else {
+
+            // Bounds of -1 mean the control is unavailable : no-op success.
+            if(gainMin == -1 && gainMax == -1) {
+
+                cout << "Gain not supported." << endl;
+                return true;
+
+            }
+
+            cout << "> Gain value (" << val << ") is not in range [ " << gainMin << " - " << gainMax << " ]" << endl;
+
+        }
+
+        return false;
+
+    }
+
+    // Applies 'fps' to the driver (VIDIOC_S_PARM). For a discrete interval
+    // list the rate is programmed only when it matches an enumerated value
+    // exactly; for a continuous range it is programmed as a 1000-based
+    // fraction. Returns false when VIDIOC_S_PARM fails.
+    // NOTE(review): the V4L2_FRMIVAL_TYPE_STEPWISE branch computes the
+    // supported range but never programs the driver — confirm whether this
+    // is intentional.
+    bool CameraV4l2::setFPS(double fps){
+
+        bool res = true;
+        struct v4l2_frmivalenum temp;
+        memset(&temp, 0, sizeof(temp));
+        temp.pixel_format = mFormat.fmt.pix.pixelformat;
+        temp.width = mFormat.fmt.pix.width;
+        temp.height = mFormat.fmt.pix.height;
+
+        // First call fills temp.type, which selects the branch below.
+        ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &temp);
+
+        if (temp.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
+
+            vector frameIntervals;
+            while (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &temp) != -1) {
+
+                // Program the interval whose rate matches the request exactly.
+                if(fps == (float(temp.discrete.denominator)/temp.discrete.numerator)) {
+
+                    struct v4l2_streamparm setfps;
+                    struct v4l2_fract *tpf;
+                    memset (&setfps, 0, sizeof (setfps));
+                    setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+                    tpf = &setfps.parm.capture.timeperframe;
+
+                    tpf->numerator = temp.discrete.numerator;
+                    //cout << "numerator : " << tpf->numerator << endl;
+                    tpf->denominator = temp.discrete.denominator;//cvRound(fps);
+                    //cout << "denominator : " << tpf->denominator << endl;
+                    //retval=1;
+                    if (ioctl(fd, VIDIOC_S_PARM, &setfps) < 0) {
+                        cout << "Failed to set camera FPS:" << strerror(errno) << endl;
+                        res = false;
+                        break;
+                    }
+
+                    break;
+
+                }
+
+                temp.index += 1;
+
+            }
+        }
+
+        float stepval = 0;
+        if (temp.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
+            stepval = 1;
+            cout << "V4L2_FRMIVAL_TYPE_CONTINUOUS" << endl;
+            // Continuous range : express the interval as 1000 / (fps*1000).
+            struct v4l2_streamparm setfps;
+            struct v4l2_fract *tpf;
+            memset (&setfps, 0, sizeof (setfps));
+            setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+            tpf = &setfps.parm.capture.timeperframe;
+
+            tpf->numerator = 1000;
+            //cout << "numerator : " << tpf->numerator << endl;
+            tpf->denominator = fps*1000;//cvRound(fps);
+            //cout << "denominator : " << tpf->denominator << endl;
+            //retval=1;
+            if (ioctl(fd, VIDIOC_S_PARM, &setfps) < 0) {
+                cout << "Failed to set camera FPS:" << strerror(errno) << endl;
+                res = false;
+
+            }else{
+
+                // The driver may have adjusted the interval : report what
+                // was actually programmed.
+                if (!tpf->denominator || !tpf->numerator)
+                    printf("Invalid frame rate\n");
+                else
+                    printf("Frame rate set to %.3f fps\n",
+                            1.0 * tpf->denominator / tpf->numerator);
+            }
+        }
+
+        if (temp.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
+            cout << "V4L2_FRMIVAL_TYPE_STEPWISE" << endl;
+            float minval = float(temp.stepwise.min.numerator)/temp.stepwise.min.denominator;
+            float maxval = float(temp.stepwise.max.numerator)/temp.stepwise.max.denominator;
+            if (stepval == 0) {
+                stepval = float(temp.stepwise.step.numerator)/temp.stepwise.step.denominator;
+            }
+            /*for (float cval = minval; cval <= maxval; cval += stepval) {
+                cout << 1/cval << " fps" << endl;
+
+            }*/
+
+        }
+
+        return res;
+
+    }
+
+ // Select the capture pixel format.
+ // Enumerates the device's supported formats (VIDIOC_ENUM_FMT) and, when a
+ // fourcc matching the string name of 'depth' is found, stores the matching
+ // V4L2 pixel format into mFormat.fmt.pix.pixelformat. The format is only
+ // recorded here — mFormat is applied to the device elsewhere.
+ // Returns false (and logs a critical message) when the requested format is
+ // not advertised by the device.
+ bool CameraV4l2::setPixelFormat(CamPixFmt depth){
+
+ struct v4l2_fmtdesc fmtdesc = {0};
+ fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ char fourcc[5] = {0};
+ bool fmtFound = false;
+ char c, e;
+ mFormat.fmt.pix.field = V4L2_FIELD_NONE;
+ // NOTE(review): 'EParser fmt' appears to have lost its template argument
+ // (extraction garbling?) — presumably EParser<CamPixFmt>; verify upstream.
+ EParser fmt;
+ string fstring = fmt.getStringEnum(depth);
+
+ while (0 == xioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) {
+
+ // fourcc is the 4-byte pixel format code, NUL-terminated by the
+ // zero-initialisation of the 5-byte array above.
+ strncpy(fourcc, (char *)&fmtdesc.pixelformat, 4);
+
+ if(string(fourcc) == fstring) {
+
+ fmtFound = true;
+
+ // Map the freeture enum onto the corresponding V4L2 constant.
+ // Note MONO8 and GREY both map to V4L2_PIX_FMT_GREY.
+ switch(depth) {
+
+ case MONO8 :
+
+ {
+
+ mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_GREY;
+
+ }
+
+ break;
+
+ case GREY :
+
+ {
+
+ mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_GREY;
+
+ }
+
+ break;
+
+ case YUYV :
+
+ {
+
+ mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
+
+ }
+
+ break;
+
+ case UYVY :
+
+ {
+
+ mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
+
+ }
+
+ break;
+
+ case RGB565 :
+
+ {
+
+ mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB565;
+
+ }
+
+ break;
+
+ case BGR3 :
+
+ {
+
+ mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_BGR24;
+
+ }
+
+ break;
+
+ case RGB3 :
+
+ {
+
+ mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
+
+ }
+
+ break;
+
+ }
+
+
+ break;
+ }
+
+ fmtdesc.index++;
+ }
+
+ if(!fmtFound) {
+
+ BOOST_LOG_SEV(logger, critical) << ">> FORMAT " << fstring << " NOT SUPPORTED !";
+ return false;
+ }
+
+ //strncpy(fourcc, (char *)&mFormat.fmt.pix.pixelformat, 4);
+
+ return true;
+
+ }
+
+ // Print the device's pixel formats on stdout, then print the subset that
+ // freeture itself knows about (i.e. fourcc strings that parse as CamPixFmt
+ // enum values via EParser).
+ void CameraV4l2::getAvailablePixelFormats(){
+
+ struct v4l2_fmtdesc fmtdesc = {0};
+ fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ char fourcc[5] = {0};
+ // NOTE(review): element type lost (extraction garbling?) — presumably
+ // vector<string>; verify against upstream sources.
+ vector pixfmt;
+ char c, e;
+ struct v4l2_format pfmt;
+ memset(&pfmt, 0, sizeof(pfmt));
+ pfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ pfmt.fmt.pix.field = V4L2_FIELD_NONE;
+
+ cout << ">> Device pixel formats :" << endl;
+
+ // Enumerate every format the driver advertises.
+ while (0 == xioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) {
+
+ strncpy(fourcc, (char *)&fmtdesc.pixelformat, 4);
+
+ // Flag bits: 1 = compressed ('C'), 2 = emulated ('E').
+ c = fmtdesc.flags & 1? 'C' : ' ';
+ e = fmtdesc.flags & 2? 'E' : ' ';
+ //printf(" %s : %c%c %s\n", fourcc, c, e, fmtdesc.description);
+ string fmt = string(fourcc);
+ std::transform(fmt.begin(), fmt.end(),fmt.begin(), ::toupper);
+ pixfmt.push_back(fmt);
+ cout << "- " << fmt << endl;
+ fmtdesc.index++;
+ }
+
+ // Compare found pixel formats to currently formats supported by freeture
+
+ cout << endl << ">> Available pixel formats :" << endl;
+ EParser fmt;
+
+ for( int i = 0; i != pixfmt.size(); i++ ) {
+
+ if(fmt.isEnumValue(pixfmt.at(i))) {
+
+ cout << "- " << pixfmt.at(i) << " available --> ID : " << fmt.parseEnum(pixfmt.at(i)) << endl;
+
+ }
+
+ }
+
+ }
+
+ // Print 's' with the current errno description on stderr, then terminate
+ // the whole process with EXIT_FAILURE. Does not return.
+ void CameraV4l2::errno_exit (const char *s) {
+ fprintf(stderr, "%s error %d, %s\n", s, errno, strerror(errno));
+ exit(EXIT_FAILURE);
+ }
+
+ // ioctl wrapper that retries while the call is interrupted by a signal
+ // (EINTR). Returns the final ioctl result (-1 on error, errno set).
+ int CameraV4l2::xioctl (int fh, int request, void *arg) {
+ int r;
+
+ do
+ {
+ r = ioctl(fh, request, arg);
+ } while (-1 == r && EINTR == errno);
+
+ return r;
+ }
+
+ // Fetch one frame using the active I/O method ('io'):
+ // - IO_METHOD_READ : blocking read() into buffers[0];
+ // - IO_METHOD_MMAP : dequeue a mapped buffer (VIDIOC_DQBUF) and
+ // immediately re-queue it (VIDIOC_QBUF);
+ // - IO_METHOD_USERPTR : same dequeue/requeue cycle for user pointers, with
+ // a lookup to find which of our buffers was returned.
+ // Returns 1 on success, 0 when no frame is ready yet (EAGAIN); any other
+ // error terminates the process via errno_exit().
+ int CameraV4l2::read_frame (void) {
+ //struct v4l2_buffer buf;
+ unsigned int i;
+
+ switch (io)
+ {
+ case IO_METHOD_READ:
+ {
+
+ if (-1 == read(fd, buffers[0].start, buffers[0].length))
+ {
+ switch (errno)
+ {
+ case EAGAIN:
+ return 0;
+
+ case EIO:
+ /* Could ignore EIO, see spec. */
+
+ /* fall through */
+
+ default:
+ errno_exit("read");
+ }
+ }
+
+ break;
+ }
+ case IO_METHOD_MMAP:
+ {
+
+ memset(&buf, 0, sizeof(buf));
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+
+ if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf))
+ {
+ switch (errno)
+ {
+ case EAGAIN:
+ return 0;
+
+ case EIO:
+ /* Could ignore EIO, see spec. */
+
+ /* fall through */
+
+ default:
+ errno_exit("VIDIOC_DQBUF");
+ }
+ }
+
+ // The driver tells us which of our mapped buffers was filled.
+ assert(buf.index < n_buffers);
+
+ // Give the buffer straight back to the driver for reuse.
+ if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
+ errno_exit("VIDIOC_QBUF");
+ break;
+ }
+ case IO_METHOD_USERPTR:
+ {
+
+ memset(&buf, 0, sizeof(buf));
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_USERPTR;
+
+ if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf))
+ {
+ switch (errno)
+ {
+ case EAGAIN:
+ return 0;
+
+ case EIO:
+ /* Could ignore EIO, see spec. */
+
+ /* fall through */
+
+ default:
+ {
+ errno_exit("VIDIOC_DQBUF");
+ }
+ }
+ }
+
+ // Identify which of our user-space buffers the driver returned by
+ // matching pointer and length.
+ for (i = 0; i < n_buffers; ++i)
+ {
+ if (buf.m.userptr == (unsigned long)buffers[i].start
+ && buf.length == buffers[i].length)
+ break;
+ }
+ assert(i < n_buffers);
+
+ if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))
+ {
+ errno_exit("VIDIOC_QBUF");
+ }
+ break;
+ }
+ }
+
+ return 1;
+ }
+
+ // Allocate the single buffer used by the IO_METHOD_READ path.
+ // 'buffer_size' is the image size in bytes. Exits the process on
+ // allocation failure. The buffer is released elsewhere (see grabCleanse /
+ // the stop path) — not by this class's destructor in this function's scope.
+ void CameraV4l2::init_read (unsigned int buffer_size) {
+ buffers = (buffer*)(calloc(1, sizeof(*buffers)));
+
+ if (!buffers)
+ {
+ fprintf(stderr, "Out of memory\n");
+ exit(EXIT_FAILURE);
+ }
+
+ buffers[0].length = buffer_size;
+ buffers[0].start = malloc(buffer_size);
+
+ if (!buffers[0].start)
+ {
+ fprintf(stderr, "Out of memory\n");
+ exit(EXIT_FAILURE);
+ }
+ }
+
+ // Set up memory-mapped I/O (IO_METHOD_MMAP): request 4 buffers from the
+ // driver (VIDIOC_REQBUFS), then query (VIDIOC_QUERYBUF) and mmap() each
+ // one. The driver may grant fewer buffers than requested; fewer than 2 is
+ // treated as fatal. All failures terminate the process.
+ void CameraV4l2::init_mmap (void) {
+
+ struct v4l2_requestbuffers req;
+
+ memset(&req, 0, sizeof(req));
+
+ req.count = 4;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_MMAP;
+
+ if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req))
+ {
+ // EINVAL specifically means the device has no mmap streaming support.
+ if (EINVAL == errno)
+ {
+ fprintf(stderr, "%s does not support "
+ "memory mapping\n", mDeviceName);
+ exit(EXIT_FAILURE);
+ }
+ else
+ {
+ errno_exit("VIDIOC_REQBUFS");
+ }
+ }
+
+ // Fixed: removed a stray line-continuation backslash that used to trail
+ // this condition ("if (req.count < 2) \") — harmless today but fragile.
+ if (req.count < 2)
+ {
+ fprintf(stderr, "Insufficient buffer memory on %s\n",
+ mDeviceName);
+ exit(EXIT_FAILURE);
+ }
+
+ buffers = (buffer*)calloc(req.count, sizeof(*buffers));
+
+ if (!buffers)
+ {
+ fprintf(stderr, "Out of memory\n");
+ exit(EXIT_FAILURE);
+ }
+
+ // Map each driver buffer into our address space.
+ for (n_buffers = 0; n_buffers < req.count; ++n_buffers)
+ {
+ struct v4l2_buffer buf;
+
+ memset(&buf, 0, sizeof(buf));
+
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ buf.index = n_buffers;
+
+ if (-1 == xioctl(fd, VIDIOC_QUERYBUF, &buf))
+ errno_exit("VIDIOC_QUERYBUF");
+
+ buffers[n_buffers].length = buf.length;
+
+ buffers[n_buffers].start =
+ mmap(NULL /* start anywhere */,
+ buf.length,
+ PROT_READ | PROT_WRITE /* required */,
+ MAP_SHARED /* recommended */,
+ fd, buf.m.offset);
+
+ if (MAP_FAILED == buffers[n_buffers].start)
+ errno_exit("mmap");
+ }
+
+ }
+
+ // Set up user-pointer I/O (IO_METHOD_USERPTR): ask the driver for 4
+ // buffers, then malloc() one application buffer of 'buffer_size' bytes per
+ // granted buffer. All failures terminate the process.
+ void CameraV4l2::init_userp (unsigned int buffer_size) {
+
+ struct v4l2_requestbuffers req;
+
+ memset(&req, 0, sizeof(req));
+
+ req.count = 4;
+ req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ req.memory = V4L2_MEMORY_USERPTR;
+
+ if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req))
+ {
+ // EINVAL specifically means user-pointer streaming is unsupported.
+ if (EINVAL == errno)
+ {
+ fprintf(stderr, "%s does not support "
+ "user pointer i/o\n", mDeviceName);
+ exit(EXIT_FAILURE);
+ }
+ else
+ {
+ errno_exit("VIDIOC_REQBUFS");
+ }
+ }
+
+ // Fixed: allocate req.count buffers instead of a hard-coded 4 — per the
+ // V4L2 spec VIDIOC_REQBUFS may adjust the count, and init_mmap() already
+ // honours the granted value.
+ buffers = (buffer*)calloc(req.count, sizeof(*buffers));
+
+ if (!buffers)
+ {
+ fprintf(stderr, "Out of memory\n");
+ exit(EXIT_FAILURE);
+ }
+
+ for (n_buffers = 0; n_buffers < req.count; ++n_buffers)
+ {
+ buffers[n_buffers].length = buffer_size;
+ buffers[n_buffers].start = malloc(buffer_size);
+
+ if (!buffers[n_buffers].start)
+ {
+ fprintf(stderr, "Out of memory\n");
+ exit(EXIT_FAILURE);
+ }
+ }
+ }
+
+#endif
diff --git a/CameraV4l2.h b/CameraV4l2.h
new file mode 100644
index 0000000..d6ffc02
--- /dev/null
+++ b/CameraV4l2.h
@@ -0,0 +1,190 @@
+/* CameraV4l2.h
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau -- FRIPON-GEOPS-UPSUD
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 17/08/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraV4l2.h
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 17/08/2015
+*/
+
+#pragma once
+
+#include "config.h"
+
+#ifdef LINUX
+
+ #include "opencv2/highgui/highgui.hpp"
+ #include
+
+ #include
+ #include
+ #include "Frame.h"
+ #include "TimeDate.h"
+ #include "Camera.h"
+ #include
+
+ #include
+ #include
+ #include
+ #include
+
+
+ #include /* low-level i/o */
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+
+ #include
+
+ #define BOOST_LOG_DYN_LINK 1
+
+ #include "EParser.h"
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include
+ #include "ELogSeverityLevel.h"
+ #include "PixFmtConv.h"
+ #include
+
+ using namespace cv;
+ using namespace std;
+
+ // Camera implementation backed by the Video4Linux2 API (Linux only).
+ class CameraV4l2: public Camera {
+
+ private:
+
+ static boost::log::sources::severity_logger< LogSeverityLevel > logger;
+
+ // Static initializer: tags every log record with ClassName=CameraV4l2.
+ static class Init{
+
+ public:
+
+ Init(){
+
+ logger.add_attribute("ClassName", boost::log::attributes::constant("CameraV4l2"));
+
+ }
+
+ }initializer;
+
+ const char* mDeviceName; // e.g. "/dev/videoN" device path.
+ int fd; // File descriptor of the opened device.
+ double expMin, expMax, exp; // Exposure bounds and current value.
+ int gainMin, gainMax, gain; // Gain bounds and current value.
+ int mWidth, mHeight; // Current frame size.
+ int mFrameCounter;
+ struct v4l2_format mFormat; // Cached V4L2 format (pixel format, size, field).
+ bool mCustomSize; // True when a user-specified size is in use.
+
+ public :
+
+ // Low-level V4L2 helpers (modelled on the v4l2 capture example).
+ void init_userp (unsigned int buffer_size);
+ void init_mmap (void);
+ void init_read (unsigned int buffer_size);
+ int read_frame (void);
+ void errno_exit (const char *s);
+ int xioctl (int fh, int request, void *arg);
+
+
+ CameraV4l2();
+
+ ~CameraV4l2();
+
+ bool getInfos();
+
+ // NOTE(review): the element type of the returned vector was stripped by
+ // extraction ("vector>") — presumably vector<pair<int,string>>; verify.
+ vector> getCamerasList();
+
+ bool listCameras();
+
+ bool createDevice(int id);
+
+ bool setSize(int width, int height, bool customSize);
+
+ bool grabInitialization();
+
+ void grabCleanse();
+
+ bool acqStart();
+
+ void acqStop();
+
+ bool grabImage(Frame& newFrame);
+
+ bool grabSingleImage(Frame &frame, int camID);
+
+ bool getDeviceNameById(int id, string &device);
+
+ bool getCameraName();
+
+ void getExposureBounds(double &eMin, double &eMax);
+
+ void getGainBounds(int &gMin, int &gMax);
+
+ bool getPixelFormat(CamPixFmt &format);
+
+ bool getFrameSize(int &w, int &h);
+
+ bool getFrameSizeEnum();
+
+ bool getFPS(double &value);
+
+ bool getFpsEnum(vector &values);
+
+ string getModelName();
+
+ double getExposureTime();
+
+ bool setExposureTime(double exp);
+
+ bool setGain(int gain);
+
+ bool setFPS(double fps);
+
+ bool setPixelFormat(CamPixFmt depth);
+
+ void getAvailablePixelFormats();
+
+
+ private :
+
+ bool convertImage(unsigned char* buffer, Mat &image);
+
+ bool setSize();
+
+ };
+
+#endif
diff --git a/CameraVideo.cpp b/CameraVideo.cpp
new file mode 100644
index 0000000..c2321b4
--- /dev/null
+++ b/CameraVideo.cpp
@@ -0,0 +1,155 @@
+/*
+ CameraVideo.cpp
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 20/07/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraVideo.cpp
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 13/06/2014
+* \brief Acquisition thread with video in input.
+*/
+
+#include "CameraVideo.h"
+
+// Definition of the class-wide logger and its one-shot initializer (which
+// tags records with ClassName=CameraVideo — see the header).
+boost::log::sources::severity_logger< LogSeverityLevel > CameraVideo::logger;
+
+CameraVideo::Init CameraVideo::initializer;
+
+// Build a "camera" that replays a list of video files.
+// Opens the first video of 'videoList' immediately; throws a C-string when
+// the list is empty. NOTE(review): the vector element type was stripped by
+// extraction — presumably vector<string>; verify against the header.
+CameraVideo::CameraVideo(vector videoList, bool verbose):mVideoID(0), mFrameWidth(0), mFrameHeight(0), mReadDataStatus(false){
+
+ mVideoList = videoList;
+
+ // Open the video file for reading.
+ if(mVideoList.size()>0)
+ mCap = VideoCapture(videoList.front());
+ else
+ throw "No video path in input.";
+
+ // Video input has no controllable exposure/gain.
+ mExposureAvailable = false;
+ mGainAvailable = false;
+ mInputDeviceType = VIDEO;
+ mVerbose = verbose;
+
+}
+
+// Nothing to release explicitly: mCap (VideoCapture) cleans up itself.
+CameraVideo::~CameraVideo(void){
+
+}
+
+// Verify that the video opened in the constructor is usable.
+// Returns false (logging when verbose) when the capture is not open.
+bool CameraVideo::grabInitialization(){
+
+ if(!mCap.isOpened()) {
+
+ if(mVerbose) BOOST_LOG_SEV(logger,fail) << "Cannot open the video file";
+ if(mVerbose) cout << "Cannot open the video file" << endl;
+ return false;
+ }
+
+ return true;
+
+}
+
+// True once the current video has been fully read (set in grabImage()).
+bool CameraVideo::getStopStatus(){
+
+ return mReadDataStatus;
+
+}
+
+// True while there is at least one more video in mVideoList to play
+// (mVideoID is the index of the next/current video).
+bool CameraVideo::getDataSetStatus(){
+
+ if(mVideoID == mVideoList.size())
+ return false;
+ else
+ return true;
+}
+
+// Switch to the next video in the list (mVideoID was advanced by
+// grabImage() when the previous video ended). The first video (mVideoID==0)
+// was already opened by the constructor, so nothing is done in that case.
+// 'location' is unused here. Returns false when the next file cannot be
+// opened.
+bool CameraVideo::loadNextDataSet(string &location){
+
+ if(mVideoID != 0){
+
+ cout << "Change video : " << mVideoID << " - Path : " << mVideoList.at(mVideoID) << endl;
+
+ mCap = VideoCapture(mVideoList.at(mVideoID));
+
+ if(!mCap.isOpened()){
+
+ cout << "Cannot open the video file" << endl;
+ return false;
+
+ }else{
+
+ cout << "Success to open the video file" << endl;
+
+ }
+
+ // Cache the new video's frame geometry.
+ mFrameHeight = mCap.get(CV_CAP_PROP_FRAME_HEIGHT);
+
+ mFrameWidth = mCap.get(CV_CAP_PROP_FRAME_WIDTH);
+
+ // Reset the end-of-video flag for the new file.
+ mReadDataStatus = false;
+
+ }
+
+ return true;
+
+}
+
+// No device to create for video input — 'id' is ignored.
+bool CameraVideo::createDevice(int id) {
+ return true;
+}
+
+// Read the next frame of the current video into 'img'.
+// The frame is converted to single-channel grayscale and timestamped with
+// the current UTC time (not the video's own timestamps). On end of video,
+// advances mVideoID and sets mReadDataStatus, then returns false.
+bool CameraVideo::grabImage(Frame &img){
+
+ Mat frame;
+
+ if(mCap.read(frame)) {
+
+ //BGR (3 channels) to G (1 channel)
+ cvtColor(frame, frame, CV_BGR2GRAY);
+
+ boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
+
+ Frame f = Frame(frame, 0, 0, to_iso_extended_string(time));
+
+ img = f;
+ // Position bookkeeping comes from the capture itself.
+ img.mFrameNumber = mCap.get(CV_CAP_PROP_POS_FRAMES);
+ img.mFrameRemaining = mCap.get(CV_CAP_PROP_FRAME_COUNT) - mCap .get(CV_CAP_PROP_POS_FRAMES);
+ return true;
+
+ }
+
+ // Read failed: if no frames remain, move on to the next video in the list.
+ if(mCap.get(CV_CAP_PROP_FRAME_COUNT) - mCap .get(CV_CAP_PROP_POS_FRAMES) <=0) {
+
+ mVideoID++;
+ mReadDataStatus = true;
+
+ }
+
+ return false;
+}
+
+
diff --git a/CameraVideo.h b/CameraVideo.h
new file mode 100644
index 0000000..a827942
--- /dev/null
+++ b/CameraVideo.h
@@ -0,0 +1,141 @@
+/*
+ CameraVideo.h
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 20/10/2014
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraVideo.h
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 03/06/2014
+* \brief Acquisition thread with video in input.
+*/
+
+#pragma once
+#include "config.h"
+
+#include "opencv2/highgui/highgui.hpp"
+#include
+
+#ifdef LINUX
+#define BOOST_LOG_DYN_LINK 1
+#endif
+
+#include "Frame.h"
+#include "SaveImg.h"
+#include "TimeDate.h"
+#include "Conversion.h"
+#include "Camera.h"
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include "ELogSeverityLevel.h"
+#include
+#include
+
+using namespace boost::filesystem;
+using namespace cv;
+using namespace std;
+
+// Camera implementation that replays one or more video files as if they
+// were a live acquisition source. Exposure/gain/FPS setters are no-ops.
+class CameraVideo : public Camera{
+
+ private:
+
+ static boost::log::sources::severity_logger< LogSeverityLevel > logger;
+
+ // Static initializer: tags every log record with ClassName=CameraVideo.
+ static class Init{
+
+ public:
+
+ Init(){
+
+ logger.add_attribute("ClassName", boost::log::attributes::constant("CameraVideo"));
+
+ }
+
+ }initializer;
+
+ int mFrameWidth; // Width of the current video's frames.
+ int mFrameHeight; // Height of the current video's frames.
+ VideoCapture mCap; // OpenCV reader for the current video.
+ bool mReadDataStatus; // True once the current video is exhausted.
+ int mVideoID; // Index of the current video in mVideoList.
+ // NOTE(review): element type stripped by extraction — presumably
+ // vector<string>; verify upstream.
+ vector mVideoList;
+
+ public:
+
+ CameraVideo(vector videoList, bool verbose);
+
+ ~CameraVideo(void);
+
+ bool createDevice(int id);
+
+ bool acqStart() {return true;};
+
+ bool listCameras() {return true;};
+
+ bool grabImage(Frame &img);
+
+ bool grabInitialization();
+
+ bool getStopStatus();
+
+ /**
+ * Get data status : Is there another video to use in input ?
+ *
+ * @return If there is still a video to load in input.
+ */
+ bool getDataSetStatus();
+
+ /**
+ * Load next video if there is.
+ *
+ * @return Success status to load next data set.
+ */
+ bool loadNextDataSet(string &location);
+
+ bool getFPS(double &value) {value = 0; return false;};
+
+ // The following controls are meaningless for recorded video: no-ops.
+ bool setExposureTime(double exp){return true;};
+
+ bool setGain(int gain) {return true;};
+
+ bool setFPS(double fps){return true;};
+
+ bool setPixelFormat(CamPixFmt format){return true;};
+
+ bool setSize(int width, int height, bool customSize) {return true;};
+
+};
+
diff --git a/CameraWindows.cpp b/CameraWindows.cpp
new file mode 100644
index 0000000..ca26d36
--- /dev/null
+++ b/CameraWindows.cpp
@@ -0,0 +1,290 @@
+/*
+ CameraWindows.cpp
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 02/10/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraWindows.cpp
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 02/10/2015
+*/
+
+#include "CameraWindows.h"
+
+#ifdef WINDOWS
+
+// Windows camera backend built on the videoInput (DirectShow) library.
+// The device itself is selected later via createDevice().
+CameraWindows::CameraWindows() {
+
+ mVideoInput.setVerbose(false);
+ // videoInput exposes no usable exposure/gain control for freeture.
+ mExposureAvailable = false;
+ mGainAvailable = false;
+ mFrameCounter = 0;
+ mInputDeviceType = CAMERA;
+ mDevNumber = -1;
+ mBuffer = NULL;
+}
+
+// Release the pixel buffer allocated by grabInitialization()/grabSingleImage().
+CameraWindows::~CameraWindows()
+{
+ if(mBuffer != NULL)
+ delete[] mBuffer;
+}
+
+// Enumerate DirectShow capture devices via videoInput::listDevices().
+// Each entry pairs the device index with a display string "NAME[...] SDK[VI]".
+// NOTE(review): the template arguments of the return/pair types were
+// stripped by extraction — presumably vector<pair<int,string>>; verify.
+vector> CameraWindows::getCamerasList() {
+
+ vector> camerasList;
+
+ int nbCamFound = mVideoInput.listDevices(true);
+
+ if(nbCamFound > 0) {
+
+ for(int i = 0; i < nbCamFound; i++) {
+
+ pair c;
+ c.first = i;
+ c.second = "NAME[" + string(mVideoInput.getDeviceName(i)) + "] SDK[VI]";
+ camerasList.push_back(c);
+
+ }
+
+ }
+
+ return camerasList;
+
+}
+
+// Open the device at the requested size, or at the 640x480 default when
+// customSize is false. setupDevice() may pick the closest supported size —
+// callers re-read the actual size afterwards (see grabInitialization()).
+bool CameraWindows::setSize(int width, int height, bool customSize) {
+
+ if(customSize)
+ return mVideoInput.setupDevice(mDevNumber,width,height);
+ else
+ return mVideoInput.setupDevice(mDevNumber,640,480);
+
+}
+
+ // Capture one frame from device 'camID' into 'frame' (one-shot path).
+ // Sets up the device at frame.mWidth x frame.mHeight when provided
+ // (640x480 otherwise), grabs one BGR image, converts it to grayscale and
+ // returns it as a MONO8 Frame timestamped with the current UTC time.
+ // The device is stopped before returning in both the success and failure
+ // cases.
+ bool CameraWindows::grabSingleImage(Frame &frame, int camID) {
+
+ // NOTE(review): the return value is unused; listDevices() is kept for
+ // its side effect of (re)enumerating devices — confirm it is required
+ // before setupDevice().
+ int numDevices = mVideoInput.listDevices(true);
+
+ if(frame.mWidth > 0 && frame.mHeight > 0) {
+ if(!mVideoInput.setupDevice(camID, frame.mWidth, frame.mHeight))
+ return false;
+ }else{
+ if(!mVideoInput.setupDevice(camID, 640, 480))
+ return false;
+ }
+
+ // As requested width and height can not always be accomodated make sure to check the size once the device is setup
+ mWidth = mVideoInput.getWidth(camID);
+ mHeight = mVideoInput.getHeight(camID);
+ mSize = mVideoInput.getSize(camID);
+ cout << ">> Size setted to : " << mWidth << "x" << mHeight << endl;
+
+ // Create the buffer where the video will be captured.
+ // Fixed: free any buffer left over from a previous call before
+ // reallocating — repeated calls used to leak (deleting NULL is safe).
+ delete[] mBuffer;
+ mBuffer = new unsigned char[mSize];
+
+ // Disable autofocus and set focus to 0
+ // mVideoInput.setVideoSettingCamera(camID, CameraControl_Focus, mDefaultFocus, CameraControl_Flags_Manual);
+
+ // These are warn-only no-ops with the VI backend (see their definitions).
+ setPixelFormat(frame.mFormat);
+ setExposureTime(frame.mExposure);
+ setGain(frame.mGain);
+
+ bool success = mVideoInput.getPixels(camID, mBuffer, false, true);
+
+ if(success) {
+
+ cv::Mat image( mHeight, mWidth, CV_8UC3, mBuffer );
+ Mat img;
+ cv::cvtColor(image, img, CV_BGR2GRAY);
+ boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
+ string acquisitionDate = to_iso_extended_string(time);
+ frame = Frame(img, 0, 0, acquisitionDate);
+ frame.mFps = 0;
+ frame.mFormat = MONO8;
+ frame.mSaturatedValue = 255;
+ frame.mFrameNumber = 0;
+ mVideoInput.stopDevice(camID);
+ return true;
+
+ }
+
+ std::cout << "Error loading frame from camera (Windows)." << std::endl;
+ mVideoInput.stopDevice(camID);
+ return false;
+
+};
+
+// Remember the device index; the device is actually opened in setSize()/
+// grabInitialization().
+bool CameraWindows::createDevice(int id){
+
+ mDevNumber = id;
+ return true;
+
+}
+
+// Unsupported with the videoInput backend: warn and pretend success so the
+// generic acquisition flow carries on.
+bool CameraWindows::setPixelFormat(CamPixFmt format){
+ cout << ">> (WARNING) Can't set format with VI." << endl;
+ return true;
+}
+
+// Exposure control is unavailable: report sentinel bounds of -1/-1.
+void CameraWindows::getExposureBounds(double &eMin, double &eMax){
+ eMin = -1;
+ eMax = -1;
+}
+
+// Gain control is unavailable: report sentinel bounds of -1/-1.
+void CameraWindows::getGainBounds(int &gMin, int &gMax){
+ gMin = -1;
+ gMax = -1;
+}
+
+// FPS readback is unavailable: returns false with value = 0.
+bool CameraWindows::getFPS(double &value){
+ value = 0;
+ return false;
+}
+
+// Unsupported with the videoInput backend (see setPixelFormat above).
+bool CameraWindows::setExposureTime(double value){
+ cout << ">> (WARNING) Can't set exposure time with VI." << endl;
+ return true;
+}
+
+// Unsupported with the videoInput backend (see setPixelFormat above).
+bool CameraWindows::setGain(int value){
+ cout << ">> (WARNING) Can't set gain with VI." << endl;
+ return true;
+}
+
+// Request a target frame rate; DirectShow picks the closest supported rate.
+bool CameraWindows::setFPS(double value){
+
+ // If you want to capture at a different frame rate (default is 30) specify it here, you are not guaranteed to get this fps though.
+ // Call before setupDevice
+ // directshow will try and get the closest possible framerate to what is requested
+ mVideoInput.setIdealFramerate(mDevNumber, (int)value);
+
+ return true;
+
+}
+
+// Not implemented for this backend.
+bool CameraWindows::setFpsToLowerValue(){
+ return false;
+}
+
+// Prepare continuous acquisition: read back the size the device actually
+// accepted, allocate the pixel buffer, and force manual focus at 0.
+// NOTE(review): mBuffer is overwritten without freeing a previous
+// allocation — calling this twice (or after grabSingleImage) leaks; verify
+// intended call sequence.
+bool CameraWindows::grabInitialization() {
+
+ // As requested width and height can not always be accomodated make sure to check the size once the device is setup
+ mWidth = mVideoInput.getWidth(mDevNumber);
+ mHeight = mVideoInput.getHeight(mDevNumber);
+ mSize = mVideoInput.getSize(mDevNumber);
+ cout << "Default size : " << mWidth << "x" << mHeight << endl;
+
+ // Create the buffer where the video will be captured
+ mBuffer = new unsigned char[mSize];
+
+ // Disable autofocus and set focus to 0
+ mVideoInput.setVideoSettingCamera(mDevNumber, CameraControl_Focus, mDefaultFocus, CameraControl_Flags_Manual);
+
+
+ //long current_value,min_value,max_value,stepping_delta,flags,defaultValue;
+
+ //mVideoInput.getVideoSettingCamera(mDevNumber,mVideoInput.propBrightness ,min_value,max_value,stepping_delta,current_value,flags,defaultValue);
+ /*cout << "min: "<< min_value << endl;
+ cout << "max: "<< max_value << endl;
+ cout << "flags: "<< flags << endl;
+ cout << "SteppingDelta: "<< stepping_delta << endl;
+ cout << "currentValue: "<< current_value << endl;
+ cout << "defaultValue: "<< defaultValue << endl;*/
+ //mVideoInput.showSettingsWindow(mDevNumber);
+
+ return true;
+}
+
+// Nothing to start explicitly: videoInput streams once the device is set up.
+bool CameraWindows::acqStart(){return true;};
+
+// Grab the next frame during continuous acquisition.
+// Copies the device's BGR pixels into mBuffer, converts to grayscale and
+// wraps the result in a MONO8 Frame timestamped with the current UTC time;
+// mFrameCounter provides the running frame number.
+bool CameraWindows::grabImage(Frame &newFrame){
+
+ bool success = mVideoInput.getPixels(mDevNumber, mBuffer, false, true);
+
+ if(success) {
+
+ // Wrap the raw buffer without copying, then convert BGR -> grayscale.
+ cv::Mat image( mHeight, mWidth, CV_8UC3, mBuffer );
+ Mat img;
+ cv::cvtColor(image, img, CV_BGR2GRAY);
+ boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
+ string acquisitionDate = to_iso_extended_string(time);
+ newFrame = Frame(img, 0, 0, acquisitionDate);
+ newFrame.mFps = 0;
+ newFrame.mFormat = MONO8;
+ newFrame.mSaturatedValue = 255;
+ newFrame.mFrameNumber = mFrameCounter;
+ mFrameCounter++;
+ return true;
+
+ }
+
+ std::cout << "Error loading frame from camera (Windows)." << std::endl;
+ return false;
+
+}
+
+// Stop streaming and release the videoInput device.
+void CameraWindows::acqStop(){
+ mVideoInput.stopDevice(mDevNumber);
+}
+
+// Nothing extra to clean up for this backend.
+void CameraWindows::grabCleanse(){
+
+}
+
+// Pixel format readback is not supported by the VI backend.
+bool CameraWindows::getPixelFormat(CamPixFmt &format){
+ return false;
+}
+
+// Return exposure time in seconds.
+// DirectShow reports exposure as log2(seconds) (e.g. -3 => 1/8 s), so the
+// raw value is converted with 2^value. Returns 0.0 when the setting cannot
+// be queried.
+double CameraWindows::getExposureTime() {
+
+ long min = 0, max = 0, SteppingDelta = 0 , currentValue = 0, flags = 0, defaultValue = 0;
+
+ // https://msdn.microsoft.com/en-us/library/dd318253(v=vs.85).aspx
+ if(mVideoInput.getVideoSettingCamera(mDevNumber, CameraControl_Exposure, min, max, SteppingDelta, currentValue, flags, defaultValue)) {
+
+ double e = 0.0;
+
+ if(currentValue >= 0) {
+
+ e = pow(2,currentValue);
+
+ } else {
+
+ // Negative values encode fractional seconds: 2^-|value|.
+ e = 1.0 / pow(2,abs(currentValue));
+
+ }
+
+ return e;
+
+ }
+
+ return 0.0;
+
+}
+
+#endif
diff --git a/CameraWindows.h b/CameraWindows.h
new file mode 100644
index 0000000..2963cbd
--- /dev/null
+++ b/CameraWindows.h
@@ -0,0 +1,129 @@
+/*
+ CameraWindows.h
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 02/10/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CameraWindows.h
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 02/10/2015
+*/
+
+#pragma once
+
+#include "config.h"
+
+#ifdef WINDOWS
+
+#include
+#include "opencv2/highgui/highgui.hpp"
+#include "videoInput.h"
+#include "Frame.h"
+#include "Camera.h"
+
+// Camera implementation for Windows built on the videoInput (DirectShow)
+// library. Most camera controls are unsupported by this backend and are
+// implemented as warn-only stubs in the .cpp.
+class CameraWindows: public Camera {
+
+ private :
+
+ int mDevNumber; // DirectShow device index (set by createDevice).
+ videoInput mVideoInput; // Underlying capture library instance.
+ int mWidth; // Actual frame width granted by the device.
+ int mHeight; // Actual frame height granted by the device.
+ int mSize; // Frame buffer size in bytes.
+ unsigned char * mBuffer; // Raw BGR pixel buffer (owned; freed in dtor).
+ int mFrameCounter; // Running frame number for grabImage().
+
+ // see : http://msdn.microsoft.com/en-us/library/dd318253(v=vs.85).aspx
+ // and : http://msdn.microsoft.com/en-us/library/dd389148(v=vs.85).aspx
+ typedef enum {
+
+ CameraControl_Pan,
+ CameraControl_Tilt,
+ CameraControl_Roll,
+ CameraControl_Zoom,
+ CameraControl_Exposure,
+ CameraControl_Iris,
+ CameraControl_Focus
+
+ }CameraControlProperty;
+
+ // see : http://msdn.microsoft.com/en-us/library/dd318251(v=vs.85).aspx
+ typedef enum {
+
+ CameraControl_Flags_Auto = 0x0001,
+ CameraControl_Flags_Manual = 0x0002
+
+ }CameraControlFlags;
+
+ // Focus value forced at initialization (autofocus disabled).
+ static const long mDefaultFocus = 0;
+
+ public:
+
+ CameraWindows();
+
+ ~CameraWindows();
+
+ // NOTE(review): template arguments stripped by extraction — presumably
+ // vector<pair<int,string>>; verify upstream.
+ vector> getCamerasList();
+
+ bool grabSingleImage(Frame &frame, int camID);
+
+ bool createDevice(int id);
+
+ bool setPixelFormat(CamPixFmt format);
+
+ void getExposureBounds(double &eMin, double &eMax);
+
+ void getGainBounds(int &gMin, int &gMax);
+
+ bool getFPS(double &value);
+
+ bool setExposureTime(double value);
+
+ bool setGain(int value);
+
+ bool setFPS(double value);
+
+ bool setSize(int width, int height, bool customSize);
+
+ bool setFpsToLowerValue();
+
+ bool grabInitialization();
+
+ bool acqStart();
+
+ bool grabImage(Frame &newFrame);
+
+ void acqStop();
+
+ void grabCleanse();
+
+ bool getPixelFormat(CamPixFmt &format);
+
+ double getExposureTime();
+
+};
+#endif
+
diff --git a/CfgLoader.cpp b/CfgLoader.cpp
new file mode 100644
index 0000000..d919fff
--- /dev/null
+++ b/CfgLoader.cpp
@@ -0,0 +1,196 @@
+/*
+ CfgLoader.cpp
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 20/10/2014
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CfgLoader.cpp
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 13/06/2014
+* \brief Methods to fetch parameters from configuration file.
+*/
+
+#include "CfgLoader.h"
+
+// Default-construct an empty loader; call Load() to populate mData.
+CfgLoader::CfgLoader(void){}
+
+// Drop every key/value pair previously loaded.
+void CfgLoader::Clear(){
+
+ mData.clear();
+
+}
+
+// Parse the configuration file at 'file' into mData.
+// Format: one "key = value" per line; '#' starts a comment (rest of the
+// line is ignored); a non-empty line without '=' is treated as a
+// continuation of the previous key's value. Entries accumulate into mData
+// (existing contents are NOT cleared — call Clear() first if needed).
+// Returns false only when the file cannot be opened.
+bool CfgLoader::Load(const string& file){
+
+ ifstream inFile(file.c_str());
+
+ if (!inFile.good()){
+ return false;
+ }
+
+ string prevKey = "";
+
+ while (inFile.good() && ! inFile.eof()){
+
+ string line;
+ getline(inFile, line);
+
+ // filter out comments
+ if (!line.empty()){
+
+ // Fixed: use string::size_type instead of int — string::find()
+ // returns npos (SIZE_MAX), whose narrowing to int is
+ // implementation-defined and only compared correctly by accident.
+ string::size_type pos = line.find('#');
+
+ if (pos != string::npos){
+
+ line = line.substr(0, pos);
+
+ }
+ }
+
+ // split line into key and value
+ if (!line.empty()){
+
+ string::size_type pos = line.find('=');
+
+ // '=' found: split into key and value. (The previous comment here
+ // wrongly said "= not found".)
+ if (pos != string::npos){
+
+ string key = Trim(line.substr(0, pos));
+ string value = Trim(line.substr(pos + 1));
+
+ if (!key.empty() && !value.empty()){
+
+ prevKey = key;
+ mData[key] = value;
+
+ }
+
+ }else if(line.size() > 1 && !prevKey.empty()){
+
+ // No '=': append the line to the previous key's value.
+ mData[prevKey] += Trim(line);
+
+ }
+ }
+ }
+
+ return true;
+}
+
+// True when 'key' was loaded from the configuration file.
+bool CfgLoader::Contains(const string& key) const{
+
+ return mData.find(key) != mData.end();
+}
+
+// Fetch the raw string value for 'key'. Returns false (value untouched)
+// when the key is absent. The typed overloads below all delegate here.
+bool CfgLoader::Get(const string& key, string& value) const{
+
+ // NOTE(review): iterator type garbled by extraction ("map::...") —
+ // presumably map<string,string>::const_iterator; verify the header.
+ map::const_iterator iter = mData.find(key);
+
+ if(iter != mData.end()){
+
+ value = iter->second;
+ return true;
+
+ }else{
+
+ return false;
+ }
+}
+
+// Fetch 'key' converted with atoi (yields 0 on non-numeric text — no error
+// is reported for a malformed value).
+bool CfgLoader::Get(const string& key, int& value) const{
+
+ string str;
+
+ if(Get(key, str)){
+
+ value = atoi(str.c_str());
+ return true;
+
+ }else{
+
+ return false;
+ }
+}
+
+// Fetch 'key' converted with atol (same caveat as the int overload).
+bool CfgLoader::Get(const string& key, long& value) const{
+
+ string str;
+
+ if(Get(key, str)){
+
+ value = atol(str.c_str());
+ return true;
+
+ }else{
+
+ return false;
+ }
+}
+
+// Fetch 'key' converted with atof (same caveat as the int overload).
+bool CfgLoader::Get(const string& key, double& value) const{
+
+ string str;
+
+ if(Get(key, str)){
+
+ value = atof(str.c_str());
+ return true;
+
+ }else{
+
+ return false;
+ }
+}
+
+// Fetch 'key' as a bool: exactly "true" maps to true, anything else (e.g.
+// "TRUE", "1") maps to false.
+bool CfgLoader::Get(const string& key, bool& value) const{
+
+ string str;
+
+ if(Get(key, str)){
+
+ value = (str == "true");
+ return true;
+
+ }else{
+
+ return false;
+ }
+}
+
+// Strip leading and trailing spaces/tabs from 'str'.
+// Returns the empty string when 'str' contains only whitespace.
+string CfgLoader::Trim(const string& str){
+
+ // Fixed: use string::size_type instead of int — find_first_not_of()
+ // returns npos (SIZE_MAX), whose narrowing to int is
+ // implementation-defined and only compared correctly by accident.
+ string::size_type first = str.find_first_not_of(" \t");
+
+ if(first != string::npos){
+
+ string::size_type last = str.find_last_not_of(" \t");
+
+ return str.substr(first, last - first + 1);
+
+ }else{
+
+ return "";
+ }
+}
diff --git a/CfgLoader.h b/CfgLoader.h
new file mode 100644
index 0000000..01d0ea4
--- /dev/null
+++ b/CfgLoader.h
@@ -0,0 +1,138 @@
+/*
+ CfgLoader.h
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+*
+* This file is part of: freeture
+*
+* Copyright: (C) 2014-2015 Yoan Audureau
+* FRIPON-GEOPS-UPSUD-CNRS
+*
+* License: GNU General Public License
+*
+* FreeTure is free software: you can redistribute it and/or modify
+* it under the terms of the GNU General Public License as published by
+* the Free Software Foundation, either version 3 of the License, or
+* (at your option) any later version.
+* FreeTure is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+* GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License
+* along with FreeTure. If not, see .
+*
+* Last modified: 20/10/2014
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file CfgLoader.h
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 03/06/2014
+* \brief Load parameters from a configuration file.
+*/
+
+#pragma once
+
+#include
+#include
+#include
+#include