diff --git a/AcqSchedule.cpp b/AcqSchedule.cpp new file mode 100644 index 0000000..fadfd77 --- /dev/null +++ b/AcqSchedule.cpp @@ -0,0 +1,49 @@ +/* + AcqSchedule.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see <http://www.gnu.org/licenses/>. +* +* Last modified: 20/07/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file AcqSchedule.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 19/06/2014 +* \brief +*/ + +#include "AcqSchedule.h" + +AcqSchedule::AcqSchedule(int H, int M, int S, int E, int G, int F, int N): + mH(H), mM(M), mS(S), mE(E), mG(G), mN(N), mF(F) { + +} + +AcqSchedule::AcqSchedule(): + mH(0), mM(0), mS(0), mE(0), mG(0), mN(0), mF(0) { + +} + +AcqSchedule::~AcqSchedule() {}; + diff --git a/AcqSchedule.h b/AcqSchedule.h new file mode 100644 index 0000000..58adc70 --- /dev/null +++ b/AcqSchedule.h @@ -0,0 +1,145 @@ +/* + AcqSchedule.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau -- FRIPON-GEOPS-UPSUD +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under 
the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see <http://www.gnu.org/licenses/>. +* +* Last modified: 20/10/2014 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file AcqSchedule.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 19/06/2014 +* \brief +*/ + +#pragma once + +#include <string> +#include <iostream> + +using namespace std; + +class AcqSchedule{ + + private: + + int mH; // Hours + int mM; // Minutes + int mS; // Seconds + int mE; // Exposure time + int mG; // Gain + int mN; // Repetition number + int mF; // Format + string mDate; + + public: + + /** + * Constructor. + * + * @param H Hour. + * @param M Minutes. + * @param S Seconds. + * @param E Exposure time. + * @param G Gain. + * @param F Format. + * @param N Repetition number. + */ + AcqSchedule(int H, int M, int S, int E, int G, int F, int N); + + /** + * Constructor. + * + */ + AcqSchedule(); + + /** + * Destructor. + * + */ + ~AcqSchedule(); + + /** + * Get acquisition hours. + * + * @return Hours. + */ + int getH() {return mH;}; + + /** + * Get acquisition minutes. + * + * @return Minutes. + */ + int getM() {return mM;}; + + /** + * Get acquisition seconds. + * + * @return Seconds. + */ + int getS() {return mS;}; + + /** + * Get acquisition exposure time value. + * + * @return Exposure time. + */ + int getE() {return mE;}; + + /** + * Get acquisition gain. + * + * @return Gain. + */ + int getG() {return mG;}; + + /** + * Get acquisition format. + * + * @return Format : 8 or 12. 
+ */ + int getF() {return mF;}; + + /** + * Get acquisition repetition number. + * + * @return Repetition number. + */ + int getN() {return mN;}; + + /** + * Set acquisition date. + * + * @param Date : YYYY-MM-DDTHH:MM:SS,fffffffff + */ + void setDate(string date) {mDate = date;}; + + /** + * Get acquisition date. + * + * @return Date : YYYY-MM-DDTHH:MM:SS,fffffffff + */ + string getDate() {return mDate;}; + +}; diff --git a/AcqThread.cpp b/AcqThread.cpp new file mode 100644 index 0000000..696cc02 --- /dev/null +++ b/AcqThread.cpp @@ -0,0 +1,1223 @@ +/* + AcqThread.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2016 Yoan Audureau, Chiara Marmo +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see <http://www.gnu.org/licenses/>. +* +* Last modified: 03/10/2016 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file AcqThread.cpp +* \author Yoan Audureau, Chiara Marmo -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 21/01/2015 +* \brief Acquisition thread. 
+*/ + +#include "AcqThread.h" + +boost::log::sources::severity_logger< LogSeverityLevel > AcqThread::logger; + +AcqThread::Init AcqThread::initializer; + +AcqThread::AcqThread( boost::circular_buffer *fb, +// vector *frame_sprite, + boost::mutex *fb_m, + boost::condition_variable *fb_c, + bool *sSignal, + boost::mutex *sSignal_m, + boost::condition_variable *sSignal_c, + bool *dSignal, + boost::mutex *dSignal_m, + boost::condition_variable *dSignal_c, + DetThread *detection, + StackThread *stack, + int cid, + dataParam dp, + stackParam sp, + stationParam stp, + detectionParam dtp, + cameraParam acq, + framesParam fp, + videoParam vp, + fitskeysParam fkp) { + + frameBuffer = fb; + // frameSprite = frame_sprite; + frameBuffer_mutex = fb_m; + frameBuffer_condition = fb_c; + stackSignal = sSignal; + stackSignal_mutex = sSignal_m; + stackSignal_condition = sSignal_c; + detSignal = dSignal; + detSignal_mutex = dSignal_m; + detSignal_condition = dSignal_c; + pDetection = detection; + pStack = stack; + mThread = NULL; + //sprite = NULL; + mMustStop = false; + mDevice = NULL; + mThreadTerminated = false; + mNextAcqIndex = 0; + pExpCtrl = NULL; + mDeviceID = cid; + mdp = dp; + msp = sp; + mstp = stp; + mdtp = dtp; + mcp = acq; + mvp = vp; + mfp = fp; + + //mut_sprite = new mutex(); + spr_reader; + +} + +AcqThread::~AcqThread(void){ + + if(mDevice != NULL) + delete mDevice; + + if(mThread != NULL) + delete mThread; + + if(pExpCtrl != NULL) + delete pExpCtrl; + + /*if(sprite != NULL) + delete sprite;*/ + +} + +void AcqThread::stopThread(){ + + mMustStopMutex.lock(); + mMustStop = true; + mMustStopMutex.unlock(); + + if(mThread != NULL) + while(mThread->timed_join(boost::posix_time::seconds(2)) == false) + mThread->interrupt(); + +} + +bool AcqThread::startThread() { + + // Create a device. + mDevice = new Device(mcp, mfp, mvp, mDeviceID); + + // Search available devices. 
+ mDevice->listDevices(false); + + // CREATE CAMERA + if(!mDevice->createCamera()) + return false; + + // Prepare continuous acquisition. + if(!prepareAcquisitionOnDevice()) + return false; + + // Create acquisition thread. + mThread = new boost::thread(boost::ref(*this)); + //sprite = new SpriteThread(std::ref(mut_sprite),frameSprite, mDevice, spr_reader, frameSprite->capacity()); + + return true; + +} + +bool AcqThread::getThreadStatus(){ + + return mThreadTerminated; + +} + +void AcqThread::operator()(){ + + bool stop = false; + + BOOST_LOG_SCOPED_THREAD_TAG("LogName", "ACQ_THREAD"); + BOOST_LOG_SEV(logger,notification) << "\n"; + BOOST_LOG_SEV(logger,notification) << "=============================================="; + BOOST_LOG_SEV(logger,notification) << "========== START ACQUISITION THREAD =========="; + BOOST_LOG_SEV(logger,notification) << "=============================================="; + + try { + + // Search next acquisition according to the current time. + selectNextAcquisitionSchedule(TimeDate::splitIsoExtendedDate(to_iso_extended_string(boost::posix_time::microsec_clock::universal_time()))); + + // Exposure adjustment variables. + bool exposureControlStatus = false; + bool exposureControlActive = false; + bool cleanStatus = false; + + // If exposure can be set on the input device. + if(mDevice->getExposureStatus()) { + + pExpCtrl = new ExposureControl( mcp.EXPOSURE_CONTROL_FREQUENCY, + mcp.EXPOSURE_CONTROL_SAVE_IMAGE, + mcp.EXPOSURE_CONTROL_SAVE_INFOS, + mdp.DATA_PATH, + mstp.STATION_NAME); + } + + TimeMode previousTimeMode = NONE; + + /// Acquisition process. + do { + + // Location of a video or frames if input type is FRAMES or VIDEO. + string location = ""; + + // Load videos file or frames directory if input type is FRAMES or VIDEO + if(!mDevice->loadNextCameraDataSet(location)) break; + + if(pDetection != NULL) pDetection->setCurrentDataSet(location); + + // Reference time to compute interval between regular captures. 
+ string cDate = to_simple_string(boost::posix_time::microsec_clock::universal_time()); + string refDate = cDate.substr(0, cDate.find(".")); + + chrono::duration dur_elapsed; + chrono::time_point start; + + do { + start = chrono::high_resolution_clock::now(); + + // Container for the grabbed image. + Frame newFrame; + + // Time counter of grabbing a frame. + double tacq = (double)getTickCount(); + + // Grab a frame. + if(mDevice->runContinuousCapture(newFrame)) { + + BOOST_LOG_SEV(logger, normal) << "============= FRAME " << newFrame.mFrameNumber << " ============= "; + cout << "============= FRAME " << newFrame.mFrameNumber << " ============= " << endl; + + + if(spr_reader.extractValueForKeyword("ACQ_SPRITE_ENABLED")=="true") + { + //***Here, we (Matthieu and Sebastien) add a function to only analyse when the sun is "sleeping" + string vec_time = to_simple_string(boost::posix_time::microsec_clock::universal_time()); + int pos_space = vec_time.find(' '); + int time_hour = atoi(vec_time.substr(pos_space+1,3).c_str()); + + /*if(time_hour>=21 || time_hour<=2) + { + sprite->addFrame(newFrame); + }*/ + + //sprite->addFrame(newFrame); + + + + } + + + // If camera type in input is FRAMES or VIDEO. + if(mDevice->mVideoFramesInput) { + + // Push the new frame in the framebuffer. + boost::mutex::scoped_lock lock(*frameBuffer_mutex); + frameBuffer->push_back(newFrame); + lock.unlock(); + + // Notify detection thread. + if(pDetection != NULL) { + + boost::mutex::scoped_lock lock2(*detSignal_mutex); + *detSignal = true; + detSignal_condition->notify_one(); + lock2.unlock(); + + } + + // Slow down the time in order to give more time to the detection process. 
+ int twait = 100; + if(mvp.INPUT_TIME_INTERVAL == 0 && mfp.INPUT_TIME_INTERVAL > 0) + twait = mfp.INPUT_TIME_INTERVAL; + else if(mvp.INPUT_TIME_INTERVAL > 0 && mfp.INPUT_TIME_INTERVAL == 0) + twait = mvp.INPUT_TIME_INTERVAL; + #ifdef WINDOWS + Sleep(twait); + #else + #ifdef LINUX + usleep(twait * 1000); + #endif + #endif + + + }else { + + // Get current time in seconds. + int currentTimeInSec = newFrame.mDate.hours * 3600 + newFrame.mDate.minutes * 60 + (int)newFrame.mDate.seconds; + + // Detect day or night. + TimeMode currentTimeMode = NONE; + + if((currentTimeInSec > mStopSunsetTime) || (currentTimeInSec < mStartSunriseTime)) { + currentTimeMode = NIGHT; + }else if((currentTimeInSec > mStartSunriseTime) && (currentTimeInSec < mStopSunsetTime)) { + currentTimeMode = DAY; + } + + // If exposure control is not active, the new frame can be shared with others threads. + if(!exposureControlStatus) { + + + // Push the new frame in the framebuffer. + boost::mutex::scoped_lock lock(*frameBuffer_mutex); + frameBuffer->push_back(newFrame); + lock.unlock(); + + // Notify detection thread. + if(pDetection != NULL) { + + if(previousTimeMode != currentTimeMode && mdtp.DET_MODE != DAYNIGHT) { + + BOOST_LOG_SEV(logger, notification) << "TimeMode has changed ! "; + boost::mutex::scoped_lock lock(*detSignal_mutex); + *detSignal = false; + lock.unlock(); + cout << "Send interruption signal to detection process " << endl; + pDetection->interruptThread(); + + }else if(mdtp.DET_MODE == currentTimeMode || mdtp.DET_MODE == DAYNIGHT) { + + boost::mutex::scoped_lock lock2(*detSignal_mutex); + *detSignal = true; + detSignal_condition->notify_one(); + lock2.unlock(); + + } + } + + // Notify stack thread. + if(pStack != NULL) { + + // TimeMode has changed. + if(previousTimeMode != currentTimeMode && msp.STACK_MODE != DAYNIGHT) { + + BOOST_LOG_SEV(logger, notification) << "TimeMode has changed ! 
"; + boost::mutex::scoped_lock lock(*stackSignal_mutex); + *stackSignal = false; + lock.unlock(); + + // Force interruption. + cout << "Send interruption signal to stack " << endl; + pStack->interruptThread(); + + }else if(msp.STACK_MODE == currentTimeMode || msp.STACK_MODE == DAYNIGHT) { + + boost::mutex::scoped_lock lock3(*stackSignal_mutex); + *stackSignal = true; + stackSignal_condition->notify_one(); + lock3.unlock(); + + } + } + + cleanStatus = false; + + }else { + + // Exposure control is active, the new frame can't be shared with others threads. + if(!cleanStatus) { + + // If stack process exists. + if(pStack != NULL) { + + boost::mutex::scoped_lock lock(*stackSignal_mutex); + *stackSignal = false; + lock.unlock(); + + // Force interruption. + cout << "Send interruption signal to stack " << endl; + pStack->interruptThread(); + + } + + // If detection process exists + if(pDetection != NULL) { + + boost::mutex::scoped_lock lock(*detSignal_mutex); + *detSignal = false; + lock.unlock(); + cout << "Sending interruption signal to detection process... " << endl; + pDetection->interruptThread(); + + } + + // Reset framebuffer. + cout << "Cleaning frameBuffer..." << endl; + boost::mutex::scoped_lock lock(*frameBuffer_mutex); + frameBuffer->clear(); + lock.unlock(); + + cleanStatus = true; + + } + + } + + previousTimeMode = currentTimeMode; + + // Adjust exposure time. + if(pExpCtrl != NULL && exposureControlActive) + { + pyrDown(newFrame.mImg,newFrame.mImg, Size(newFrame.mImg.cols / 2, newFrame.mImg.rows / 2)); + exposureControlStatus = pExpCtrl->controlExposureTime(mDevice, newFrame.mImg, newFrame.mDate, mdtp.MASK, mDevice->mMinExposureTime, mcp.ACQ_FPS); + } + + // Get current date YYYYMMDD. + string currentFrameDate = TimeDate::getYYYYMMDD(newFrame.mDate); + + // If the date has changed, sun ephemeris must be updated. + if(currentFrameDate != mCurrentDate) { + + BOOST_LOG_SEV(logger, notification) << "Date has changed. Former Date is " << mCurrentDate << ". 
New Date is " << currentFrameDate << "." ; + computeSunTimes(); + + } + + // Acquisition at regular time interval is enabled. + if(mcp.regcap.ACQ_REGULAR_ENABLED && !mDevice->mVideoFramesInput) { + + cDate = to_simple_string(boost::posix_time::microsec_clock::universal_time()); + string nowDate = cDate.substr(0, cDate.find(".")); + + boost::posix_time::ptime t1(boost::posix_time::time_from_string(refDate)); + boost::posix_time::ptime t2(boost::posix_time::time_from_string(nowDate)); + + boost::posix_time::time_duration td = t2 - t1; + long secTime = td.total_seconds(); + cout << "NEXT REGCAP : " << (int)(mcp.regcap.ACQ_REGULAR_CFG.interval - secTime) << "s" << endl; + + // Check it's time to run a regular capture. + if(secTime >= mcp.regcap.ACQ_REGULAR_CFG.interval) { + + // Current time is after the sunset stop and before the sunrise start = NIGHT + if((currentTimeMode == NIGHT) && (mcp.regcap.ACQ_REGULAR_MODE == NIGHT || mcp.regcap.ACQ_REGULAR_MODE == DAYNIGHT)) { + + BOOST_LOG_SEV(logger, notification) << "Run regular acquisition."; + + runImageCapture( mcp.regcap.ACQ_REGULAR_CFG.rep, + mcp.regcap.ACQ_REGULAR_CFG.exp, + mcp.regcap.ACQ_REGULAR_CFG.gain, + mcp.regcap.ACQ_REGULAR_CFG.fmt, + mcp.regcap.ACQ_REGULAR_OUTPUT, + mcp.regcap.ACQ_REGULAR_PRFX); + + // Current time is between sunrise start and sunset stop = DAY + }else if(currentTimeMode == DAY && (mcp.regcap.ACQ_REGULAR_MODE == DAY || mcp.regcap.ACQ_REGULAR_MODE == DAYNIGHT)) { + + BOOST_LOG_SEV(logger, notification) << "Run regular acquisition."; + saveImageCaptured(newFrame, 0, mcp.regcap.ACQ_REGULAR_OUTPUT, mcp.regcap.ACQ_REGULAR_PRFX); + + } + + // Reset reference time in case a long exposure has been done. + cDate = to_simple_string(boost::posix_time::microsec_clock::universal_time()); + refDate = cDate.substr(0, cDate.find(".")); + + } + + } + + // Acquisiton at scheduled time is enabled. 
+ if(mcp.schcap.ACQ_SCHEDULE.size() != 0 && mcp.schcap.ACQ_SCHEDULE_ENABLED && !mDevice->mVideoFramesInput) { + + int next = (mNextAcq.hours * 3600 + mNextAcq.min * 60 + mNextAcq.sec) - (newFrame.mDate.hours * 3600 + newFrame.mDate.minutes * 60 + newFrame.mDate.seconds); + + if(next < 0) { + next = (24 * 3600) - (newFrame.mDate.hours * 3600 + newFrame.mDate.minutes * 60 + newFrame.mDate.seconds) + (mNextAcq.hours * 3600 + mNextAcq.min * 60 + mNextAcq.sec); + cout << "next : " << next << endl; + } + + vectortsch = TimeDate::HdecimalToHMS(next/3600.0); + + cout << "NEXT SCHCAP : " << tsch.at(0) << "h" << tsch.at(1) << "m" << tsch.at(2) << "s" << endl; + + // It's time to run scheduled acquisition. + if( mNextAcq.hours == newFrame.mDate.hours && + mNextAcq.min == newFrame.mDate.minutes && + (int)newFrame.mDate.seconds == mNextAcq.sec) { + + CamPixFmt format; + format = mNextAcq.fmt; + + runImageCapture( mNextAcq.rep, + mNextAcq.exp, + mNextAcq.gain, + format, + mcp.schcap.ACQ_SCHEDULE_OUTPUT, + ""); + + // Update mNextAcq + selectNextAcquisitionSchedule(newFrame.mDate); + + }else { + + // The current time has elapsed. + if(newFrame.mDate.hours > mNextAcq.hours) { + + selectNextAcquisitionSchedule(newFrame.mDate); + + }else if(newFrame.mDate.hours == mNextAcq.hours) { + + if(newFrame.mDate.minutes > mNextAcq.min) { + + selectNextAcquisitionSchedule(newFrame.mDate); + + }else if(newFrame.mDate.minutes == mNextAcq.min) { + + if(newFrame.mDate.seconds > mNextAcq.sec) { + + selectNextAcquisitionSchedule(newFrame.mDate); + + } + + } + + } + + } + + } + + // Check sunrise and sunset time. + if( (((currentTimeInSec > mStartSunriseTime && currentTimeInSec < mStopSunriseTime) || + (currentTimeInSec > mStartSunsetTime && currentTimeInSec < mStopSunsetTime))) && !mDevice->mVideoFramesInput) { + + exposureControlActive = true; + + }else { + + // Print time before sunrise. 
+ if(currentTimeInSec < mStartSunriseTime || currentTimeInSec > mStopSunsetTime ) { + vector nextSunrise; + if(currentTimeInSec < mStartSunriseTime) + nextSunrise = TimeDate::HdecimalToHMS((mStartSunriseTime - currentTimeInSec) / 3600.0); + if(currentTimeInSec > mStopSunsetTime) + nextSunrise = TimeDate::HdecimalToHMS(((24*3600 - currentTimeInSec) + mStartSunriseTime ) / 3600.0); + + cout << "NEXT SUNRISE : " << nextSunrise.at(0) << "h" << nextSunrise.at(1) << "m" << nextSunrise.at(2) << "s" << endl; + } + + // Print time before sunset. + if(currentTimeInSec > mStopSunriseTime && currentTimeInSec < mStartSunsetTime){ + vector nextSunset; + nextSunset = TimeDate::HdecimalToHMS((mStartSunsetTime - currentTimeInSec) / 3600.0); + cout << "NEXT SUNSET : " << nextSunset.at(0) << "h" << nextSunset.at(1) << "m" << nextSunset.at(2) << "s" << endl; + + } + + // Reset exposure time when sunrise or sunset is finished. + if(exposureControlActive) { + + // In DAYTIME : Apply minimum available exposure time. + if((currentTimeInSec >= mStopSunriseTime && currentTimeInSec < mStartSunsetTime)){ + + BOOST_LOG_SEV(logger, notification) << "Apply day exposure time : " << mDevice->getDayExposureTime(); + mDevice->setCameraDayExposureTime(); + BOOST_LOG_SEV(logger, notification) << "Apply day exposure time : " << mDevice->getDayGain(); + mDevice->setCameraDayGain(); + + // In NIGHTTIME : Apply maximum available exposure time. + }else if((currentTimeInSec >= mStopSunsetTime) || (currentTimeInSec < mStartSunriseTime)){ + + BOOST_LOG_SEV(logger, notification) << "Apply night exposure time." << mDevice->getNightExposureTime(); + mDevice->setCameraNightExposureTime(); + BOOST_LOG_SEV(logger, notification) << "Apply night exposure time." 
<< mDevice->getNightGain(); + mDevice->setCameraNightGain(); + + } + } + + exposureControlActive = false; + exposureControlStatus = false; + + } + + } + + } + + tacq = (((double)getTickCount() - tacq)/getTickFrequency())*1000; + std::cout << " [ TIME ACQ ] : " << tacq << " ms ~cFPS(" << (1.0/(tacq/1000.0)) << ")" << endl; + BOOST_LOG_SEV(logger, normal) << " [ TIME ACQ ] : " << tacq << " ms"; + + mMustStopMutex.lock(); + stop = mMustStop; + mMustStopMutex.unlock(); + + dur_elapsed = chrono::high_resolution_clock::now() - start; + //cerr<<"Ara "<getCameraStatus()); + + // Reset detection process to prepare the analyse of a new data set. + if(pDetection != NULL) { + + pDetection->getDetMethod()->resetDetection(true); + pDetection->getDetMethod()->resetMask(); + pDetection->updateDetectionReport(); + if(!pDetection->getRunStatus()) + break; + + } + + // Clear framebuffer. + boost::mutex::scoped_lock lock(*frameBuffer_mutex); + frameBuffer->clear(); + lock.unlock(); + + }while(mDevice->getCameraDataSetStatus() && stop == false); + + }catch(const boost::thread_interrupted&){ + + BOOST_LOG_SEV(logger,notification) << "AcqThread ended."; + cout << "AcqThread ended." <stopCamera(); + + mThreadTerminated = true; + + std::cout << "Acquisition Thread TERMINATED." 
<< endl; + BOOST_LOG_SEV(logger,notification) << "Acquisition Thread TERMINATED"; + +} + +void AcqThread::selectNextAcquisitionSchedule(TimeDate::Date date){ + + if(mcp.schcap.ACQ_SCHEDULE.size() != 0){ + + // Search next acquisition + for(int i = 0; i < mcp.schcap.ACQ_SCHEDULE.size(); i++){ + + if(date.hours < mcp.schcap.ACQ_SCHEDULE.at(i).hours){ + + mNextAcqIndex = i; + break; + + }else if(date.hours == mcp.schcap.ACQ_SCHEDULE.at(i).hours){ + + if(date.minutes < mcp.schcap.ACQ_SCHEDULE.at(i).min){ + + mNextAcqIndex = i; + break; + + }else if(date.minutes == mcp.schcap.ACQ_SCHEDULE.at(i).min){ + + if(date.seconds < mcp.schcap.ACQ_SCHEDULE.at(i).sec){ + + mNextAcqIndex = i; + break; + + } + } + } + } + + mNextAcq = mcp.schcap.ACQ_SCHEDULE.at(mNextAcqIndex); + + } + +} + +bool AcqThread::buildAcquisitionDirectory(string YYYYMMDD){ + + namespace fs = boost::filesystem; + string root = mdp.DATA_PATH + mstp.STATION_NAME + "_" + YYYYMMDD +"/"; + + string subDir = "captures/"; + string finalPath = root + subDir; + + mOutputDataPath = finalPath; + BOOST_LOG_SEV(logger,notification) << "CompleteDataPath : " << mOutputDataPath; + + path p(mdp.DATA_PATH); + path p1(root); + path p2(root + subDir); + + // If DATA_PATH exists + if(fs::exists(p)){ + + // If DATA_PATH/STATI ON_YYYYMMDD/ exists + if(fs::exists(p1)){ + + // If DATA_PATH/STATION_YYYYMMDD/captures/ doesn't exists + if(!fs::exists(p2)){ + + // If fail to create DATA_PATH/STATION_YYYYMMDD/captures/ + if(!fs::create_directory(p2)){ + + BOOST_LOG_SEV(logger,critical) << "Unable to create captures directory : " << p2.string(); + return false; + + // If success to create DATA_PATH/STATION_YYYYMMDD/captures/ + }else{ + + BOOST_LOG_SEV(logger,notification) << "Success to create captures directory : " << p2.string(); + return true; + + } + } + + // If DATA_PATH/STATION_YYYYMMDD/ doesn't exists + }else{ + + // If fail to create DATA_PATH/STATION_YYYYMMDD/ + if(!fs::create_directory(p1)){ + + BOOST_LOG_SEV(logger,fail) << 
"Unable to create STATION_YYYYMMDD directory : " << p1.string(); + return false; + + // If success to create DATA_PATH/STATION_YYYYMMDD/ + }else{ + + BOOST_LOG_SEV(logger,notification) << "Success to create STATION_YYYYMMDD directory : " << p1.string(); + + // If fail to create DATA_PATH/STATION_YYYYMMDD/stack/ + if(!fs::create_directory(p2)){ + + BOOST_LOG_SEV(logger,critical) << "Unable to create captures directory : " << p2.string(); + return false; + + // If success to create DATA_PATH/STATION_YYYYMMDD/stack/ + }else{ + + BOOST_LOG_SEV(logger,notification) << "Success to create captures directory : " << p2.string(); + return true; + + } + } + } + + // If DATA_PATH doesn't exists + }else{ + + // If fail to create DATA_PATH + if(!fs::create_directory(p)){ + + BOOST_LOG_SEV(logger,fail) << "Unable to create DATA_PATH directory : " << p.string(); + return false; + + // If success to create DATA_PATH + }else{ + + BOOST_LOG_SEV(logger,notification) << "Success to create DATA_PATH directory : " << p.string(); + + // If fail to create DATA_PATH/STATION_YYYYMMDD/ + if(!fs::create_directory(p1)){ + + BOOST_LOG_SEV(logger,fail) << "Unable to create STATION_YYYYMMDD directory : " << p1.string(); + return false; + + // If success to create DATA_PATH/STATION_YYYYMMDD/ + }else{ + + BOOST_LOG_SEV(logger,notification) << "Success to create STATION_YYYYMMDD directory : " << p1.string(); + + // If fail to create DATA_PATH/STATION_YYYYMMDD/captures/ + if(!fs::create_directory(p2)){ + + BOOST_LOG_SEV(logger,critical) << "Unable to create captures directory : " << p2.string(); + return false; + + // If success to create DATA_PATH/STATION_YYYYMMDD/captures/ + }else{ + + BOOST_LOG_SEV(logger,notification) << "Success to create captures directory : " << p2.string(); + return true; + + } + } + } + } + + return true; +} + +void AcqThread::runImageCapture(int imgNumber, int imgExposure, int imgGain, CamPixFmt imgFormat, ImgFormat imgOutput, string imgPrefix) { + + // Stop camera + 
mDevice->stopCamera(); + + // Stop stack process. + if(pStack != NULL){ + + boost::mutex::scoped_lock lock(*stackSignal_mutex); + *stackSignal = false; + lock.unlock(); + + // Force interruption. + BOOST_LOG_SEV(logger, notification) << "Send reset signal to stack. "; + pStack->interruptThread(); + + } + + // Stop detection process. + if(pDetection != NULL){ + + boost::mutex::scoped_lock lock(*detSignal_mutex); + *detSignal = false; + lock.unlock(); + BOOST_LOG_SEV(logger, notification) << "Send reset signal to detection process. "; + pDetection->interruptThread(); + + } + + // Reset framebuffer. + BOOST_LOG_SEV(logger, notification) << "Cleaning frameBuffer..."; + boost::mutex::scoped_lock lock(*frameBuffer_mutex); + frameBuffer->clear(); + lock.unlock(); + + for(int i = 0; i < imgNumber; i++) { + + BOOST_LOG_SEV(logger, notification) << "Prepare capture n° " << i; + + // Configuration for single capture. + Frame frame; + BOOST_LOG_SEV(logger, notification) << "Exposure time : " << imgExposure; + frame.mExposure = imgExposure; + BOOST_LOG_SEV(logger, notification) << "Gain : " << imgGain; + frame.mGain = imgGain; + EParser format; + BOOST_LOG_SEV(logger, notification) << "Format : " << format.getStringEnum(imgFormat); + frame.mFormat = imgFormat; + + if(mcp.ACQ_RES_CUSTOM_SIZE) { + frame.mHeight = mcp.ACQ_HEIGHT; + frame.mWidth = mcp.ACQ_WIDTH; + } + + // Run single capture. + BOOST_LOG_SEV(logger, notification) << "Run single capture."; + if(mDevice->runSingleCapture(frame)) { + + BOOST_LOG_SEV(logger, notification) << "Single capture succeed !"; + cout << "Single capture succeed !" 
<< endl; + saveImageCaptured(frame, i, imgOutput, imgPrefix); + + }else{ + + BOOST_LOG_SEV(logger, fail) << "Single capture failed !"; + + } + + } + + #ifdef WINDOWS + Sleep(1000); + #else + #ifdef LINUX + sleep(1); + #endif + #endif + + BOOST_LOG_SEV(logger, notification) << "Restarting camera in continuous mode..."; + + // RECREATE CAMERA + if(!mDevice->recreateCamera()) + throw "Fail to restart camera."; + + prepareAcquisitionOnDevice(); + +} + +void AcqThread::saveImageCaptured(Frame &img, int imgNum, ImgFormat outputType, string imgPrefix) { + + if(img.mImg.data) { + + string YYYYMMDD = TimeDate::getYYYYMMDD(img.mDate); + + if(buildAcquisitionDirectory(YYYYMMDD)) { + + string fileName = imgPrefix + "_" + TimeDate::getYYYYMMDDThhmmss(img.mDate) + "_UT-" + Conversion::intToString(imgNum); + + switch(outputType) { + + case JPEG : + + { + + switch(img.mFormat) { + + case MONO12 : + + { + + Mat temp; + img.mImg.copyTo(temp); + Mat newMat = ImgProcessing::correctGammaOnMono12(temp, 2.2); + Mat newMat2 = Conversion::convertTo8UC1(newMat); + SaveImg::saveJPEG(newMat2, mOutputDataPath + fileName); + + } + + break; + + default : + + { + + Mat temp; + img.mImg.copyTo(temp); + Mat newMat = ImgProcessing::correctGammaOnMono8(temp, 2.2); + SaveImg::saveJPEG(newMat, mOutputDataPath + fileName); + + } + + } + } + + break; + + case FITS : + + { + + Fits2D newFits(mOutputDataPath); + newFits.loadKeys(mfkp, mstp); + newFits.kGAINDB = img.mGain; + newFits.kEXPOSURE = img.mExposure/1000000.0; + newFits.kONTIME = img.mExposure/1000000.0; + newFits.kELAPTIME = img.mExposure/1000000.0; + newFits.kDATEOBS = TimeDate::getIsoExtendedFormatDate(img.mDate); + + double debObsInSeconds = img.mDate.hours*3600 + img.mDate.minutes*60 + img.mDate.seconds; + double julianDate = TimeDate::gregorianToJulian(img.mDate); + double julianCentury = TimeDate::julianCentury(julianDate); + + newFits.kCRVAL1 = TimeDate::localSideralTime_2(julianCentury, img.mDate.hours, img.mDate.minutes, 
(int)img.mDate.seconds, mstp.SITELONG); + newFits.kCTYPE1 = "RA---ARC"; + newFits.kCTYPE2 = "DEC--ARC"; + newFits.kEQUINOX = 2000.0; + + switch(img.mFormat) { + + case MONO12 : + + { + + // Convert unsigned short type image in short type image. + Mat newMat = Mat(img.mImg.rows, img.mImg.cols, CV_16SC1, Scalar(0)); + + // Set bzero and bscale for print unsigned short value in soft visualization. + newFits.kBZERO = 32768; + newFits.kBSCALE = 1; + + unsigned short *ptr = NULL; + short *ptr2 = NULL; + + for(int i = 0; i < img.mImg.rows; i++){ + + ptr = img.mImg.ptr(i); + ptr2 = newMat.ptr(i); + + for(int j = 0; j < img.mImg.cols; j++){ + + if(ptr[j] - 32768 > 32767){ + + ptr2[j] = 32767; + + }else{ + + ptr2[j] = ptr[j] - 32768; + } + } + } + + // Create FITS image with BITPIX = SHORT_IMG (16-bits signed integers), pixel with TSHORT (signed short) + if(newFits.writeFits(newMat, S16, fileName)) + cout << ">> Fits saved in : " << mOutputDataPath << fileName << endl; + + } + + break; + + default : + + { + + if(newFits.writeFits(img.mImg, UC8, fileName)) + cout << ">> Fits saved in : " << mOutputDataPath << fileName << endl; + + } + + } + + } + + break; + + } + + } + } + +} + +bool AcqThread::computeSunTimes() { + + int sunriseStartH = 0, sunriseStartM = 0, sunriseStopH = 0, sunriseStopM = 0, + sunsetStartH = 0, sunsetStartM = 0, sunsetStopH = 0, sunsetStopM = 0; + + boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + string date = to_iso_extended_string(time); + vector intDate = TimeDate::getIntVectorFromDateString(date); + + string month = Conversion::intToString(intDate.at(1)); + if(month.size() == 1) month = "0" + month; + string day = Conversion::intToString(intDate.at(2)); + if(day.size() == 1) day = "0" + day; + mCurrentDate = Conversion::intToString(intDate.at(0)) + month + day; + mCurrentTime = intDate.at(3) * 3600 + intDate.at(4) * 60 + intDate.at(5); + + cout << "LOCAL DATE : " << mCurrentDate << endl; + + 
if(mcp.ephem.EPHEMERIS_ENABLED) { + + Ephemeris ephem1 = Ephemeris(mCurrentDate, mcp.ephem.SUN_HORIZON_1, mstp.SITELONG, mstp.SITELAT); + + if(!ephem1.computeEphemeris(sunriseStartH, sunriseStartM,sunsetStopH, sunsetStopM)) { + + return false; + + } + + Ephemeris ephem2 = Ephemeris(mCurrentDate, mcp.ephem.SUN_HORIZON_2, mstp.SITELONG, mstp.SITELAT ); + + if(!ephem2.computeEphemeris(sunriseStopH, sunriseStopM,sunsetStartH, sunsetStartM)) { + + return false; + + } + + }else { + + sunriseStartH = mcp.ephem.SUNRISE_TIME.at(0); + sunriseStartM = mcp.ephem.SUNRISE_TIME.at(1); + + double intpart1 = 0; + double fractpart1 = modf((double)mcp.ephem.SUNRISE_DURATION/3600.0 , &intpart1); + + if(intpart1!=0) { + + if(sunriseStartH + intpart1 < 24) { + + sunriseStopH = sunriseStartH + intpart1; + + + }else { + + sunriseStopH = sunriseStartH + intpart1 - 24; + + } + + }else { + + sunriseStopH = sunriseStartH; + + } + + double intpart2 = 0; + double fractpart2 = modf(fractpart1 * 60 , &intpart2); + + if(sunriseStartM + intpart2 < 60) { + + sunriseStopM = sunriseStartM + intpart2; + + }else { + + + if(sunriseStopH + 1 < 24) { + + sunriseStopH += 1; + + }else { + + sunriseStopH = sunriseStopH + 1 - 24; + + } + + + sunriseStopM = intpart2; + + } + + sunsetStartH = mcp.ephem.SUNSET_TIME.at(0); + sunsetStartM = mcp.ephem.SUNSET_TIME.at(1); + + double intpart3 = 0; + double fractpart3 = modf((double)mcp.ephem.SUNSET_DURATION/3600.0 , &intpart3); + + if(intpart3!=0) { + + if(sunsetStartH + intpart3 < 24) { + + sunsetStopH = sunsetStartH + intpart3; + + }else { + + sunsetStopH = sunsetStartH + intpart3 - 24; + + } + + }else { + + sunsetStopH = sunsetStartH; + + } + + double intpart4 = 0; + double fractpart4 = modf(fractpart3 * 60 , &intpart4); + + if(sunsetStartM + intpart4 < 60) { + + sunsetStopM = sunsetStartM + intpart4; + + }else { + + + if(sunsetStopH + 1 < 24) { + + sunsetStopH += 1; + + }else { + + sunsetStopH = sunsetStopH + 1 - 24; + + } + + sunsetStopM = intpart4; + + } + + } + 
+ cout << "SUNRISE : " << sunriseStartH << "H" << sunriseStartM << " - " << sunriseStopH << "H" << sunriseStopM << endl; + cout << "SUNSET : " << sunsetStartH << "H" << sunsetStartM << " - " << sunsetStopH << "H" << sunsetStopM << endl; + + mStartSunriseTime = sunriseStartH * 3600 + sunriseStartM * 60; + mStopSunriseTime = sunriseStopH * 3600 + sunriseStopM * 60; + mStartSunsetTime = sunsetStartH * 3600 + sunsetStartM * 60; + mStopSunsetTime = sunsetStopH * 3600 + sunsetStopM * 60; + + return true; + +} + +bool AcqThread::prepareAcquisitionOnDevice() { + + + // SET SIZE + if(!mDevice->setCameraSize()) + return false; + + // SET FORMAT + if(!mDevice->setCameraPixelFormat()) + return false; + + // LOAD GET BOUNDS + mDevice->getCameraExposureBounds(); + mDevice->getCameraGainBounds(); + + // Get Sunrise start/stop, Sunset start/stop. --- + computeSunTimes(); + + // CHECK SUNRISE AND SUNSET TIMES. + + if((mCurrentTime > mStopSunsetTime) || (mCurrentTime < mStartSunriseTime)) { + + BOOST_LOG_SEV(logger, notification) << "DAYTIME : NO"; + BOOST_LOG_SEV(logger, notification) << "AUTO EXPOSURE : NO"; + BOOST_LOG_SEV(logger, notification) << "EXPOSURE TIME : " << mDevice->getNightExposureTime(); + BOOST_LOG_SEV(logger, notification) << "GAIN : " << mDevice->getNightGain(); + + if(!mDevice->setCameraNightExposureTime()) + return false; + + if(!mDevice->setCameraNightGain()) + return false; + + }else if((mCurrentTime > mStopSunriseTime && mCurrentTime < mStartSunsetTime)) { + + BOOST_LOG_SEV(logger, notification) << "DAYTIME : YES"; + BOOST_LOG_SEV(logger, notification) << "AUTO EXPOSURE : NO"; + BOOST_LOG_SEV(logger, notification) << "EXPOSURE TIME : " << mDevice->getDayExposureTime(); + BOOST_LOG_SEV(logger, notification) << "GAIN : " << mDevice->getDayGain(); + + if(!mDevice->setCameraDayExposureTime()) + return false; + + if(!mDevice->setCameraDayGain()) + return false; + + }else{ + + BOOST_LOG_SEV(logger, notification) << "DAYTIME : NO"; + BOOST_LOG_SEV(logger, 
notification) << "AUTO EXPOSURE : YES"; + BOOST_LOG_SEV(logger, notification) << "EXPOSURE TIME : Minimum (" << mDevice->mMinExposureTime << ")"<< mDevice->getNightExposureTime(); + BOOST_LOG_SEV(logger, notification) << "GAIN : Minimum (" << mDevice->mMinGain << ")"; + + if(!mDevice->setCameraExposureTime(mDevice->mMinExposureTime)) + return false; + + if(!mDevice->setCameraGain(mDevice->mMinGain)) + return false; + + } + + // SET FPS. + if(!mDevice->setCameraFPS()) + return false; + + // INIT CAMERA. + if(!mDevice->initializeCamera()) + return false; + + // START CAMERA. + if(!mDevice->startCamera()) + return false; + + return true; + +} + diff --git a/AcqThread.h b/AcqThread.h new file mode 100644 index 0000000..0aa4b64 --- /dev/null +++ b/AcqThread.h @@ -0,0 +1,186 @@ +/* + AcqThread.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2016 Yoan Audureau, Chiara Marmo -- FRIPON-GEOPS-UPSUD +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 03/10/2016 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file AcqThread.h +* \author Yoan Audureau, Chiara Marmo -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 03/10/2016 +* \brief Acquisition thread. 
+*/ + +#ifndef ACQTHREAD_H +#define ACQTHREAD_H + +#include "config.h" + +#ifdef LINUX + #define BOOST_LOG_DYN_LINK 1 +#endif + +#include "ECamPixFmt.h" +#include "EImgFormat.h" +#include "DetThread.h" +#include "StackThread.h" +#include "Device.h" +#include "ExposureControl.h" +#include "ImgProcessing.h" +#include "Ephemeris.h" +#include "Fits2D.h" +#include "SParam.h" +//#include "SpriteThread.h" +//#include "SpriteReader.h" + +using namespace cv; +using namespace std; + +class AcqThread { + + private : + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init { + + public: + + Init() { + + logger.add_attribute("ClassName", boost::log::attributes::constant("AcqThread")); + + } + + }initializer; + + bool mMustStop; // Signal to stop thread. + boost::mutex mMustStopMutex; + boost::thread *mThread; // Acquisition thread. + bool mThreadTerminated; // Terminated status of the thread. + Device *mDevice; // Device used for acquisition. + int mDeviceID; // Index of the device to use. + scheduleParam mNextAcq; // Next scheduled acquisition. + int mNextAcqIndex; + DetThread *pDetection; // Pointer on detection thread in order to stop it or reset it when a regular capture occurs. + StackThread *pStack; // Pointer on stack thread in order to save and reset a stack when a regular capture occurs. + ExposureControl *pExpCtrl; // Pointer on exposure time control object while sunrise and sunset. + string mOutputDataPath; // Dynamic location where to save data (regular captures etc...). + string mCurrentDate; + int mStartSunriseTime; // In seconds. + int mStopSunriseTime; // In seconds. + int mStartSunsetTime; // In seconds. + int mStopSunsetTime; // In seconds. + int mCurrentTime; // In seconds. + + // Parameters from configuration file. + stackParam msp; + stationParam mstp; + detectionParam mdtp; + cameraParam mcp; + dataParam mdp; + fitskeysParam mfkp; + framesParam mfp; + videoParam mvp; + + // Communication with the shared framebuffer. 
+ boost::condition_variable *frameBuffer_condition; + boost::mutex *frameBuffer_mutex; + boost::circular_buffer *frameBuffer; + //boost::circular_buffer *frameSprite; + //vector *frameSprite; + + // Communication with DetThread. + bool *stackSignal; + boost::mutex *stackSignal_mutex; + boost::condition_variable *stackSignal_condition; + + // Communication with StackThread. + bool *detSignal; + boost::mutex *detSignal_mutex; + boost::condition_variable *detSignal_condition; + + //SPRITE + /*SpriteThread *sprite; + SpriteReader spr_reader; + mutex *mut_sprite;*/ + + public : + + AcqThread( boost::circular_buffer *fb, + //boost::circular_buffer *frame_sprite, + //vector *frame_sprite, + boost::mutex *fb_m, + boost::condition_variable *fb_c, + bool *sSignal, + boost::mutex *sSignal_m, + boost::condition_variable *sSignal_c, + bool *dSignal, + boost::mutex *dSignal_m, + boost::condition_variable *dSignal_c, + DetThread *detection, + StackThread *stack, + int cid, + dataParam dp, + stackParam sp, + stationParam stp, + detectionParam dtp, + cameraParam acq, + framesParam fp, + videoParam vp, + fitskeysParam fkp); + + ~AcqThread(void); + + void operator()(); + + void stopThread(); + + bool startThread(); + + // Return activity status. + bool getThreadStatus(); + + private : + + // Compute in seconds the sunrise start/stop times and the sunset start/stop times. + bool computeSunTimes(); + + // Build the directory where the data will be saved. + bool buildAcquisitionDirectory(string YYYYMMDD); + + // Analyse the scheduled acquisition list to find the next one according to the current time. + void selectNextAcquisitionSchedule(TimeDate::Date date); + + // Save a capture on disk. + void saveImageCaptured(Frame &img, int imgNum, ImgFormat outputType, string imgPrefix); + + // Run a regular or scheduled acquisition. 
+ void runImageCapture(int imgNumber, int imgExposure, int imgGain, CamPixFmt imgFormat, ImgFormat imgOutput, string imgPrefix); + + // Prepare the device for a continuous acquisition. + bool prepareAcquisitionOnDevice(); +}; + +#endif diff --git a/Base64.cpp b/Base64.cpp new file mode 100644 index 0000000..42c8b42 --- /dev/null +++ b/Base64.cpp @@ -0,0 +1,54 @@ +/* + Base64.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 20/07/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file Base64.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 26/11/2014 +* \brief Handle Base64 encryption. +*/ + +#include "Base64.h" + +string Base64::encodeBase64(string data){ + + stringstream os; + + typedef boost::archive::iterators::base64_from_binary< // Convert binary values to base64 characters. + boost::archive::iterators::transform_width // Retrieve 6 bit integers from a sequence of 8 bit bytes. + >base64_text; // Compose all the above operations in to a new iterator. 
+ + copy( + base64_text(data.c_str()), + base64_text(data.c_str() + data.size()), + boost::archive::iterators::ostream_iterator(os) + ); + + return os.str(); + +} diff --git a/Base64.h b/Base64.h new file mode 100644 index 0000000..fd01f27 --- /dev/null +++ b/Base64.h @@ -0,0 +1,70 @@ +/* + Base64.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 26/11/2014 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file Base64.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 26/11/2014 +* \brief Handle Base64 encryption. +*/ + +#pragma once + +#include +#include +#include +#include +#include + +using namespace std; + +class Base64 { + + public : + + /** + * Constructor. + */ + Base64() {}; + + /** + * Destructor. + */ + ~Base64() {}; + + /** + * Encode string data with base64. + * + * @param data String to encode. + * @return Encoded string. 
+ */ + static string encodeBase64(string data); + +}; + + diff --git a/Camera.h b/Camera.h new file mode 100644 index 0000000..604bac3 --- /dev/null +++ b/Camera.h @@ -0,0 +1,278 @@ +/* + Camera.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau -- FRIPON-GEOPS-UPSUD +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 21/01/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file Camera.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 13/06/2014 +* \brief +*/ + +#pragma once + +#include "config.h" +#include "opencv2/highgui/highgui.hpp" +#include +#include "ECamPixFmt.h" +#include "Frame.h" +#include "EInputDeviceType.h" + +using namespace cv; +using namespace std; + +class Camera { + + public : + + bool mExposureAvailable; + bool mGainAvailable; + bool mCamSizeToMax; + int mCamSizeWidth; + int mCamSizeHeight; + InputDeviceType mInputDeviceType; + bool mVerbose; + + public : + + Camera() { + + mInputDeviceType = UNDEFINED_INPUT_TYPE; + mVerbose = true; + + } + + virtual ~Camera() {}; + + virtual vector> getCamerasList() { + + vector> v; + return v; + + } + + virtual void getAvailablePixelFormats() {}; + + /** + * List connected GigE devices. 
+ * + */ + virtual bool listCameras() {return false;}; + + /** + * Get informations about a specific device. + * + */ + virtual bool getInfos() {return false;}; + + /** + * Open/create a device. + * + * @param id Identification number of the camera to create. + */ + virtual bool createDevice(int id) {return false;}; + + /** + * Get camera name from its ID. + * + * @param id Identification number of the camera from which the name is required. + * @param device The camera's name found. + * @return Success status to find camera's name. + */ + virtual bool getDeviceNameById(int id, string &deviceName) {return false;}; + + virtual bool getCameraName() {return false;}; + + InputDeviceType getDeviceType() {return mInputDeviceType;}; + + /** + * Get device's grabbing status. + * + * @return Device grabs frames or not. + */ + virtual bool getStopStatus() {return false;}; + + /** + * Prepare device to grab frames. + * + * @return Success status to prepare camera. + */ + virtual bool grabInitialization() {return false;}; + + /** + * Run acquisition. + * + */ + virtual bool acqStart() {return false;}; + + /** + * Stop acquisition. + * + */ + virtual void acqStop() {}; + + /** + * Close a device and clean resources. + * + */ + virtual void grabCleanse() {}; + + /** + * Get a frame from continuous acquisition. + * + * @param newFrame New frame's container object. + * @return Success status to grab a frame. + */ + virtual bool grabImage(Frame &newFrame) {return false;}; + + /** + * Get a frame from single acquisition. + * + * @param newFrame Frame's container object. + * @param camID Device's identification number from which the single acquisition will be performed. + * @return Success status to grab a frame. + */ + virtual bool grabSingleImage(Frame &frame, int camID) {return false;}; + + /** + * Get device's exposure time bounds. + * + * @param eMin Return minimum exposure time value. + * @param eMax Return maximum exposure time value. 
+ */ + virtual void getExposureBounds(double &eMin, double &eMax) {}; + + /** + * Get device's gain bounds. + * + * @param gMin Return minimum gain value. + * @param gMax Return maximum gain value. + */ + virtual void getGainBounds(int &gMin, int &gMax) {}; + + /** + * Get device's image format. + * + * @param format Return image format. + * @return Success status to get format. + */ + virtual bool getPixelFormat(CamPixFmt &format) {return false;}; + + /** + * Get device's frame size. + * + * @param frame's width + * @param frame's height + * @return Success to get frame'size. + */ + virtual bool getFrameSize(int &w, int &h) {return false;}; + + /** + * Get device's acquisition frequency. + * + * @return Device's fps. + */ + virtual bool getFPS(double &value) {return false;}; + + /** + * Get FPS enumeration values. + * + * @return Possible fps values. + */ + virtual bool getFpsEnum(vector &values) {return false;}; + + /** + * Get device's model name. + * + * @return Device's model name. + */ + virtual string getModelName() {return "";}; + + /** + * Get device's gain value. + * + * @return Device's gain. + */ + virtual int getGain() {return 0;}; + + /** + * Get device's exposure time value. + * + * @return Device's exposure time. + */ + virtual double getExposureTime() {return 0.0;}; + + /** + * Set device's exposure time value. + * + * @param value New exposure time value (us). + * @return Success status to set new exposure time. + */ + virtual bool setExposureTime(double value) {return false;}; + + /** + * Set device's gain value. + * + * @param value New gain value. + * @return Success status to set new gain. + */ + virtual bool setGain(int value) {return false;}; + + /** + * Set device's acquisition frequency. + * + * @param value New fps value. + * @return Success status to set fps. + */ + virtual bool setFPS(double value) {return false;}; + + virtual bool setSize(int width, int height, bool customSize) {return false;}; + + /** + * Set device's format. 
+ * + * @param format New format. + * @return Success status to set format. + */ + virtual bool setPixelFormat(CamPixFmt format) {return false;}; + + /** + * Get data status if a set of directories or videos are used in input. + * + * @return If there is still recorded frames to load in input. + */ + virtual bool getDataSetStatus() {return false;}; + + /** + * Load next data set of frames. + * + * @return Success status to load next data set. + */ + virtual bool loadNextDataSet(string &location) {location = ""; return true; }; + + virtual void test() {cout << " in camera.h" << endl;}; + +}; diff --git a/CameraFrames.cpp b/CameraFrames.cpp new file mode 100644 index 0000000..f82554d --- /dev/null +++ b/CameraFrames.cpp @@ -0,0 +1,335 @@ +/* + CameraFrames.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 20/07/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CameraFrames.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 02/09/2014 +* \brief Fits frames in input of acquisition thread. 
+*/ + +#include "CameraFrames.h" + +boost::log::sources::severity_logger< LogSeverityLevel > CameraFrames::logger; + +CameraFrames::Init CameraFrames::initializer; + +CameraFrames::CameraFrames(vector locationList, int numPos, bool verbose): +mNumFramePos(numPos), mReadDataStatus(false), mCurrDirId(0), +mFirstFrameNum(0), mLastFrameNum(0) { + + if(locationList.size()>0) + mFramesDir = locationList; + else + throw "No frames directory in input."; + + mExposureAvailable = false; + mGainAvailable = false; + mInputDeviceType = SINGLE_FITS_FRAME; + mVerbose = verbose; + +} + +CameraFrames::~CameraFrames(void) { + +} + +bool CameraFrames::loadNextDataSet(string &location) { + + cout << mCurrDirId << endl; + + location = mFramesDir.at(mCurrDirId); + + //if(mCurrDirId !=0 ) { + + mReadDataStatus = false; + + if(!searchMinMaxFramesNumber(mFramesDir.at(mCurrDirId))) + return false; + + //} + + return true; + +} + +bool CameraFrames::grabInitialization() { + + return searchMinMaxFramesNumber(mFramesDir.at(mCurrDirId)); + +} + +bool CameraFrames::getDataSetStatus() { + + mCurrDirId++; + + if(mCurrDirId >= mFramesDir.size()) return false; + else return true; +} + +bool CameraFrames::getCameraName() { + cout << "Fits frames data." << endl; + return true; +} + +bool CameraFrames::searchMinMaxFramesNumber(string location) { + + namespace fs = boost::filesystem; + + path p(location); + + if(fs::exists(p)){ + + if(mVerbose) BOOST_LOG_SEV(logger, normal) << "Frame's directory exists : " << location; + + int firstFrame = -1, lastFrame = 0; + string filename = ""; + + // Search first and last frames numbers in the directory. + for(directory_iterator file(p);file!= directory_iterator(); ++file) { + + path curr(file->path()); + + if(is_regular_file(curr)) { + + // Get file name. + string fname = curr.filename().string(); + + // Split file name according to the separator "_". 
+ vector output; + typedef boost::tokenizer > tokenizer; + boost::char_separator sep("_"); + tokenizer tokens(fname, sep); + + for (tokenizer::iterator tok_iter = tokens.begin();tok_iter != tokens.end(); ++tok_iter) { + output.push_back(*tok_iter); + } + + // Search frame number according to the number position known in the file name. + + int i = 0, number = 0; + + for(int j = 0; j < output.size(); j++) { + + if(j == mNumFramePos && j != output.size() - 1) { + + number = atoi(output.at(j).c_str()); + break; + } + + // If the frame number is at the end (before the file extension). + if(j == mNumFramePos && j == output.size() - 1) { + + vector output2; + typedef boost::tokenizer > tokenizer; + boost::char_separator sep2("."); + tokenizer tokens2(output.back(), sep2); + + for (tokenizer::iterator tok_iter = tokens2.begin();tok_iter != tokens2.end(); ++tok_iter) { + output2.push_back(*tok_iter); + } + + number = atoi(output2.front().c_str()); + break; + + } + + i++; + + } + + if(firstFrame == -1) { + + firstFrame = number; + + }else if(number < firstFrame) { + + firstFrame = number; + + } + + if(number > lastFrame) { + + lastFrame = number; + + } + } + + } + + if(mVerbose) BOOST_LOG_SEV(logger, normal) << "First frame number in frame's directory : " << firstFrame; + if(mVerbose) BOOST_LOG_SEV(logger, normal) << "Last frame number in frame's directory : " << lastFrame; + + mLastFrameNum = lastFrame; + mFirstFrameNum = firstFrame; + + return true; + + }else{ + + if(mVerbose) BOOST_LOG_SEV(logger, fail) << "Frame's directory not found."; + if(mVerbose) cout << "Frame's directory not found." << endl; + return false; + + } + +} + +bool CameraFrames::getStopStatus() { + + return mReadDataStatus; + +} + +bool CameraFrames::getFPS(double &value) { + + value = 0; + return false; + +} + +bool CameraFrames::grabImage(Frame &img) { + + bool fileFound = false; + + string filename = ""; + + path p(mFramesDir.at(mCurrDirId)); + + /// Search a frame in the directory. 
+ for(directory_iterator file(p);file!= directory_iterator(); ++file){ + + path curr(file->path()); + + if(is_regular_file(curr)){ + + list ch; + string fname = curr.filename().string(); + Conversion::stringTok(ch, fname.c_str(), "_"); + list::const_iterator lit(ch.begin()), lend(ch.end()); + int i = 0; + int number = 0; + + for(; lit != lend; ++lit){ + + if(i == mNumFramePos && i != ch.size() - 1){ + + number = atoi((*lit).c_str()); break; + } + + if(i == ch.size() - 1){ + + list ch_; + Conversion::stringTok(ch_, (*lit).c_str(), "."); + number = atoi(ch_.front().c_str()); + break; + + } + + i++; + + } + + if(number == mFirstFrameNum){ + + mFirstFrameNum++; + fileFound = true; + + cout << "FILE:" << file->path().string() << endl; + BOOST_LOG_SEV(logger, normal) << "FILE:" << file->path().string(); + + filename = file->path().string() ; + + break; + + } + } + } + + + if(mFirstFrameNum > mLastFrameNum || !fileFound){ + + mReadDataStatus = true; + BOOST_LOG_SEV(logger, normal) << "End read frames."; + return false; + + }else{ + + BOOST_LOG_SEV(logger, normal) << "Frame found."; + + Fits2D newFits(filename); + int bitpix; + + if(!newFits.readIntKeyword("BITPIX", bitpix)){ + BOOST_LOG_SEV(logger, fail) << " Fail to read fits keyword : BITPIX"; + + return false; + } + + /// Read the frame. 
+ + Mat resMat; + CamPixFmt frameFormat = MONO8; + + switch(bitpix){ + + case 8 : + + frameFormat = MONO8; + newFits.readFits8UC(resMat); + + break; + + case 16 : + + frameFormat = MONO12; + newFits.readFits16S(resMat); + + break; + + } + + boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + + Frame f = Frame(resMat, 0, 0, to_iso_extended_string(time)); + + img = f; + + img.mFrameNumber = mFirstFrameNum -1 ; + img.mFrameRemaining = mLastFrameNum - mFirstFrameNum-1; + img.mFps = 1; + img.mFormat = frameFormat; + + //waitKey(1000); + + + return true; + + } + +} diff --git a/CameraFrames.h b/CameraFrames.h new file mode 100644 index 0000000..96f9801 --- /dev/null +++ b/CameraFrames.h @@ -0,0 +1,128 @@ +/* + CameraFrames.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 20/10/2014 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CameraFrames.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 02/09/2014 +* \brief Fits frames in input of acquisition thread. 
+*/ + +#pragma once +#include "config.h" +#include "opencv2/highgui/highgui.hpp" +#include + +#ifdef LINUX +#define BOOST_LOG_DYN_LINK 1 +#endif + +#include +#include "ELogSeverityLevel.h" +#include "Conversion.h" +#include "TimeDate.h" +#include "Frame.h" +#include "Fits2D.h" +#include "Fits.h" +#include +#include +#include +#include + +#include "Camera.h" + +using namespace boost::posix_time; +using namespace cv; +using namespace std; + +class CameraFrames: public Camera { + + private: + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init { + + public: + + Init() { + + logger.add_attribute("ClassName", boost::log::attributes::constant("CameraFrames")); + + } + + } initializer; + + bool searchMinMaxFramesNumber(string location); + + vector mFramesDir; // List of frames directories to process. + int mNumFramePos; // Position of the frame number in its filename. + int mFirstFrameNum; // First frame number in a directory. + int mLastFrameNum; // Last frame number in a directory. + bool mReadDataStatus; // Signal the end of reading data in a directory. + int mCurrDirId; // Id of the directory to use. + string mCurrDir; // Path of the directory to use. 
+ + public: + + CameraFrames(vector locationList, int numPos, bool verbose); + + ~CameraFrames(); + + bool acqStart() {return true;}; + + bool createDevice(int id) { return true;}; + + bool listCameras() {return true;}; + + bool grabInitialization(); + + bool grabImage(Frame &img); + + bool getStopStatus(); + + bool loadNextDataSet(string &location); + + bool getDataSetStatus(); + + bool getFPS(double &value); + + bool setExposureTime(double exp){return true;}; + + bool setGain(int gain) {return true;}; + + bool setFPS(double fps){return true;}; + + bool setPixelFormat(CamPixFmt format){return true;}; + + bool setSize(int width, int height, bool customSize) {return true;}; + + bool getCameraName(); + +}; + diff --git a/CameraGigeAravis.cpp b/CameraGigeAravis.cpp new file mode 100644 index 0000000..d5129d2 --- /dev/null +++ b/CameraGigeAravis.cpp @@ -0,0 +1,1059 @@ +/* + CameraGigeAravis.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2016 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . 
+* +* Last modified: 16/05/2016 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CameraGigeAravis.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 16/05/2016 +* \brief Use Aravis library to pilot GigE Cameras. +* https://wiki.gnome.org/action/show/Projects/Aravis?action=show&redirect=Aravis +*/ + +#include "CameraGigeAravis.h" + +#ifdef LINUX + + boost::log::sources::severity_logger< LogSeverityLevel > CameraGigeAravis::logger; + CameraGigeAravis::Init CameraGigeAravis::initializer; + + CameraGigeAravis::CameraGigeAravis(bool shift): + camera(NULL), mWidth(0), mHeight(0), fps(0), gainMin(0.0), gainMax(0.0), + payload(0), exposureMin(0), exposureMax(0), gain(0), exp(0), nbCompletedBuffers(0), + nbFailures(0), nbUnderruns(0), frameCounter(0), shiftBitsImage(shift), stream(NULL) { + mExposureAvailable = true; + mGainAvailable = true; + mInputDeviceType = CAMERA; + } + + CameraGigeAravis::CameraGigeAravis(): + camera(NULL), mWidth(0), mHeight(0), fps(0), gainMin(0.0), gainMax(0.0), + payload(0), exposureMin(0), exposureMax(0), gain(0), exp(0), nbCompletedBuffers(0), + nbFailures(0), nbUnderruns(0), frameCounter(0), shiftBitsImage(false), stream(NULL) { + mExposureAvailable = true; + mGainAvailable = true; + mInputDeviceType = CAMERA; + } + + CameraGigeAravis::~CameraGigeAravis(){ + + if(stream != NULL) + g_object_unref(stream); + + if(camera != NULL) + g_object_unref(camera); + + } + + vector> CameraGigeAravis::getCamerasList() { + + vector> camerasList; + + ArvInterface *interface; + + //arv_update_device_list(); + + int ni = arv_get_n_interfaces(); + + + for (int j = 0; j< ni; j++){ + + const char* name = arv_get_interface_id (j); + if (strcmp(name,"GigEVision") == 0) { + interface = arv_gv_interface_get_instance(); + arv_interface_update_device_list(interface); + //int nb = arv_get_n_devices(); + + int nb = arv_interface_get_n_devices(interface); + + for(int i = 0; i < nb; i++){ + + pair c; 
+ c.first = i; + //const char* str = arv_get_device_id(i); + const char* str = arv_interface_get_device_id(interface,i); + const char* addr = arv_interface_get_device_address(interface,i); + std::string s = str; + c.second = "NAME[" + s + "] SDK[ARAVIS] IP: " + addr; + camerasList.push_back(c); + } + } + } + + return camerasList; + + } + + bool CameraGigeAravis::listCameras(){ + + ArvInterface *interface; + //arv_update_device_list(); + + int ni = arv_get_n_interfaces (); + + cout << endl << "------------ GIGE CAMERAS WITH ARAVIS ----------" << endl << endl; + + for (int j = 0; j< ni; j++){ + + interface = arv_gv_interface_get_instance(); + arv_interface_update_device_list(interface); + //int nb = arv_get_n_devices(); + + int nb = arv_interface_get_n_devices(interface); + for(int i = 0; i < nb; i++){ + + cout << "-> [" << i << "] " << arv_interface_get_device_id(interface,i)<< endl; + //cout << "-> [" << i << "] " << arv_get_device_id(i)<< endl; + + } + + if(nb == 0) { + cout << "-> No cameras detected..." 
<< endl; + return false; + } + } + cout << endl << "------------------------------------------------" << endl << endl; + + return true; + + } + + bool CameraGigeAravis::createDevice(int id){ + + string deviceName; + + if(!getDeviceNameById(id, deviceName)) + return false; + + camera = arv_camera_new(deviceName.c_str()); + + if(camera == NULL){ + + BOOST_LOG_SEV(logger, fail) << "Fail to connect the camera."; + return false; + + } + + return true; + } + + bool CameraGigeAravis::setSize(int width, int height, bool customSize) { + + if(customSize) { + + arv_camera_set_region(camera, 0, 0,width,height); + arv_camera_get_region (camera, NULL, NULL, &mWidth, &mHeight); + BOOST_LOG_SEV(logger, notification) << "Camera region size : " << mWidth << "x" << mHeight; + + // Default is maximum size + }else { + + int sensor_width, sensor_height; + + arv_camera_get_sensor_size(camera, &sensor_width, &sensor_height); + BOOST_LOG_SEV(logger, notification) << "Camera sensor size : " << sensor_width << "x" << sensor_height; + + arv_camera_set_region(camera, 0, 0,sensor_width,sensor_height); + arv_camera_get_region (camera, NULL, NULL, &mWidth, &mHeight); + + } + + return true; + + } + + bool CameraGigeAravis::getDeviceNameById(int id, string &device){ + + arv_update_device_list(); + + int n_devices = arv_get_n_devices(); + + for(int i = 0; i< n_devices; i++){ + + if(id == i){ + + device = arv_get_device_id(i); + return true; + + } + } + + BOOST_LOG_SEV(logger, fail) << "Fail to retrieve camera with this ID."; + return false; + + } + + bool CameraGigeAravis::grabInitialization(){ + + frameCounter = 0; + + payload = arv_camera_get_payload (camera); + BOOST_LOG_SEV(logger, notification) << "Camera payload : " << payload; + + pixFormat = arv_camera_get_pixel_format(camera); + + arv_camera_get_exposure_time_bounds (camera, &exposureMin, &exposureMax); + BOOST_LOG_SEV(logger, notification) << "Camera exposure bound min : " << exposureMin; + BOOST_LOG_SEV(logger, notification) << "Camera 
exposure bound max : " << exposureMax; + + arv_camera_get_gain_bounds (camera, &gainMin, &gainMax); + BOOST_LOG_SEV(logger, notification) << "Camera gain bound min : " << gainMin; + BOOST_LOG_SEV(logger, notification) << "Camera gain bound max : " << gainMax; + + arv_camera_set_frame_rate(camera, 30); + + fps = arv_camera_get_frame_rate(camera); + BOOST_LOG_SEV(logger, notification) << "Camera frame rate : " << fps; + + capsString = arv_pixel_format_to_gst_caps_string(pixFormat); + BOOST_LOG_SEV(logger, notification) << "Camera format : " << capsString; + + gain = arv_camera_get_gain(camera); + BOOST_LOG_SEV(logger, notification) << "Camera gain : " << gain; + + exp = arv_camera_get_exposure_time(camera); + BOOST_LOG_SEV(logger, notification) << "Camera exposure : " << exp; + + cout << endl; + + cout << "DEVICE SELECTED : " << arv_camera_get_device_id(camera) << endl; + cout << "DEVICE NAME : " << arv_camera_get_model_name(camera) << endl; + cout << "DEVICE VENDOR : " << arv_camera_get_vendor_name(camera) << endl; + cout << "PAYLOAD : " << payload << endl; + cout << "Width : " << mWidth << endl + << "Height : " << mHeight << endl; + cout << "Exp Range : [" << exposureMin << " - " << exposureMax << "]" << endl; + cout << "Exp : " << exp << endl; + cout << "Gain Range : [" << gainMin << " - " << gainMax << "]" << endl; + cout << "Gain : " << gain << endl; + cout << "Fps : " << fps << endl; + cout << "Type : " << capsString << endl; + + cout << endl; + + // Create a new stream object. Open stream on Camera. 
+ stream = arv_camera_create_stream(camera, NULL, NULL); + + if(stream == NULL){ + + BOOST_LOG_SEV(logger, critical) << "Fail to create stream with arv_camera_create_stream()"; + return false; + + } + + if (ARV_IS_GV_STREAM(stream)){ + + bool arv_option_auto_socket_buffer = true; + bool arv_option_no_packet_resend = true; + unsigned int arv_option_packet_timeout = 20; + unsigned int arv_option_frame_retention = 100; + + if(arv_option_auto_socket_buffer){ + + g_object_set(stream, + // ARV_GV_STREAM_SOCKET_BUFFER_FIXED : socket buffer is set to a given fixed value. + // ARV_GV_STREAM_SOCKET_BUFFER_AUTO: socket buffer is set with respect to the payload size. + "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO, + // Socket buffer size, in bytes. + // Allowed values: >= G_MAXULONG + // Default value: 0 + "socket-buffer-size", 0, NULL); + + } + + if(arv_option_no_packet_resend){ + + // # packet-resend : Enables or disables the packet resend mechanism + + // If packet resend is disabled and a packet has been lost during transmission, + // the grab result for the returned buffer holding the image will indicate that + // the grab failed and the image will be incomplete. + // + // If packet resend is enabled and a packet has been lost during transmission, + // a request is sent to the camera. If the camera still has the packet in its + // buffer, it will resend the packet. If there are several lost packets in a + // row, the resend requests will be combined. + + g_object_set(stream, + // ARV_GV_STREAM_PACKET_RESEND_NEVER: never request a packet resend + // ARV_GV_STREAM_PACKET_RESEND_ALWAYS: request a packet resend if a packet was missing + // Default value: ARV_GV_STREAM_PACKET_RESEND_ALWAYS + "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL); + + } + + g_object_set(stream, + // # packet-timeout + + // The Packet Timeout parameter defines how long (in milliseconds) we will wait for + // the next expected packet before it initiates a resend request. 
+ + // Packet timeout, in µs. + // Allowed values: [1000,10000000] + // Default value: 40000 + "packet-timeout",/* (unsigned) arv_option_packet_timeout * 1000*/(unsigned)40000, + // # frame-retention + + // The Frame Retention parameter sets the timeout (in milliseconds) for the + // frame retention timer. Whenever detection of the leader is made for a frame, + // the frame retention timer starts. The timer resets after each packet in the + // frame is received and will timeout after the last packet is received. If the + // timer times out at any time before the last packet is received, the buffer for + // the frame will be released and will be indicated as an unsuccessful grab. + + // Packet retention, in µs. + // Allowed values: [1000,10000000] + // Default value: 200000 + "frame-retention", /*(unsigned) arv_option_frame_retention * 1000*/(unsigned) 200000,NULL); + + }else + return false; + + // Push 50 buffer in the stream input buffer queue. + for (int i = 0; i < 50; i++) + arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL)); + + return true; + + } + + void CameraGigeAravis::grabCleanse(){} + + bool CameraGigeAravis::acqStart(){ + + BOOST_LOG_SEV(logger, notification) << "Set camera to CONTINUOUS MODE"; + arv_camera_set_acquisition_mode(camera, ARV_ACQUISITION_MODE_CONTINUOUS); + + BOOST_LOG_SEV(logger, notification) << "Set camera TriggerMode to Off"; + arv_device_set_string_feature_value(arv_camera_get_device (camera), "TriggerMode" , "Off"); + + BOOST_LOG_SEV(logger, notification) << "Start acquisition on camera"; + arv_camera_start_acquisition(camera); + + return true; + + } + + void CameraGigeAravis::acqStop(){ + + arv_stream_get_statistics(stream, &nbCompletedBuffers, &nbFailures, &nbUnderruns); + + //cout << "Completed buffers = " << (unsigned long long) nbCompletedBuffers << endl; + //cout << "Failures = " << (unsigned long long) nbFailures << endl; + //cout << "Underruns = " << (unsigned long long) nbUnderruns << endl; + + 
BOOST_LOG_SEV(logger, notification) << "Completed buffers = " << (unsigned long long) nbCompletedBuffers; + BOOST_LOG_SEV(logger, notification) << "Failures = " << (unsigned long long) nbFailures; + BOOST_LOG_SEV(logger, notification) << "Underruns = " << (unsigned long long) nbUnderruns; + + BOOST_LOG_SEV(logger, notification) << "Stopping acquisition..."; + arv_camera_stop_acquisition(camera); + BOOST_LOG_SEV(logger, notification) << "Acquisition stopped."; + + BOOST_LOG_SEV(logger, notification) << "Unreferencing stream."; + g_object_unref(stream); + stream = NULL; + + BOOST_LOG_SEV(logger, notification) << "Unreferencing camera."; + g_object_unref(camera); + camera = NULL; + + } + + bool CameraGigeAravis::grabImage(Frame &newFrame){ + + ArvBuffer *arv_buffer; + //exp = arv_camera_get_exposure_time(camera); + + arv_buffer = arv_stream_timeout_pop_buffer(stream,2000000); //us + char *buffer_data; + size_t buffer_size; + + if(arv_buffer == NULL){ + + throw runtime_error("arv_buffer is NULL"); + return false; + + }else{ + + try{ + + if(arv_buffer_get_status(arv_buffer) == ARV_BUFFER_STATUS_SUCCESS){ + + //BOOST_LOG_SEV(logger, normal) << "Success to grab a frame."; + + buffer_data = (char *) arv_buffer_get_data (arv_buffer, &buffer_size); + + //Timestamping. 
+ //string acquisitionDate = TimeDate::localDateTime(microsec_clock::universal_time(),"%Y:%m:%d:%H:%M:%S"); + //BOOST_LOG_SEV(logger, normal) << "Date : " << acquisitionDate; + boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + string acquisitionDate = to_iso_extended_string(time); + + //BOOST_LOG_SEV(logger, normal) << "Date : " << acqDateInMicrosec; + + Mat image; + CamPixFmt imgDepth = MONO8; + int saturateVal = 0; + + if(pixFormat == ARV_PIXEL_FORMAT_MONO_8){ + + //BOOST_LOG_SEV(logger, normal) << "Creating Mat 8 bits ..."; + image = Mat(mHeight, mWidth, CV_8UC1, buffer_data); + imgDepth = MONO8; + saturateVal = 255; + + }else if(pixFormat == ARV_PIXEL_FORMAT_MONO_12){ + + //BOOST_LOG_SEV(logger, normal) << "Creating Mat 16 bits ..."; + image = Mat(mHeight, mWidth, CV_16UC1, buffer_data); + imgDepth = MONO12; + saturateVal = 4095; + + //double t3 = (double)getTickCount(); + + if(shiftBitsImage){ + + //BOOST_LOG_SEV(logger, normal) << "Shifting bits ..."; + + + unsigned short * p; + + for(int i = 0; i < image.rows; i++){ + p = image.ptr(i); + for(int j = 0; j < image.cols; j++) + p[j] = p[j] >> 4; + } + + //BOOST_LOG_SEV(logger, normal) << "Bits shifted."; + + } + + //t3 = (((double)getTickCount() - t3)/getTickFrequency())*1000; + //cout << "> Time shift : " << t3 << endl; + } + + //BOOST_LOG_SEV(logger, normal) << "Creating frame object ..."; + newFrame = Frame(image, gain, exp, acquisitionDate); + //BOOST_LOG_SEV(logger, normal) << "Setting date of frame ..."; + //newFrame.setAcqDateMicro(acqDateInMicrosec); + //BOOST_LOG_SEV(logger, normal) << "Setting fps of frame ..."; + newFrame.mFps = fps; + newFrame.mFormat = imgDepth; + //BOOST_LOG_SEV(logger, normal) << "Setting saturated value of frame ..."; + newFrame.mSaturatedValue = saturateVal; + newFrame.mFrameNumber = frameCounter; + frameCounter++; + + //BOOST_LOG_SEV(logger, normal) << "Re-pushing arv buffer in stream ..."; + arv_stream_push_buffer(stream, arv_buffer); + + 
return true; + + }else{ + + switch(arv_buffer_get_status(arv_buffer)){ + + case 0 : + cout << "ARV_BUFFER_STATUS_SUCCESS : the buffer contains a valid image"< 0 && frame.mHeight > 0) { + + arv_camera_set_region(camera, 0, 0,frame.mWidth,frame.mHeight); + arv_camera_get_region (camera, NULL, NULL, &mWidth, &mHeight); + + }else{ + + int sensor_width, sensor_height; + + arv_camera_get_sensor_size(camera, &sensor_width, &sensor_height); + + // Use maximum sensor size. + arv_camera_set_region(camera, 0, 0,sensor_width,sensor_height); + arv_camera_get_region (camera, NULL, NULL, &mWidth, &mHeight); + + } + + payload = arv_camera_get_payload (camera); + + pixFormat = arv_camera_get_pixel_format (camera); + + arv_camera_get_exposure_time_bounds (camera, &exposureMin, &exposureMax); + + arv_camera_get_gain_bounds (camera, &gainMin, &gainMax); + + arv_camera_set_frame_rate(camera, 1); + + fps = arv_camera_get_frame_rate(camera); + + capsString = arv_pixel_format_to_gst_caps_string(pixFormat); + + gain = arv_camera_get_gain(camera); + exp = arv_camera_get_exposure_time(camera); + + cout << endl; + + cout << "DEVICE SELECTED : " << arv_camera_get_device_id(camera) << endl; + cout << "DEVICE NAME : " << arv_camera_get_model_name(camera) << endl; + cout << "DEVICE VENDOR : " << arv_camera_get_vendor_name(camera) << endl; + cout << "PAYLOAD : " << payload << endl; + cout << "Width : " << mWidth << endl + << "Height : " << mHeight << endl; + cout << "Exp Range : [" << exposureMin << " - " << exposureMax << "]" << endl; + cout << "Exp : " << exp << endl; + cout << "Gain Range : [" << gainMin << " - " << gainMax << "]" << endl; + cout << "Gain : " << gain << endl; + cout << "Fps : " << fps << endl; + cout << "Type : " << capsString << endl; + + cout << endl; + + if(arv_camera_is_gv_device (camera)) { + + // http://www.baslerweb.com/media/documents/AW00064902000%20Control%20Packet%20Timing%20With%20Delays.pdf + // 
https://github.com/GNOME/aravis/blob/06ac777fc6d98783680340f1c3f3ea39d2780974/src/arvcamera.c + + // Configure the inter packet delay to insert between each packet for the current stream + // channel. This can be used as a crude flow-control mechanism if the application or the network + // infrastructure cannot keep up with the packets coming from the device. + arv_camera_gv_set_packet_delay (camera, 4000); + + // Specifies the stream packet size, in bytes, to send on the selected channel for a GVSP transmitter + // or specifies the maximum packet size supported by a GVSP receiver. + arv_camera_gv_set_packet_size (camera, 1444); + + } + + // Create a new stream object. Open stream on Camera. + stream = arv_camera_create_stream(camera, NULL, NULL); + + if(stream != NULL){ + + if(ARV_IS_GV_STREAM(stream)){ + + bool arv_option_auto_socket_buffer = true; + bool arv_option_no_packet_resend = true; + unsigned int arv_option_packet_timeout = 20; + unsigned int arv_option_frame_retention = 100; + + if(arv_option_auto_socket_buffer){ + + g_object_set(stream, "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO, "socket-buffer-size", 0, NULL); + + } + + if(arv_option_no_packet_resend){ + + g_object_set(stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL); + + } + + g_object_set(stream, "packet-timeout", (unsigned)40000, "frame-retention", (unsigned) 200000,NULL); + + } + + // Queue a single buffer in the stream input buffer queue (one buffer is enough for a single-frame grab). + arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL)); + + // Set acquisition mode to single frame (one buffer per AcquisitionStart). + arv_camera_set_acquisition_mode(camera, ARV_ACQUISITION_MODE_SINGLE_FRAME); + + // Very useful to avoid arv buffer timeout status + sleep(1); + + // Start acquisition. + arv_camera_start_acquisition(camera); + + // Get image buffer. + ArvBuffer *arv_buffer = arv_stream_timeout_pop_buffer(stream, frame.mExposure + 5000000); //us + + char *buffer_data; + size_t buffer_size; + + cout << ">> Acquisition in progress... 
(Please wait)" << endl; + + if (arv_buffer != NULL){ + + if(arv_buffer_get_status(arv_buffer) == ARV_BUFFER_STATUS_SUCCESS){ + + buffer_data = (char *) arv_buffer_get_data (arv_buffer, &buffer_size); + + //Timestamping. + boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + + if(pixFormat == ARV_PIXEL_FORMAT_MONO_8){ + + Mat image = Mat(mHeight, mWidth, CV_8UC1, buffer_data); + image.copyTo(frame.mImg); + + }else if(pixFormat == ARV_PIXEL_FORMAT_MONO_12){ + + // Unsigned short image. + Mat image = Mat(mHeight, mWidth, CV_16UC1, buffer_data); + + // http://www.theimagingsource.com/en_US/support/documentation/icimagingcontrol-class/PixelformatY16.htm + // Some sensors only support 10-bit or 12-bit pixel data. In this case, the least significant bits are don't-care values. + if(shiftBitsImage){ + unsigned short * p; + for(int i = 0; i < image.rows; i++){ + p = image.ptr(i); + for(int j = 0; j < image.cols; j++) p[j] = p[j] >> 4; + } + } + + image.copyTo(frame.mImg); + + } + + frame.mDate = TimeDate::splitIsoExtendedDate(to_iso_extended_string(time)); + frame.mFps = arv_camera_get_frame_rate(camera); + + res = true; + + }else{ + + switch(arv_buffer_get_status(arv_buffer)){ + + case 0 : + + cout << "ARV_BUFFER_STATUS_SUCCESS : the buffer contains a valid image"<> Completed buffers = " << (unsigned long long) nbCompletedBuffers << endl; + cout << ">> Failures = " << (unsigned long long) nbFailures << endl; + //cout << ">> Underruns = " << (unsigned long long) nbUnderruns << endl; + + // Stop acquisition. 
+ arv_camera_stop_acquisition(camera); + + g_object_unref(stream); + stream = NULL; + g_object_unref(camera); + camera = NULL; + + } + + return res; + + } + + void CameraGigeAravis::saveGenicamXml(string p){ + + const char *xml; + + size_t size; + + xml = arv_device_get_genicam_xml (arv_camera_get_device(camera), &size); + + if (xml != NULL){ + + ofstream infFile; + string infFilePath = p + "genicam.xml"; + infFile.open(infFilePath.c_str()); + infFile << string ( xml, size ); + infFile.close(); + + } + + } + + //https://github.com/GNOME/aravis/blob/b808d34691a18e51eee72d8cac6cfa522a945433/src/arvtool.c + void CameraGigeAravis::getAvailablePixelFormats() { + + ArvGc *genicam; + ArvDevice *device; + ArvGcNode *node; + + if(camera != NULL) { + + device = arv_camera_get_device(camera); + genicam = arv_device_get_genicam(device); + node = arv_gc_get_node(genicam, "PixelFormat"); + + if (ARV_IS_GC_ENUMERATION (node)) { + + const GSList *childs; + const GSList *iter; + vector pixfmt; + + cout << ">> Device pixel formats :" << endl; + + childs = arv_gc_enumeration_get_entries (ARV_GC_ENUMERATION (node)); + for (iter = childs; iter != NULL; iter = iter->next) { + if (arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (iter->data), NULL)) { + + if(arv_gc_feature_node_is_available (ARV_GC_FEATURE_NODE (iter->data), NULL)) { + + { + string fmt = string(arv_gc_feature_node_get_name(ARV_GC_FEATURE_NODE (iter->data))); + std::transform(fmt.begin(), fmt.end(),fmt.begin(), ::toupper); + pixfmt.push_back(fmt); + cout << "- " << fmt << endl; + + } + } + } + } + + // Compare found pixel formats to currently formats supported by freeture + + cout << endl << ">> Available pixel formats :" << endl; + EParser fmt; + + for( int i = 0; i != pixfmt.size(); i++ ) { + + if(fmt.isEnumValue(pixfmt.at(i))) { + + cout << "- " << pixfmt.at(i) << " available --> ID : " << fmt.parseEnum(pixfmt.at(i)) << endl; + + } + + } + + }else { + + cout << ">> Available pixel formats not found." 
<< endl; + + } + + g_object_unref(device); + + } + + } + + void CameraGigeAravis::getExposureBounds(double &eMin, double &eMax){ + + double exposureMin = 0.0; + double exposureMax = 0.0; + + arv_camera_get_exposure_time_bounds(camera, &exposureMin, &exposureMax); + + eMin = exposureMin; + eMax = exposureMax; + + } + + double CameraGigeAravis::getExposureTime(){ + + return arv_camera_get_exposure_time(camera); + + } + + void CameraGigeAravis::getGainBounds(int &gMin, int &gMax){ + + double gainMin = 0.0; + double gainMax = 0.0; + + arv_camera_get_gain_bounds(camera, &gainMin, &gainMax); + + gMin = gainMin; + gMax = gainMax; + + } + + bool CameraGigeAravis::getPixelFormat(CamPixFmt &format){ + + ArvPixelFormat pixFormat = arv_camera_get_pixel_format(camera); + + switch(pixFormat){ + + case ARV_PIXEL_FORMAT_MONO_8 : + + format = MONO8; + + break; + + case ARV_PIXEL_FORMAT_MONO_12 : + + format = MONO12; + + break; + + default : + + return false; + + break; + + } + + return true; + } + + + bool CameraGigeAravis::getFrameSize(int &w, int &h) { + + if(camera != NULL) { + + int ww = 0, hh = 0; + arv_camera_get_region(camera, NULL, NULL, &ww, &h); + w = ww; + h = hh; + + } + + return false; + + } + + bool CameraGigeAravis::getFPS(double &value){ + + if(camera != NULL) { + + value = arv_camera_get_frame_rate(camera); + return true; + + } + + return false; + + } + + string CameraGigeAravis::getModelName(){ + + return arv_camera_get_model_name(camera); + + } + + bool CameraGigeAravis::setExposureTime(double val){ + + double expMin, expMax; + + arv_camera_get_exposure_time_bounds(camera, &expMin, &expMax); + + if(camera != NULL){ + + if(val >= expMin && val <= expMax) { + + exp = val; + //val = 10; + arv_camera_set_exposure_time(camera, val); + + }else{ + + cout << "> Exposure value (" << val << ") is not in range [ " << expMin << " - " << expMax << " ]" << endl; + return false; + + } + + return true; + + } + + return false; + } + + bool CameraGigeAravis::setGain(int val){ + + 
double gMin, gMax; + + arv_camera_get_gain_bounds (camera, &gMin, &gMax); + + if (camera != NULL){ + + if((double)val >= gMin && (double)val <= gMax){ + + gain = val; + arv_camera_set_gain (camera, (double)val); + + }else{ + + cout << "> Gain value (" << val << ") is not in range [ " << gMin << " - " << gMax << " ]" << endl; + BOOST_LOG_SEV(logger, fail) << "> Gain value (" << val << ") is not in range [ " << gMin << " - " << gMax << " ]"; + return false; + + } + + return true; + + } + + return false; + + } + + bool CameraGigeAravis::setFPS(double fps){ + + //fps = 30 + if (camera != NULL){ + + arv_camera_set_frame_rate(camera, fps); + + return true; + + } + + return false; + + } + + bool CameraGigeAravis::setPixelFormat(CamPixFmt depth){ + + if (camera != NULL){ + + switch(depth){ + + case MONO8 : + { + arv_camera_set_pixel_format(camera, ARV_PIXEL_FORMAT_MONO_8); + } + break; + + case MONO12 : + { + arv_camera_set_pixel_format(camera, ARV_PIXEL_FORMAT_MONO_12); + } + break; + + } + + return true; + } + + return false; + + } + +#endif diff --git a/CameraGigeAravis.h b/CameraGigeAravis.h new file mode 100644 index 0000000..cf68987 --- /dev/null +++ b/CameraGigeAravis.h @@ -0,0 +1,168 @@ +/* CameraGigeAravis.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2016 Yoan Audureau -- FRIPON-GEOPS-UPSUD +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. 
+* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 21/01/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CameraGigeAravis.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 21/01/2015 +* \brief Use Aravis library to pilot GigE Cameras. +* https://wiki.gnome.org/action/show/Projects/Aravis?action=show&redirect=Aravis +*/ + +#pragma once + +#include "config.h" + +#ifdef LINUX + + #include "opencv2/highgui/highgui.hpp" + #include + + #include + #include + #include "Frame.h" + #include "TimeDate.h" + #include "Camera.h" + #include "arv.h" + #include "arvinterface.h" + #include + #include + #include "EParser.h" + + #define BOOST_LOG_DYN_LINK 1 + + #include + #include + #include + #include + #include + #include + #include + #include + #include + #include + #include "ELogSeverityLevel.h" + + using namespace cv; + using namespace std; + + class CameraGigeAravis: public Camera{ + + private: + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init{ + + public: + + Init(){ + + logger.add_attribute("ClassName", boost::log::attributes::constant("CameraGigeAravis")); + + } + + }initializer; + + ArvCamera *camera; // Camera to control. + ArvPixelFormat pixFormat; // Image format. + ArvStream *stream; // Object for video stream reception. + int mWidth; // Camera region's width. + int mHeight; // Camera region's height. + double fps; // Camera acquisition frequency. + double gainMin; // Camera minimum gain. + double gainMax; // Camera maximum gain. + unsigned int payload; // Width x height. + double exposureMin; // Camera's minimum exposure time. + double exposureMax; // Camera's maximum exposure time. + const char *capsString; + int gain; // Camera's gain. + double exp; // Camera's exposure time. + bool shiftBitsImage; // For example : bits are shifted for dmk's frames. 
+ guint64 nbCompletedBuffers; // Number of frames successfully received. + guint64 nbFailures; // Number of frames failed to be received. + guint64 nbUnderruns; + int frameCounter; // Counter of success received frames. + + public : + + CameraGigeAravis(bool shift); + + CameraGigeAravis(); + + ~CameraGigeAravis(); + + vector> getCamerasList(); + + bool listCameras(); + + bool createDevice(int id); + + bool grabInitialization(); + + void grabCleanse(); + + bool acqStart(); + + void acqStop(); + + bool grabImage(Frame& newFrame); + + bool grabSingleImage(Frame &frame, int camID); + + bool getDeviceNameById(int id, string &device); + + void getExposureBounds(double &eMin, double &eMax); + + void getGainBounds(int &gMin, int &gMax); + + bool getPixelFormat(CamPixFmt &format); + + bool getFrameSize(int &w, int &h); + + bool getFPS(double &value); + + string getModelName(); + + double getExposureTime(); + + bool setExposureTime(double exp); + + bool setGain(int gain); + + bool setFPS(double fps); + + bool setPixelFormat(CamPixFmt depth); + + void saveGenicamXml(string p); + + bool setSize(int width, int height, bool customSize); + + void getAvailablePixelFormats(); + + }; + +#endif diff --git a/CameraGigePylon.cpp b/CameraGigePylon.cpp new file mode 100644 index 0000000..b3f30e1 --- /dev/null +++ b/CameraGigePylon.cpp @@ -0,0 +1,952 @@ +/* + CameraGigePylon.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. 
+* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 20/07/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CameraGigePylon.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 03/07/2014 +* \brief Use Pylon library to pilot GigE Cameras. +*/ + +#include "CameraGigePylon.h" + +#ifdef USE_PYLON + +boost::log::sources::severity_logger< LogSeverityLevel > CameraGigePylon::logger; + +CameraGigePylon::Init CameraGigePylon::initializer; + +CameraGigePylon::CameraGigePylon(){ + + pCamera = NULL; + pStreamGrabber = NULL; + connectionStatus = false; + mFrameCounter = 0; + mExposureAvailable = true; + mGainAvailable = true; + mInputDeviceType = CAMERA; + + // Enumerate GigE cameras + pTlFactory = &CTlFactory::GetInstance(); + pTl = pTlFactory->CreateTl(CBaslerGigECamera ::DeviceClass()); + pTl->EnumerateDevices(devices); + +} + +vector> CameraGigePylon::getCamerasList() { + + vector> camerasList; + + try { + + int id = 0; + if(!devices.empty()) { + DeviceInfoList_t::const_iterator it; + for(it = devices.begin(); it != devices.end(); ++it ) { + if(!devices.empty()){ + if(devices[id].GetFullName().find_first_of("Basler")==0||devices[id].GetFullName().find_first_of("Prosilica")==0) { + pair c; + c.first = id; + c.second = "NAME[" + devices[id].GetModelName() + "] S/N[" + devices[id].GetSerialNumber() + "] SDK[PYLON]"; + camerasList.push_back(c); + } + } + id++; + } + } + + }catch (GenICam::GenericException &e){ + + BOOST_LOG_SEV(logger,fail) << "An exception occured : " << e.GetDescription() ; + cout << "An exception occured : " << e.GetDescription() << endl; + + } + + return camerasList; + 
+} + +CameraGigePylon::~CameraGigePylon(void){ + + if(pStreamGrabber != NULL){ + delete pStreamGrabber; + } + + if(pCamera != NULL) { + if(pCamera->IsOpen()) pCamera->Close(); + delete pCamera; + } + + if(pTlFactory != NULL) + pTlFactory->ReleaseTl(pTl); + +} + +bool CameraGigePylon::listCameras() { + + try { + + cout << endl << "------------ GIGE CAMERAS WITH PYLON -----------" << endl << endl; + + int id = 0; + DeviceInfoList_t::const_iterator it; + + for(it = devices.begin(); it != devices.end(); ++it ) { + if(!devices.empty()){ + if(devices[id].GetFullName().find_first_of("Basler")==0||devices[id].GetFullName().find_first_of("Prosilica")==0) { + cout << "-> ID[" << id << "] NAME[" << devices[id].GetModelName().c_str() << "] S/N[" << devices[id].GetSerialNumber().c_str() <<"]"<< endl; + } + } + id++; + } + + cout << endl << "------------------------------------------------" << endl << endl; + + }catch (GenICam::GenericException &e){ + + BOOST_LOG_SEV(logger,fail) << "An exception occured : " << e.GetDescription() ; + cout << "An exception occured : " << e.GetDescription() << endl; + return false; + } + + return true; + +} + +bool CameraGigePylon::createDevice(int id){ + + try { + + if(!devices.empty()) { + + // Create a camera object + if(id >= 0 && id < devices.size()){ + pCamera = new CBaslerGigECamera( pTl->CreateDevice((devices[id]) )); + }else { + return false; + } + + // Open the camera object + pCamera->Open(); + + if(pCamera->IsOpen()) + BOOST_LOG_SEV(logger,notification) << "Success to open the device."; + + return true; + } + + }catch (GenICam::GenericException &e){ + + std::cout << e.GetDescription() << endl; + return false; + } + + return false; + +} + +bool CameraGigePylon::getDeviceNameById(int id, string &device) { + + if(!devices.empty()) { + cout << " Camera (ID:" << id << ") detected " << endl; + cout << " Name : " << devices[id].GetModelName().c_str() << endl; + return true; + } + + return false; + +} + +bool 
CameraGigePylon::grabInitialization(){ + + if(pCamera){ + + if(pCamera->IsOpen()){ + + try{ + + //Disable acquisition start trigger if available + { + GenApi::IEnumEntry* acquisitionStart = pCamera->TriggerSelector.GetEntry( TriggerSelector_AcquisitionStart); + + if ( acquisitionStart && GenApi::IsAvailable( acquisitionStart)){ + + pCamera->TriggerSelector.SetValue( TriggerSelector_AcquisitionStart); + pCamera->TriggerMode.SetValue( TriggerMode_Off); + + } + } + + //Disable frame start trigger if available + { + GenApi::IEnumEntry* frameStart = pCamera->TriggerSelector.GetEntry( TriggerSelector_FrameStart); + + if ( frameStart && GenApi::IsAvailable( frameStart)){ + + pCamera->TriggerSelector.SetValue( TriggerSelector_FrameStart); + pCamera->TriggerMode.SetValue( TriggerMode_Off); + + } + } + + //Set acquisition mode + pCamera->AcquisitionMode.SetValue(AcquisitionMode_Continuous); + + //Set exposure settings + pCamera->ExposureMode.SetValue(ExposureMode_Timed); + + if (!pStreamGrabber){ + + pStreamGrabber = new (CBaslerGigECamera::StreamGrabber_t)(pCamera->GetStreamGrabber(0)); + + } + + pStreamGrabber->Open(); + + // Get the image buffer size + const size_t ImageSize = (size_t)(pCamera->PayloadSize.GetValue()); + + // We won't use image buffers greater than ImageSize + pStreamGrabber->MaxBufferSize.SetValue(ImageSize); + + // We won't queue more than nbBuffers image buffers at a time + pStreamGrabber->MaxNumBuffer.SetValue(nbBuffers); + + pStreamGrabber->PrepareGrab(); + + for (int i = 0; i < nbBuffers; ++i){ + + //ppBuffers[i] = new unsigned char[ImageSize]; + if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono8){ + + ppBuffersUC[i] = new uint8_t[ImageSize]; + handles[i] = pStreamGrabber->RegisterBuffer(ppBuffersUC[i], ImageSize); + + } + + if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono12){ + + ppBuffersUS[i] = new uint16_t[ImageSize]; + handles[i] = pStreamGrabber->RegisterBuffer(ppBuffersUS[i], ImageSize); + + } + + 
pStreamGrabber->QueueBuffer(handles[i], NULL); + } + + return true; + + }catch (GenICam::GenericException &e){ + + // Error handling. + BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription(); + cout << "An exception occurred." << e.GetDescription() << endl; + return false; + + } + + + } + } + + return false; + +} + +void CameraGigePylon::getAvailablePixelFormats() { + + vector pixfmt; + + if(pCamera != NULL) { + + if(pCamera->IsOpen()) { + + INodeMap *nodemap = pCamera->GetNodeMap(); + // Access the PixelFormat enumeration type node. + CEnumerationPtr pixelFormat( nodemap->GetNode( "PixelFormat")); + // Check if the pixel format Mono8 is available. + if(IsAvailable(pixelFormat->GetEntryByName( "Mono8"))) + pixfmt.push_back("MONO8"); + + // Check if the pixel format Mono12 is available. + if(IsAvailable(pixelFormat->GetEntryByName( "Mono12"))) + pixfmt.push_back("MONO12"); + + std::cout << endl << ">> Available pixel formats :" << endl; + EParser fmt; + + for( int i = 0; i != pixfmt.size(); i++ ) { + if(fmt.isEnumValue(pixfmt.at(i))) { + std::cout << "- " << pixfmt.at(i) << " available --> ID : " << fmt.parseEnum(pixfmt.at(i)) << endl; + } + } + } + } +} + +void CameraGigePylon::grabCleanse(){ + + if(pCamera){ + + if(pCamera->IsOpen()){ + + try{ + + // Flush the input queue, grabbing may have failed + BOOST_LOG_SEV(logger,notification) << "Flush the input queue."; + + if(pStreamGrabber != NULL) { + + pStreamGrabber->CancelGrab(); + + // Consume all items from the output queue + GrabResult Result; + + while (pStreamGrabber->GetWaitObject().Wait(0)){ + + pStreamGrabber->RetrieveResult(Result); + + //if (Result.Status() == Canceled) + //BOOST_LOG_SEV(logger,notification) << "Got canceled buffer."; + + } + + // Deregister and free buffers + for(int i = 0; i < nbBuffers; ++i){ + + pStreamGrabber->DeregisterBuffer(handles[i]); + + //BOOST_LOG_SEV(logger,notification) << "Deregister and free buffer n° "<< i ; + + if(pCamera->PixelFormat.GetValue() == 
PixelFormat_Mono8){ + + delete [] ppBuffersUC[i]; + + }else if (pCamera->PixelFormat.GetValue() == PixelFormat_Mono12){ + + delete [] ppBuffersUS[i]; + + } + } + + // Free all resources used for grabbing + pStreamGrabber->FinishGrab(); + pStreamGrabber->Close(); + + if(pStreamGrabber != NULL){ + delete pStreamGrabber; + pStreamGrabber = NULL; + } + + if(pCamera != NULL) { + pCamera->Close(); + delete pCamera; + pCamera = NULL; + } + + if(pTlFactory != NULL) + pTlFactory->ReleaseTl(pTl); + pTlFactory = NULL; + } + + }catch (GenICam::GenericException &e){ + + // Error handling. + BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription(); + cout << "An exception occurred." << e.GetDescription() << endl; + + } + } + } +} + +bool CameraGigePylon::acqStart(){ + + if(pCamera!=NULL) { + pCamera->AcquisitionStart.Execute(); + return true; + } + + return false; + +} + +void CameraGigePylon::acqStop(){ + + pCamera->AcquisitionStop.Execute(); + +} + +bool CameraGigePylon::grabImage(Frame &newFrame){ + + bool res = true; + + if(pStreamGrabber->GetWaitObject().Wait(3000)){ + + // Get an item from the grabber's output queue + if(!pStreamGrabber->RetrieveResult(result)){ + + BOOST_LOG_SEV(logger,fail) << "Fail to retrieve an item from the output queue."; + res = false; + + } + + CGrabResultPtr ptrGrabResult; + + if(result.Succeeded()){ + + //Timestamping. 
+ boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + + Mat newImg; + + if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono8){ + + newImg = Mat(pCamera->Height.GetValue(), pCamera->Width.GetValue(), CV_8UC1, Scalar(0)); + + }else if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono12){ + + newImg = Mat(pCamera->Height.GetValue(), pCamera->Width.GetValue(), CV_16UC1, Scalar(0)); + + } + + memcpy(newImg.ptr(), result.Buffer(), pCamera->PayloadSize.GetValue()); + + newFrame = Frame( newImg, + pCamera->GainRaw.GetValue(), + (double)pCamera->ExposureTimeAbs.GetValue(), + to_iso_extended_string(time)); + + newFrame.mFps = pCamera->AcquisitionFrameRateAbs.GetValue(); + newFrame.mFrameNumber = mFrameCounter; + mFrameCounter++; + + if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono8){ + + newFrame.mFormat = MONO8; + newFrame.mSaturatedValue = 255; + + }else if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono12){ + + newFrame.mFormat = MONO12; + newFrame.mSaturatedValue = 4095; + + } + + }else{ + + BOOST_LOG_SEV(logger,fail) << "Fail to grab a frame : " << result.GetErrorDescription(); + res = false; + + } + + // Requeue the buffer + pStreamGrabber->QueueBuffer( result.Handle(), result.Context() ); + + }else{ + + BOOST_LOG_SEV(logger,fail) <<"Fail to grab a frame (timeout) : " << result.GetErrorDescription(); + res = false; + } + + return res; + +} + +bool CameraGigePylon::setSize(int width, int height, bool customSize) { + + if(pCamera) { + + try{ + + if (pCamera->IsAttached() && pCamera->IsOpen()){ + + if(customSize) { + + BOOST_LOG_SEV(logger,notification) << "Set custom size to : " << width << "x" << height; + pCamera->Width.SetValue(width); + pCamera->Height.SetValue(height); + + // Default is maximum size + }else { + + BOOST_LOG_SEV(logger,notification) << "Set size to : " << pCamera->Width.GetMax() << "x" << pCamera->Height.GetMax(); + pCamera->Width.SetValue(pCamera->Width.GetMax()); + 
pCamera->Height.SetValue(pCamera->Height.GetMax()); + } + + return true; + + }else{ + + BOOST_LOG_SEV(logger,fail) << "Can't access size image. Camera not opened or not attached." << endl; + + } + + }catch (GenICam::GenericException &e){ + + // Error handling + BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription(); + + } + } + + return false; + +} + +bool CameraGigePylon::grabSingleImage(Frame &frame, int camID){ + + try { + + // Enumerate GigE cameras + pTlFactory = &CTlFactory::GetInstance(); + pTl = pTlFactory->CreateTl(CBaslerGigECamera ::DeviceClass()); + + if (((camID + 1 ) > pTl->EnumerateDevices(devices)) || camID < 0){ + + throw "Camera ID not correct. Can't be found."; + + }else { + + cout << ">> Camera (ID:" << camID << ") found. " << endl; + + } + + // Create an instant camera object with the correct id camera device. + CInstantCamera camera( CTlFactory::GetInstance().CreateDevice(devices[camID].GetFullName())); + + INodeMap& nodemap = camera.GetNodeMap(); + + // Open the camera for accessing the parameters. + camera.Open(); + + CIntegerPtr width(nodemap.GetNode("Width")); + CIntegerPtr height(nodemap.GetNode("Height")); + + if(frame.mWidth > 0 && frame.mHeight > 0) { + + width->SetValue(frame.mWidth); + height->SetValue(frame.mHeight); + + }else{ + + // Set width and height to the maximum sensor's size. + width->SetValue(width->GetMax()); + height->SetValue(height->GetMax()); + + } + + // Set pixel format. + // Access the PixelFormat enumeration type node. 
+ CEnumerationPtr pixelFormat(nodemap.GetNode("PixelFormat")); + + if(frame.mFormat == MONO8) { + + if(IsAvailable(pixelFormat->GetEntryByName("Mono8"))){ + pixelFormat->FromString("Mono8"); + + }else{ + cout << ">> Fail to set pixel format to MONO_8" << endl; + return false; + } + + }else if(frame.mFormat == MONO12){ + + if(IsAvailable(pixelFormat->GetEntryByName("Mono12"))){ + pixelFormat->FromString("Mono12"); + + }else{ + cout << ">> Fail to set pixel format to MONO_12" << endl; + return false; + } + + }else{ + + cout << ">> No depth specified for the frame container." << endl; + return false; + } + + CEnumerationPtr exposureAuto( nodemap.GetNode( "ExposureAuto")); + if ( IsWritable( exposureAuto)){ + exposureAuto->FromString("Off"); + cout << ">> Exposure auto disabled." << endl; + } + + // Set exposure. + CIntegerPtr ExposureTimeRaw(nodemap.GetNode("ExposureTimeRaw")); + + if(ExposureTimeRaw.IsValid()) { + + if(frame.mExposure >= ExposureTimeRaw->GetMin() && frame.mExposure <= ExposureTimeRaw->GetMax()) { + + ExposureTimeRaw->SetValue(frame.mExposure); + + }else { + + ExposureTimeRaw->SetValue(ExposureTimeRaw->GetMin()); + cout << ">> Exposure has been setted with the minimum available value." << endl; + cout << ">> The available exposure range is [" << ExposureTimeRaw->GetMin() << "-" << ExposureTimeRaw->GetMax() << "] (us)" << endl; + } + + }else { + + cout << ">> Fail to set exposure value." << endl; + return false; + } + + // Disable auto gain. + + CEnumerationPtr gainAuto( nodemap.GetNode( "GainAuto")); + if ( IsWritable( gainAuto)){ + gainAuto->FromString("Off"); + cout << ">> Gain auto disabled." << endl; + } + + // Set gain. + // Access the GainRaw integer type node. This node is available for Firewire and GigE Devices. 
+ CIntegerPtr gainRaw(nodemap.GetNode("GainRaw")); + if(gainRaw.IsValid()) { + + if(frame.mGain >= gainRaw->GetMin() && frame.mGain <= gainRaw->GetMax()) { + + gainRaw->SetValue(frame.mGain); + + }else { + + gainRaw->SetValue(gainRaw->GetMin()); + cout << ">> Gain has been setted to the minimum available value." << endl; + cout << ">> The available gain range is [" << gainRaw->GetMin() << "-" << gainRaw->GetMax() << "]" << endl; + } + } + + camera.Close(); + + // This smart pointer will receive the grab result data. + CGrabResultPtr ptrGrabResult; + + cout << ">> Acquisition in progress... (Please wait)" << endl; + + int timeout = 1000 + frame.mExposure/1000; + + camera.GrabOne(timeout , ptrGrabResult); + + Mat newImg; + + // Image grabbed successfully ? + if(ptrGrabResult->GrabSucceeded()){ + + //Timestamping. + boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + string acqDateInMicrosec = to_iso_extended_string(time); + + frame.mDate = TimeDate::splitIsoExtendedDate(to_iso_extended_string(time)); + frame.mFps = 0; + + if(ptrGrabResult->GetPixelType()== PixelType_Mono8) { + + newImg = Mat(ptrGrabResult->GetHeight(), ptrGrabResult->GetWidth(), CV_8UC1, Scalar(0)); + + }else if(ptrGrabResult->GetPixelType()== PixelType_Mono12) { + + newImg = Mat(ptrGrabResult->GetHeight(), ptrGrabResult->GetWidth(), CV_16UC1, Scalar(0)); + + } + + memcpy(newImg.ptr(), ptrGrabResult->GetBuffer(), ptrGrabResult->GetPayloadSize()); + + newImg.copyTo(frame.mImg); + + return true; + + } + + }catch(GenICam::GenericException &e) { + + BOOST_LOG_SEV(logger,fail) << e.GetDescription(); + + }catch(exception& e) { + + BOOST_LOG_SEV(logger,fail) << e.what(); + + }catch(const char * msg) { + + cout << msg << endl; + BOOST_LOG_SEV(logger,fail) << msg; + + } + + if(pTlFactory != NULL) { + pTlFactory->ReleaseTl(pTl); + pTlFactory = NULL; + } + + return false; +} + +void CameraGigePylon::getExposureBounds(double &eMin, double &eMax){ + + INodeMap *nodemap = 
pCamera->GetNodeMap(); + + CIntegerPtr exposureTimeRaw(nodemap->GetNode("ExposureTimeRaw")); + + if(exposureTimeRaw.IsValid()) { + + eMin = exposureTimeRaw->GetMin(); + eMax = exposureTimeRaw->GetMax(); + + } + +} + +void CameraGigePylon::getGainBounds(int &gMin, int &gMax){ + + INodeMap *nodemap = pCamera->GetNodeMap(); + + CIntegerPtr gainRaw(nodemap->GetNode("GainRaw")); + + if(gainRaw.IsValid()) { + + gMin = gainRaw->GetMin(); + gMax = gainRaw->GetMax(); + + } + +} + +bool CameraGigePylon::getPixelFormat(CamPixFmt &format){ + + if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono8){ + + format = MONO8; + return true; + + }else if(pCamera->PixelFormat.GetValue() == PixelFormat_Mono12){ + + format = MONO12; + + return true; + + } + + return false; + +} + +bool CameraGigePylon::getFrameSize(int &w , int &h) { + + if(pCamera){ + + try{ + + if (pCamera->IsAttached() && pCamera->IsOpen()){ + + w = pCamera->Width.GetValue(); + h = pCamera->Height.GetValue(); + + return true; + + }else{ + + BOOST_LOG_SEV(logger,fail) << "Can't access width image. Camera not opened or not attached." << endl; + + } + + }catch (GenICam::GenericException &e){ + + // Error handling + BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription(); + + } + } + + return false; + +} + +bool CameraGigePylon::getFPS(double &value) { + + if(pCamera!=NULL) { + value = pCamera->ResultingFrameRateAbs.GetValue(); + return true; + } + + value = 0; + return false; + +} + +string CameraGigePylon::getModelName(){ + return ""; +} + +bool CameraGigePylon::setExposureTime(double exposition) { + + if(pCamera){ + + try{ + + if( pCamera->IsAttached() && pCamera->IsOpen() ){ + + // Check whether auto exposure is available + if (IsWritable( pCamera->ExposureAuto)){ + + // Disable auto exposure. + cout << "Disable ExposureAuto." 
<< endl; + pCamera->ExposureAuto.SetValue(ExposureAuto_Off); + + } + + pCamera->ExposureTimeAbs = exposition; + + }else{ + + std::cout << "Camera not opened or not attached" << endl; + } + + return true; + + }catch (GenICam::GenericException &e){ + + // Error handling + BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription(); + cout << endl << ">> " << e.GetDescription() << endl; + return false; + } + } + + return false; + +} + +bool CameraGigePylon::setGain(int gain){ + + if(pCamera){ + try{ + + if( pCamera->IsAttached() && pCamera->IsOpen() ){ + + // Check whether auto exposure is available + if (IsWritable( pCamera->GainAuto)){ + + // Disable auto exposure. + cout << "Disable GainAuto." << endl; + pCamera->GainAuto.SetValue(GainAuto_Off); + + } + + pCamera->GainRaw = gain; + + }else{ + + BOOST_LOG_SEV(logger,fail) << "Camera not opened or not attached"; + + } + + return true; + + }catch (GenICam::GenericException &e){ + + // Error handling + BOOST_LOG_SEV(logger,fail) << "An exception occurred." << e.GetDescription(); + cout << endl << ">> " << e.GetDescription() << endl; + return false; + } + } + + return false; + +} + +bool CameraGigePylon::setFPS(double fps){ + + pCamera->AcquisitionFrameRateAbs = fps; + return true; +} + +bool CameraGigePylon::setPixelFormat(CamPixFmt format){ + + Basler_GigECamera::PixelFormatEnums fpix; + + if(format == MONO8 ){ + + fpix = PixelFormat_Mono8; + + } + else if (format == MONO12 ){ + + fpix = PixelFormat_Mono12; + + } + + if (pCamera){ + + try{ + if(pCamera->IsAttached() && pCamera->IsOpen()){ + + pCamera->PixelFormat.SetValue(fpix); + + }else{ + + BOOST_LOG_SEV(logger,fail) << "Camera not opened or not attached"; + + } + } + catch (GenICam::GenericException &e){ + + // Error handling + BOOST_LOG_SEV(logger,fail) << "An exception occurred." 
<< e.GetDescription(); + cout << endl << ">> " << e.GetDescription() << endl; + + } + + return true; + } + + return false; + +} + +double CameraGigePylon::getExposureTime(){ + + if(pCamera!=0) + return pCamera->ExposureTimeAbs.GetValue(); + else + return 0; + +} + +/* +int CameraGigePylon::getGain(){ + + return (int)(pCamera->GainRaw.GetValue()); + +}*/ + + +#endif diff --git a/CameraGigePylon.h b/CameraGigePylon.h new file mode 100644 index 0000000..27b75b7 --- /dev/null +++ b/CameraGigePylon.h @@ -0,0 +1,155 @@ +/* + CameraGigePylon.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 20/10/2014 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** + * \file CameraGigePylon.cpp + * \author Yoan Audureau -- FRIPON-GEOPS-UPSUD + * \version 1.0 + * \date 03/07/2014 + * \brief Use Pylon library to pilot GigE Cameras. 
+ */ + +#pragma once + +#include "config.h" + +#ifdef USE_PYLON + + #include "Frame.h" + #include "TimeDate.h" + #include "Conversion.h" + #include "SaveImg.h" + #include "Camera.h" + #include "ECamPixFmt.h" + #include "EParser.h" + #include + #include + #include + #include + #include "ELogSeverityLevel.h" + + #include + #include + #include + + using namespace Pylon; + using namespace GenApi; + using namespace cv; + using namespace std; + using namespace Basler_GigECameraParams; + + static const uint32_t nbBuffers = 20; // Buffer's number used for grabbing + + class CameraGigePylon : public Camera { + + private : + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init { + + public : + + Init() { + + logger.add_attribute("ClassName", boost::log::attributes::constant("CameraGigePylon")); + + } + + } initializer; + + // Automagically call PylonInitialize and PylonTerminate to ensure the pylon runtime system + // is initialized during the lifetime of this object. + Pylon::PylonAutoInitTerm autoInitTerm; + + uint8_t* ppBuffersUC[nbBuffers]; // Buffer for the grabbed images in 8 bits format. + uint16_t* ppBuffersUS[nbBuffers]; // Buffer for the grabbed images in 16 bits format. + StreamBufferHandle handles[nbBuffers]; + CTlFactory *pTlFactory; + ITransportLayer *pTl; // Pointer on the transport layer. + CBaslerGigECamera *pCamera; // Pointer on basler camera. 
+ CBaslerGigECamera::StreamGrabber_t *pStreamGrabber; + DeviceInfoList_t devices; + GrabResult result; + bool connectionStatus; + int mFrameCounter; + + public: + + CameraGigePylon(); + + ~CameraGigePylon(void); + + vector> getCamerasList(); + + bool listCameras(); + + bool createDevice(int id); + + bool getDeviceNameById(int id, string &device); + + bool grabInitialization(); + + void grabCleanse(); + + bool acqStart(); + + void acqStop(); + + bool grabImage(Frame& newFrame); + + bool grabSingleImage(Frame &frame, int camID); + + void getExposureBounds(double &eMin, double &eMax); + + void getGainBounds(int &gMin, int &gMax); + + bool getPixelFormat(CamPixFmt &format); + + bool getFrameSize(int &w, int &h); + + bool getFPS(double &value); + + string getModelName(); + + bool setExposureTime(double exp); + + bool setGain(int gain); + + bool setFPS(double fps); + + bool setPixelFormat(CamPixFmt format); + + double getExposureTime(); + + bool setSize(int width, int height, bool customSize); + + void getAvailablePixelFormats(); + + }; + +#endif diff --git a/CameraGigeTis.cpp b/CameraGigeTis.cpp new file mode 100644 index 0000000..add4d5e --- /dev/null +++ b/CameraGigeTis.cpp @@ -0,0 +1,892 @@ +/* + CameraGigeTis.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau -- FRIPON-GEOPS-UPSUD +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. 
+* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 21/01/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CameraGigeTis.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 21/01/2015 +* \brief Use Imaging source sdk to pilot GigE Cameras. +* https://wiki.gnome.org/action/show/Projects/Aravis?action=show&redirect=Aravis +*/ + +#include "CameraGigeTis.h" + +#ifdef WINDOWS + + boost::log::sources::severity_logger< LogSeverityLevel > CameraGigeTis::logger; + + CameraGigeTis::Init CameraGigeTis::initializer; + + CameraGigeTis::CameraGigeTis(){ + + if(!DShowLib::InitLibrary()) + throw "Fail DShowLib::InitLibrary()."; + + m_pGrabber = new DShowLib::Grabber(); + mFrameCounter = 0; + mGain = 0; + mExposure = 0; + mFPS = 30; + mImgDepth = MONO8; + mSaturateVal = 0; + mGainMin = -1; + mGainMax = -1; + mExposureMin = -1; + mExposureMax = -1; + + mExposureAvailable = true; + mGainAvailable = true; + mInputDeviceType = CAMERA; + + } + + vector> CameraGigeTis::getCamerasList() { + + vector> camerasList; + + // Retrieve a list with the video capture devices connected to the computer. + pVidCapDevList = m_pGrabber->getAvailableVideoCaptureDevices(); + + // Print available devices. 
+ for(int i = 0; i < pVidCapDevList->size(); i++) { + + LARGE_INTEGER iSerNum; + if(pVidCapDevList->at(i).getSerialNumber(iSerNum.QuadPart) == false) iSerNum.QuadPart = 0; + std::ostringstream ossSerNum; + ossSerNum << std::hex << iSerNum.QuadPart; + string SerNum = ossSerNum.str(); + + pair c; + c.first = i; + c.second = "NAME[" + pVidCapDevList->at(i).getName() + "] S/N[" + SerNum + "] SDK[TIS]"; + camerasList.push_back(c); + + } + + return camerasList; + + } + + // https://valelab.ucsf.edu/svn/micromanager2/branches/micromanager1.3/DeviceAdapters/TISCam/SimplePropertyAccess.cpp + DShowLib::tIVCDRangePropertyPtr CameraGigeTis::getPropertyRangeInterface( _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr& pItems, const GUID& id ){ + + GUID itemID = id; + GUID elemID = DShowLib::VCDElement_Value; + + DShowLib::tIVCDPropertyElementPtr pFoundElement = pItems->findElement( itemID, elemID ); + + if( pFoundElement != 0 ){ + + DShowLib::tIVCDRangePropertyPtr pRange; + + if( pFoundElement->getInterfacePtr( pRange ) != 0 ) { + return pRange; + } + } + return 0; + } + + bool CameraGigeTis::propertyIsAvailable( const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer ){ + + return m_pItemContainer->findItem( id ) != 0; + + } + + long CameraGigeTis::getPropertyValue( const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer ){ + + long rval = 0; + DShowLib::tIVCDRangePropertyPtr pRange = getPropertyRangeInterface( m_pItemContainer, id ); + if( pRange != 0 ){ + rval = pRange->getValue(); + } + return rval; + + } + + void CameraGigeTis::setPropertyValue( const GUID& id, long val, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer ){ + + DShowLib::tIVCDRangePropertyPtr pRange = getPropertyRangeInterface( m_pItemContainer, id ); + + if( pRange != 0 ) { + pRange->setValue( val ); + } + } + + long CameraGigeTis::getPropertyRangeMin( const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer ){ + + long rval = 0; + 
DShowLib::tIVCDRangePropertyPtr pRange = getPropertyRangeInterface( m_pItemContainer, id ); + + if( pRange != 0 ){ + rval = pRange->getRangeMin(); + } + return rval; + } + + long CameraGigeTis::getPropertyRangeMax(const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer){ + + long rval = 0; + DShowLib:: tIVCDRangePropertyPtr pRange = getPropertyRangeInterface( m_pItemContainer, id ); + + if( pRange != 0 ) { + rval = pRange->getRangeMax(); + } + return rval; + } + + bool CameraGigeTis::setFpsToLowerValue() { + + try { + + // Get list of possible format. + DShowLib::Grabber::tFrameRateListPtr VidFpsListPtr = m_pGrabber->getAvailableFrameRates(); + double chooseValue = 0.0; + cout << "Available FPS : | " ; + for(int i = 0; i < VidFpsListPtr->size(); i++) { + + double fps = Conversion::roundToNearest((1.0/((float)VidFpsListPtr->at(i) / 1000.0)), 0.25); + cout << fps << " | "; + if(chooseValue == 0.0) { + chooseValue = fps; + }else { + if(fps < chooseValue) { + chooseValue = fps; + } + } + + } + cout << endl; + + if(chooseValue != 0.0) { + mFPS = chooseValue; + cout << ">> Fps setted to the lower value : " << chooseValue << endl; + m_pGrabber->setFPS(chooseValue); + return true; + } + + }catch(exception& e) { + + BOOST_LOG_SEV(logger,critical) << "An error occured on set lower fps operation."; + BOOST_LOG_SEV(logger,critical) << e.what(); + + } + + return false; + + } + + bool CameraGigeTis::setFPS(double value) { + + try { + + // Get list of possible format. 
+ DShowLib::Grabber::tFrameRateListPtr VidFpsListPtr = m_pGrabber->getAvailableFrameRates(); + double chooseValue = 0.0; + double resPrev = 0.0; + cout << "Available FPS : | " ; + for(int i = 0; i < VidFpsListPtr->size(); i++) { + + double fps = Conversion::roundToNearest((1.0/((float)VidFpsListPtr->at(i) / 1000.0)), 0.25); + cout << fps << " | "; + if(resPrev == 0.0) { + resPrev = abs(fps - value); + chooseValue = fps; + }else { + if(resPrev > abs(fps - value)) { + resPrev = abs(fps - value); + chooseValue = fps; + } + } + + } + cout << endl; + + if(chooseValue != 0.0) { + mFPS = chooseValue; + cout << ">> Set fps to : " << chooseValue << endl; + m_pGrabber->setFPS(chooseValue); + return true; + } + + }catch(exception& e) { + + BOOST_LOG_SEV(logger,critical) << "An error occured on set fps operation."; + BOOST_LOG_SEV(logger,critical) << e.what(); + + } + + return false; + + } + + bool CameraGigeTis::createDevice(int id){ + + // Retrieve a list with the video capture devices connected to the computer. + pVidCapDevList = m_pGrabber->getAvailableVideoCaptureDevices(); + + if(pVidCapDevList == 0 || pVidCapDevList->empty()){ + + BOOST_LOG_SEV(logger,fail) << "No device available."; + return false; + + }else { + + if(((id+1)>pVidCapDevList->size()) || id < 0) { + + BOOST_LOG_SEV(logger,fail) << "Camera ID not correct. Can't be found."; + return false; + + } + + // Open the selected video capture device. + m_pGrabber->openDev(pVidCapDevList->at(id)); + return true; + + } + } + + bool CameraGigeTis::setPixelFormat(CamPixFmt format) { + + mImgDepth = format; + + vector mono12, mono8; + + // Get list of possible format. 
+ DShowLib::Grabber::tVidFmtListPtr VidFmtListPtr = m_pGrabber->getAvailableVideoFormats(); + string dateDelimiter = " "; + cout << "Available Format : " << endl; + for(int i = 0; i < VidFmtListPtr->size(); i++) { + + string s = VidFmtListPtr->at(i).c_str(); + string s1 = s.substr(0, s.find(dateDelimiter)); + cout << "-> (" << Conversion::intToString(i) << ") " << VidFmtListPtr->at(i).c_str() << endl; + + if(s1 == "Y8" || s1 == "Y800"){ + + mono8.push_back(VidFmtListPtr->at(i).c_str()); + + }else if(s1 == "Y12" || s1 == "Y16"){ + + mono12.push_back(VidFmtListPtr->at(i).c_str()); + + } + } + + cout << endl; + + switch(format){ + + case MONO8 : + + if(mono8.size() == 0) + return false; + + m_pGrabber->setVideoFormat(mono8.front());//"Y8 (1280x960-1280x960)"); + + // Set the image buffer format to eY800. eY800 means monochrome, 8 bits (1 byte) per pixel. + // Let the sink create a matching MemBufferCollection with 1 buffer. + pSink = DShowLib::FrameHandlerSink::create( DShowLib::eY800, NUMBER_OF_BUFFERS ); + + break; + + case MONO12 : + + if(mono12.size() == 0) + return false; + + m_pGrabber->setVideoFormat(mono12.front());//"Y16 (1280x960-1280x960)"); + + // Disable overlay. + // http://www.theimagingsourceforums.com/archive/index.php/t-319880.html + m_pGrabber->setOverlayBitmapPathPosition(DShowLib::ePP_NONE); + + // Set the image buffer format to eY16. eY16 means monochrome, 16 bits (2 byte) per pixel. + // Let the sink create a matching MemBufferCollection with 1 buffer. 
+ pSink = DShowLib::FrameHandlerSink::create( DShowLib::eY16, NUMBER_OF_BUFFERS ); + + break; + + default: + + return false; + + break; + } + + return true; + + } + + bool CameraGigeTis::getFPS(double &value){ + + value = m_pGrabber->getFPS(); + return true; + + } + + void CameraGigeTis::getExposureBounds(double &eMin, double &eMax) { + + DShowLib::tIVCDAbsoluteValuePropertyPtr pExposureRange; + + pExposureRange = NULL; + + DShowLib::tIVCDPropertyItemsPtr pItems = m_pGrabber->getAvailableVCDProperties(); + + if( pItems != 0 ) { + + // Try to find the exposure item. + DShowLib::tIVCDPropertyItemPtr pExposureItem = pItems->findItem( DShowLib::VCDID_Exposure ); + + if( pExposureItem != 0 ) { + + // Try to find the value and auto elements + DShowLib::tIVCDPropertyElementPtr pExposureValueElement = pExposureItem->findElement( DShowLib::VCDElement_Value ); + + // If a value element exists, try to acquire a range interface + if( pExposureValueElement != 0 ) { + + pExposureValueElement->getInterfacePtr( pExposureRange ); + + eMin = pExposureRange->getRangeMin() * 1000000.0; // in us + eMax = pExposureRange->getRangeMax() * 1000000.0; // in us + + } + } + } + } + + void CameraGigeTis::getGainBounds(int &gMin, int &gMax) { + + // Get properties. 
+ _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr pItems = m_pGrabber->getAvailableVCDProperties(); + + gMin = (int)getPropertyRangeMin(DShowLib::VCDID_Gain, pItems); + gMax = (int)getPropertyRangeMax(DShowLib::VCDID_Gain, pItems); + + } + + // http://www.theimagingsourceforums.com/faq.php?faq=ic_programming + bool CameraGigeTis::setExposureTime(double value) { + + // Conversion in seconds + value = value / 1000000.0; + + bool bOK = false; + + DShowLib::tIVCDAbsoluteValuePropertyPtr pExposureRange; + DShowLib::tIVCDSwitchPropertyPtr pExposureAuto; + + pExposureRange = NULL; + pExposureAuto = NULL; + + DShowLib::tIVCDPropertyItemsPtr pItems = m_pGrabber->getAvailableVCDProperties(); + + if( pItems != 0 ) { + // Try to find the exposure item. + DShowLib::tIVCDPropertyItemPtr pExposureItem = pItems->findItem( DShowLib::VCDID_Exposure ); + if( pExposureItem != 0 ) { + // Try to find the value and auto elements + DShowLib::tIVCDPropertyElementPtr pExposureValueElement = pExposureItem->findElement( DShowLib::VCDElement_Value ); + DShowLib::tIVCDPropertyElementPtr pExposureAutoElement = pExposureItem->findElement( DShowLib::VCDElement_Auto ); + + // If an auto element exists, try to acquire a switch interface + if( pExposureAutoElement != 0 ) { + pExposureAutoElement->getInterfacePtr( pExposureAuto ); + pExposureAuto->setSwitch(false); // Disable auto, otherwise we can not set exposure. + } + + // If a value element exists, try to acquire a range interface + if( pExposureValueElement != 0 ) { + + pExposureValueElement->getInterfacePtr( pExposureRange ); + + mExposureMin = pExposureRange->getRangeMin(); + mExposureMax = pExposureRange->getRangeMax(); + + cout << "Available exposure range : [ " << mExposureMin << " - "<< mExposureMax << " ]" << endl; + + if ( value <= mExposureMin ) { + value = mExposureMin + 0.000010; + BOOST_LOG_SEV(logger,warning) << "EXPOSURE TIME setted to " << value << ". 
Available range [" << mExposureMin << " - " << mExposureMax<< "]"; + } else if( value >= mExposureMax ) { + value = mExposureMax; + BOOST_LOG_SEV(logger,warning) << "EXPOSURE TIME setted to " << value << ". Available range [" << mExposureMin << " - " << mExposureMax<< "]"; + } + + // Here we set the the exposure value. + cout << ">> Set exposure time to : " << value << endl; + pExposureRange->setValue( value); + mExposure = value * 1000000.0; + bOK = true; + } + } + } + + return bOK; + } + + void CameraGigeTis::getAvailablePixelFormats() { + + if(m_pGrabber != NULL) { + + vector pixfmt; + EParser fmt; + DShowLib::Grabber::tVidFmtListPtr pVidFmtList = m_pGrabber->getAvailableVideoFormats(); + + // List the available video formats. + for(DShowLib::Grabber::tVidFmtListPtr::value_type::iterator it = pVidFmtList->begin(); it != pVidFmtList->end(); ++it) + { + string pf = it->c_str(); + + if(pf.find("Y8") != std::string::npos) { + pixfmt.push_back("MONO8"); + } + + if(pf.find("Y16") != std::string::npos) { + pixfmt.push_back("MONO12"); + } + + } + + std::cout << endl << ">> Available pixel formats :" << endl; + + for( int i = 0; i != pixfmt.size(); i++ ) { + if(fmt.isEnumValue(pixfmt.at(i))) { + std::cout << "- " << pixfmt.at(i) << " available --> ID : " << fmt.parseEnum(pixfmt.at(i)) << endl; + } + } + } + + } + + bool CameraGigeTis::setGain(int value) { + + bool bOK = false; + DShowLib::tIVCDSwitchPropertyPtr pGainAuto; + + pGainAuto = NULL; + + DShowLib::tIVCDPropertyItemsPtr pItems = m_pGrabber->getAvailableVCDProperties(); + + if( pItems != 0 ) { + + // Try to find the gain item. 
+ DShowLib::tIVCDPropertyItemPtr pGainItem = pItems->findItem( DShowLib::VCDID_Gain ); + + if( pGainItem != 0 ) { + + // Try to find auto elements + DShowLib::tIVCDPropertyElementPtr pGainAutoElement = pGainItem->findElement( DShowLib::VCDElement_Auto ); + + // If an auto element exists, try to acquire a switch interface + if( pGainAutoElement != 0 ) { + pGainAutoElement->getInterfacePtr( pGainAuto ); + pGainAuto->setSwitch(false); // Disable auto, otherwise we can not set gain. + } + + mGainMin = (int)getPropertyRangeMin(DShowLib::VCDID_Gain, pItems); + mGainMax = (int)getPropertyRangeMax(DShowLib::VCDID_Gain, pItems); + + cout << "Available gain range : [ " << mGainMin << " - "<< mGainMax << " ]" << endl; + + if(value > mGainMax || value < mGainMin){ + + BOOST_LOG_SEV(logger,warning) << "Fail to set GAIN. Available range value is " << mGainMin << " to " << mGainMax; + cout << endl << ">> Fail to set GAIN. Available range value is " << mGainMin << " to " << mGainMax << endl; + value = mGainMin; + } + + setPropertyValue(DShowLib::VCDID_Gain, (long)value, pItems); + cout << ">> Set gain to : " << value << endl; + mGain = value; + bOK = true; + + } + } + return bOK; + + } + + bool CameraGigeTis::grabInitialization() { + + // Set the sink. + m_pGrabber->setSinkType(pSink); + + // We use snap mode. + pSink->setSnapMode(true); + + // Prepare the live mode, to get the output size if the sink. + if(!m_pGrabber->prepareLive(false)){ + + std::cerr << "Could not render the VideoFormat into a eY800 sink."; + return false; + } + + // Retrieve the output type and dimension of the handler sink. + // The dimension of the sink could be different from the VideoFormat, when + // you use filters. + DShowLib::FrameTypeInfo info; + pSink->getOutputFrameType(info); + + // Allocate NUMBER_OF_BUFFERS image buffers of the above (info) buffer size. 
+ for (int ii = 0; ii < NUMBER_OF_BUFFERS; ++ii) { + pBuf[ii] = new BYTE[info.buffersize]; + assert(pBuf[ii]); + } + + // Create a new MemBuffer collection that uses our own image buffers. + pCollection = DShowLib::MemBufferCollection::create(info, NUMBER_OF_BUFFERS, pBuf); + if (pCollection == 0) return false; + if (!pSink->setMemBufferCollection(pCollection)) return false; + + if (!m_pGrabber->startLive(false)) return false; + + return true; + + } + + bool CameraGigeTis::acqStart() { + + if (!m_pGrabber->isLive()) { + + m_pGrabber->startLive(false); + + } + + pSink->snapImages(1,(DWORD)-1); + + return true; + + } + + bool CameraGigeTis::grabImage(Frame &newFrame) { + + Mat newImg; + + // Retrieve the output type and dimension of the handler sink. + // The dimension of the sink could be different from the VideoFormat, when + // you use filters. + DShowLib::FrameTypeInfo info; + pSink->getOutputFrameType(info); + + //Timestamping. + boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + + switch(info.getBitsPerPixel()){ + + case 8 : + + { + + newImg = Mat(info.dim.cy, info.dim.cx, CV_8UC1, Scalar(0)); + pSink->snapImages(1,(DWORD)-1); + memcpy(newImg.ptr(), pBuf[0], info.buffersize); + + } + + break; + + case 16 : + + { + + newImg = Mat(info.dim.cy, info.dim.cx, CV_16UC1, Scalar(0)); + + pSink->snapImages(1,(DWORD)-1); + + memcpy(newImg.ptr(), pBuf[0], info.buffersize); + + unsigned short * ptr; + + double t = (double)getTickCount(); + + for(int i = 0; i < newImg.rows; i++){ + + ptr = newImg.ptr(i); + + for(int j = 0; j < newImg.cols; j++){ + + ptr[j] = ptr[j] >> 4; + + } + } + + + + } + + break; + + default: + + return false; + + break; + } + + if(newImg.data) { + + newFrame = Frame(newImg, mGain, mExposure, to_iso_extended_string(time)); + + newFrame.mFps = mFPS; + newFrame.mFormat = mImgDepth; + newFrame.mSaturatedValue = mSaturateVal; + + newFrame.mFrameNumber = mFrameCounter; + mFrameCounter++; + + return true; + + } + + return 
false; + + } + + bool CameraGigeTis::setSize(int width, int height, bool customSize) { + + if(customSize){ + + }else{ + + } + + return true; + + } + + void CameraGigeTis::acqStop() { + + m_pGrabber->stopLive(); + m_pGrabber->closeDev(); + + } + + void CameraGigeTis::grabCleanse() { + + if(m_pGrabber!=NULL) + m_pGrabber->closeDev(); + + } + + double CameraGigeTis::getExposureTime() { + + DShowLib::tIVCDAbsoluteValuePropertyPtr pExposureRange; + + pExposureRange = NULL; + + DShowLib::tIVCDPropertyItemsPtr pItems = m_pGrabber->getAvailableVCDProperties(); + + if( pItems != 0 ) { + + // Try to find the exposure item. + + DShowLib::tIVCDPropertyItemPtr pExposureItem = pItems->findItem( DShowLib::VCDID_Exposure ); + if( pExposureItem != 0 ) { + + // Try to find the value and auto elements + DShowLib::tIVCDPropertyElementPtr pExposureValueElement = pExposureItem->findElement( DShowLib::VCDElement_Value ); + + // If a value element exists, try to acquire a range interface + if( pExposureValueElement != 0 ) { + + pExposureValueElement->getInterfacePtr( pExposureRange ); + return (pExposureRange->getValue()/1000000.0); + + } + } + } + + return 0.0; + + } + + bool CameraGigeTis::getPixelFormat(CamPixFmt &format) { + + if(m_pGrabber->getVideoFormat().getBitsPerPixel() == 8) { + + format = MONO8; + + }else if(m_pGrabber->getVideoFormat().getBitsPerPixel() == 16 || m_pGrabber->getVideoFormat().getBitsPerPixel() == 12) { + + format = MONO12; + + }else { + + return false; + + } + + return true; + + } + + bool CameraGigeTis::grabSingleImage(Frame &frame, int camID) { + + if(!createDevice(camID)) + return false; + + if(!setPixelFormat(frame.mFormat)) + return false; + + // Set lower fps value. + if(!setFpsToLowerValue()) + return false; + + if(!setExposureTime(frame.mExposure)) + return false; + + if(!setGain(frame.mGain)) + return false; + + cout << ">> Acquisition in progress... (Please wait)" << endl; + + // We use snap mode. + pSink->setSnapMode(true); + + // Set the sink. 
+ m_pGrabber->setSinkType(pSink); + + // Disable live mode. + m_pGrabber->prepareLive(false); + + // Retrieve the output type and dimension of the handler sink. + DShowLib::FrameTypeInfo info; + pSink->getOutputFrameType(info); + + Mat newImg; + DShowLib ::Error e; + + //Timestamping. + boost::posix_time::ptime time; + + switch(info.getBitsPerPixel()){ + + case 8 : + + { + + newImg = Mat(info.dim.cy, info.dim.cx, CV_8UC1, Scalar(0)); + BYTE* pBuf[1]; + // Allocate image buffers of the above calculate buffer size. + pBuf[0] = new BYTE[info.buffersize]; + + // Create a new MemBuffer collection that uses our own image buffers. + pCollection = DShowLib::MemBufferCollection::create( info, 1, pBuf ); + + if( pCollection == 0 || !pSink->setMemBufferCollection(pCollection)){ + + BOOST_LOG_SEV(logger,critical) << "Could not set the new MemBufferCollection."; + + }else { + + m_pGrabber->startLive(); + + e = pSink->snapImages(1); + + if( !e.isError()) { + time = boost::posix_time::microsec_clock::universal_time(); + memcpy(newImg.ptr(), pBuf[0], info.buffersize); + } + } + } + + break; + + case 16 : + + { + + newImg = Mat(info.dim.cy, info.dim.cx, CV_16UC1, Scalar(0)); + BYTE * pBuf[1]; + // Allocate image buffers of the above calculate buffer size. + pBuf[0] = new BYTE[info.buffersize]; + + // Create a new MemBuffer collection that uses our own image buffers. + pCollection = DShowLib::MemBufferCollection::create(info, 1, pBuf); + + if(pCollection == 0 || !pSink->setMemBufferCollection(pCollection)){ + + BOOST_LOG_SEV(logger,critical) << "Could not set the new MemBufferCollection."; + + }else { + + m_pGrabber->startLive(false); + + e = pSink->snapImages(1); + + if( !e.isError()) { + + time = boost::posix_time::microsec_clock::universal_time(); + memcpy(newImg.ptr(), pBuf[0], info.buffersize); + + // Shift. 
+ unsigned short * ptr; + for(int i = 0; i < newImg.rows; i++){ + ptr = newImg.ptr(i); + for(int j = 0; j < newImg.cols; j++){ + ptr[j] = ptr[j] >> 4; + } + } + } + } + } + + break; + + default: + + return false; + + break; + } + + m_pGrabber->stopLive(); + + m_pGrabber->closeDev(); + + if( !e.isError()) { + + newImg.copyTo(frame.mImg); + frame.mDate = TimeDate::splitIsoExtendedDate(to_iso_extended_string(time)); + frame.mFps = 0; + + return true; + } + + return false; + } + + CameraGigeTis::~CameraGigeTis(){ + + DShowLib::ExitLibrary(); + if(m_pGrabber != NULL) + delete m_pGrabber; + + } + +#endif diff --git a/CameraGigeTis.h b/CameraGigeTis.h new file mode 100644 index 0000000..9b3a20f --- /dev/null +++ b/CameraGigeTis.h @@ -0,0 +1,158 @@ +/* CameraGigeTis.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau -- FRIPON-GEOPS-UPSUD +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 21/01/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CameraGigeTis.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 21/01/2015 +* \brief Use Imaging source sdk to pilot GigE Cameras. 
+*/ + +#pragma once + +#include "config.h" + +#ifdef WINDOWS + + #include "opencv2/highgui/highgui.hpp" + #include + #include + #include + #include "Frame.h" + #include "TimeDate.h" + #include "Camera.h" + #include "EParser.h" + #include "ECamPixFmt.h" + #include + #include + #include + #include + #include + #include + #include + #include + #include + #include + #include "ELogSeverityLevel.h" + #include "tisudshl.h" + #include + + #define NUMBER_OF_BUFFERS 1 + + using namespace cv; + using namespace std; + + class CameraGigeTis: public Camera { + + private: + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init { + + public: + + Init() { + + logger.add_attribute("ClassName", boost::log::attributes::constant("CameraGigeTis")); + + } + + } initializer; + + DShowLib::Grabber::tVidCapDevListPtr pVidCapDevList; + DShowLib::tIVCDRangePropertyPtr getPropertyRangeInterface(_DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr& pItems, const GUID& id); + bool propertyIsAvailable(const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer); + long getPropertyValue(const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer); + void setPropertyValue(const GUID& id, long val, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer); + long getPropertyRangeMin(const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer); + long getPropertyRangeMax(const GUID& id, _DSHOWLIB_NAMESPACE::tIVCDPropertyItemsPtr m_pItemContainer); + + DShowLib::Grabber* m_pGrabber; + DShowLib::tFrameHandlerSinkPtr pSink; + DShowLib::Grabber::tMemBufferCollectionPtr pCollection; + BYTE* pBuf[NUMBER_OF_BUFFERS]; + + int mFrameCounter; + int mGain; + double mExposure; + double mFPS; + CamPixFmt mImgDepth; + int mSaturateVal; + int mGainMin; + int mGainMax; + int mExposureMin; + int mExposureMax; + + public: + + CameraGigeTis(); + + ~CameraGigeTis(); + + vector> getCamerasList(); + + bool grabSingleImage(Frame &frame, int 
camID); + + bool createDevice(int id); + + bool setPixelFormat(CamPixFmt format); + + void getExposureBounds(double &eMin, double &eMax); + + void getGainBounds(int &gMin, int &gMax); + + bool getFPS(double &value); + + bool setExposureTime(double value); + + bool setGain(int value); + + bool setFPS(double value); + + bool setFpsToLowerValue(); + + bool grabInitialization(); + + bool acqStart(); + + bool grabImage(Frame &newFrame); + + void acqStop(); + + void grabCleanse(); + + bool getPixelFormat(CamPixFmt &format); + + double getExposureTime(); + + bool setSize(int width, int height, bool customSize); + + void getAvailablePixelFormats(); + + }; + +#endif diff --git a/CameraV4l2.cpp b/CameraV4l2.cpp new file mode 100644 index 0000000..4a67bed --- /dev/null +++ b/CameraV4l2.cpp @@ -0,0 +1,1942 @@ +/* + CameraV4l2.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . 
+* +* Last modified: 17/08/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CameraV4l2.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 17/08/2015 +*/ + +#include "CameraV4l2.h" + +#ifdef LINUX + + enum io_method { + IO_METHOD_READ, + IO_METHOD_MMAP, + IO_METHOD_USERPTR, + }; + + struct buffer + { + void *start; + size_t length; + }; + + struct v4l2_buffer buf; + + enum io_method io = IO_METHOD_MMAP; + + struct buffer *buffers = NULL; + unsigned int n_buffers; + int out_buf = 1; + int frame_count = 10; + int frame_number = 0; + + + boost::log::sources::severity_logger< LogSeverityLevel > CameraV4l2::logger; + CameraV4l2::Init CameraV4l2::initializer; + + CameraV4l2::CameraV4l2(){ + + io_method io = IO_METHOD_MMAP; + fd = -1; + out_buf = 1; + frame_count = 10; + frame_number = 0; + expMin = 0; + expMax = 0; + exp =0; + gain = 0; + gainMin = 0; + gainMax = 0; + mFrameCounter = 0; + mWidth = 640; + mHeight = 480; + n_buffers = 3; + + mExposureAvailable = true; + mGainAvailable = true; + mCustomSize = false; + mInputDeviceType = CAMERA; + + } + + CameraV4l2::~CameraV4l2(){ + + + } + + bool CameraV4l2::getInfos() { + + struct v4l2_capability caps = {}; + + // http://linuxtv.org/downloads/v4l-dvb-apis/vidioc-querycap.html + + if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &caps)) { + perror("Querying Capabilities"); + return false; + } + + cout << "Driver name : " << caps.driver << endl; + cout << "Device name : " << caps.card << endl; + cout << "Device location : " << caps.bus_info << endl; + printf ("Driver version : %u.%u.%u\n",(caps.version >> 16) & 0xFF, (caps.version >> 8) & 0xFF, caps.version & 0xFF); + cout << "Capabilities : " << caps.capabilities << endl; + + struct v4l2_cropcap cropcap; + memset(&cropcap, 0, sizeof(cropcap)); + cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == xioctl (fd, VIDIOC_CROPCAP, &cropcap)) { + perror("Querying Cropping Capabilities"); + return false; + } + 
+ printf( "Camera Cropping :\n" + " Bounds : %dx%d+%d+%d\n" + " Default : %dx%d+%d+%d\n" + " Aspect : %d/%d\n", + cropcap.bounds.width, cropcap.bounds.height, cropcap.bounds.left, cropcap.bounds.top, + cropcap.defrect.width, cropcap.defrect.height, cropcap.defrect.left, cropcap.defrect.top, + cropcap.pixelaspect.numerator, cropcap.pixelaspect.denominator); + + int support_grbg10 = 0; + + struct v4l2_fmtdesc fmtdesc = {0}; + fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + char fourcc[5] = {0}; + char c, e; + printf( " FORMAT : CE Desc\n"); + while (0 == xioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) { + strncpy(fourcc, (char *)&fmtdesc.pixelformat, 4); + if (fmtdesc.pixelformat == V4L2_PIX_FMT_SGRBG10) + support_grbg10 = 1; + c = fmtdesc.flags & 1? 'C' : ' '; + e = fmtdesc.flags & 2? 'E' : ' '; + printf(" %s : %c%c %s\n", fourcc, c, e, fmtdesc.description); + fmtdesc.index++; + } + + /*struct v4l2_format fmt = {0}; + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + fmt.fmt.pix.width = 640; + fmt.fmt.pix.height = 480; + //fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_BGR24; + //fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_GREY; + fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG; + fmt.fmt.pix.field = V4L2_FIELD_NONE; + + if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt)) { + perror("Setting Pixel Format"); + return false; + } + + strncpy(fourcc, (char *)&fmt.fmt.pix.pixelformat, 4); + printf( "Selected mode :\n" + " Width : %d\n" + " Height : %d\n" + " PixFmt : %s\n" + " Field : %d\n", + fmt.fmt.pix.width, + fmt.fmt.pix.height, + fourcc, + fmt.fmt.pix.field);*/ + + double eMin, eMax; int gMin, gMax; + getExposureBounds(eMin, eMax); + cout << "Min exposure : " << eMin << endl; + cout << "Max exposure : " << eMax << endl; + + getGainBounds(gMin, gMax); + cout << "Min gain : " << gMin << endl; + cout << "Max gain : " << gMax << endl; + + return true; + + }; + + vector> CameraV4l2::getCamerasList() { + + vector> camerasList; + + bool loop = true; + bool res = true; + int deviceNumber = 0; + + do { + + string 
devicePathStr = "/dev/video" + Conversion::intToString(deviceNumber); + + // http://stackoverflow.com/questions/230062/whats-the-best-way-to-check-if-a-file-exists-in-c-cross-platform + + if(access(devicePathStr.c_str(), F_OK) != -1 ) { + + // file exists + + // http://stackoverflow.com/questions/4290834/how-to-get-a-list-of-video-capture-devices-web-cameras-on-linux-ubuntu-c + + int fd; + + if((fd = open(devicePathStr.c_str(), O_RDONLY)) == -1){ + perror("Can't open device"); + res = false; + }else { + + struct v4l2_capability caps = {}; + + if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &caps)) { + cout << "Fail Querying Capabilities." << endl; + perror("Querying Capabilities"); + res = false; + }else { + + pair c; + c.first = deviceNumber; + std::string s( reinterpret_cast< char const* >(caps.card) ) ; + c.second = "NAME[" + s + "] SDK[V4L2]"; + camerasList.push_back(c); + + } + } + + close(fd); + + deviceNumber++; + + } else { + + loop = false; + + } + + }while(loop); + + return camerasList; + + } + + bool CameraV4l2::listCameras() { + + bool loop = true; + bool res = true; + int deviceNumber = 0; + + cout << endl << "------------ USB2 CAMERAS WITH V4L2 ----------" << endl << endl; + + do { + + string devicePathStr = "/dev/video" + Conversion::intToString(deviceNumber); + + // http://stackoverflow.com/questions/230062/whats-the-best-way-to-check-if-a-file-exists-in-c-cross-platform + + if(access(devicePathStr.c_str(), F_OK) != -1 ) { + + // file exists + + // http://stackoverflow.com/questions/4290834/how-to-get-a-list-of-video-capture-devices-web-cameras-on-linux-ubuntu-c + + int fd; + + if((fd = open(devicePathStr.c_str(), O_RDONLY)) == -1){ + perror("Can't open device"); + res = false; + }else { + + struct v4l2_capability caps = {}; + + if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &caps)) { + perror("Querying Capabilities"); + res = false; + }else { + + cout << "-> [" << deviceNumber << "] " << caps.card << endl; + + } + } + + close(fd); + + deviceNumber++; + + } else { + + 
// file doesn't exist + if(deviceNumber == 0) + cout << "-> No cameras detected ..." << endl; + loop = false; + + } + + }while(loop); + + cout << endl << "------------------------------------------------" << endl << endl; + + return res; + + } + + bool CameraV4l2::createDevice(int id){ + + string deviceNameStr = "/dev/video" + Conversion::intToString(id); + mDeviceName = deviceNameStr.c_str(); + + struct stat st; + + if (-1 == stat(mDeviceName, &st)) { + fprintf(stderr, "Cannot identify '%s': %d, %s\n", mDeviceName, errno, strerror(errno)); + return false; + } + + if (!S_ISCHR(st.st_mode)) { + fprintf(stderr, "%s is no device\n", mDeviceName); + return false; + } + + fd = open(mDeviceName, O_RDWR /* required */ | O_NONBLOCK, 0); + + if (-1 == fd) { + fprintf(stderr, "Cannot open '%s': %d, %s\n", mDeviceName, errno, strerror(errno)); + return false; + } + + getExposureBounds(expMin, expMax); + getGainBounds(gainMin, gainMax); + + memset(&mFormat, 0, sizeof(mFormat)); + mFormat.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + // Preserve original settings as set by v4l2-ctl for example + if (-1 == xioctl(fd, VIDIOC_G_FMT, &mFormat)){ + return false; + } + + return true; + + } + + bool CameraV4l2::setSize(int width, int height, bool customSize) { + mWidth = width; + mHeight = height; + mCustomSize = customSize; + return true; + } + + // if customSize = true --> set width and height values passed in argument + // if customSize = false --> set maximum size + bool CameraV4l2::setSize() { + + int chooseWidth = 0; + int chooseHeight = 0; + bool discreteSize = false; + + bool res = false; + + struct v4l2_frmsizeenum frmsize; + memset(&frmsize, 0, sizeof(frmsize)); + frmsize.pixel_format = mFormat.fmt.pix.pixelformat; // Necessary to set size. 
+ + while(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0) { + + switch(frmsize.type) { + + case V4L2_FRMSIZE_TYPE_DISCRETE : + + if(chooseHeight == 0 && chooseWidth == 0) { + + chooseHeight = frmsize.discrete.height; + chooseWidth = frmsize.discrete.width; + + }else { + + if((abs(mWidth - chooseWidth) > abs(mWidth - frmsize.discrete.width)) && (abs(mHeight - chooseHeight) > abs(mHeight - frmsize.discrete.height))) { + chooseWidth = frmsize.discrete.width; + chooseHeight = frmsize.discrete.height; + } + + } + + discreteSize = true; + res = true; + + break; + + case V4L2_FRMSIZE_TYPE_CONTINUOUS : + + break; + + case V4L2_FRMSIZE_TYPE_STEPWISE : + + if(mCustomSize) { + + if(mWidth >= frmsize.stepwise.min_width && mWidth <=frmsize.stepwise.max_width) { + + mFormat.fmt.pix.width = mWidth; + + }else { + + mFormat.fmt.pix.width = frmsize.stepwise.max_width; + + } + + if(mHeight >= frmsize.stepwise.min_height && mHeight <=frmsize.stepwise.max_height) { + + mFormat.fmt.pix.height = mHeight; + + }else { + + mFormat.fmt.pix.height = frmsize.stepwise.max_height; + + } + + }else { + + mFormat.fmt.pix.height = frmsize.stepwise.max_height; + mFormat.fmt.pix.width = frmsize.stepwise.max_width; + + } + + res = true; + + break; + + } + + frmsize.index++; + + } + + if(discreteSize && res) { + + mFormat.fmt.pix.height = chooseHeight; + mFormat.fmt.pix.width = chooseWidth; + + } + + + return res; + + } + + + bool CameraV4l2::getDeviceNameById(int id, string &device){ + + return false; + + } + + bool CameraV4l2::getCameraName() { + + if(fd != -1) { + + struct v4l2_capability caps = {}; + + // http://linuxtv.org/downloads/v4l-dvb-apis/vidioc-querycap.html + + if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &caps)) { + perror("Querying Capabilities"); + return false; + } + + cout << "Driver name : " << caps.driver << endl; + cout << "Device name : " << caps.card << endl; + cout << "Device location : " << caps.bus_info << endl; + printf ("Driver version : %u.%u.%u\n",(caps.version >> 16) & 0xFF, 
(caps.version >> 8) & 0xFF, caps.version & 0xFF); + cout << "Capabilities : " << caps.capabilities << endl; + + return true; + + } + + return false; + + } + + bool CameraV4l2::grabInitialization(){ + + struct v4l2_capability cap; + struct v4l2_cropcap cropcap; + struct v4l2_crop crop; + + unsigned int min; + + if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap)) + { + if (EINVAL == errno) + { + fprintf(stderr, + "%s is no V4L2 device\n", + mDeviceName); + exit(EXIT_FAILURE); + } + else + { + errno_exit("VIDIOC_QUERYCAP"); + } + } + + if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) + { + fprintf(stderr, + "%s is no video capture device\n", + mDeviceName); + exit(EXIT_FAILURE); + } + + switch (io) + { + case IO_METHOD_READ: + { + if (!(cap.capabilities & V4L2_CAP_READWRITE)) + { + fprintf(stderr, + "%s does not support read i/o\n", + mDeviceName); + exit(EXIT_FAILURE); + } + break; + } + case IO_METHOD_MMAP: + case IO_METHOD_USERPTR: + { + if (!(cap.capabilities & V4L2_CAP_STREAMING)) + { + fprintf(stderr, "%s does not support streaming i/o\n", + mDeviceName); + exit(EXIT_FAILURE); + } + break; + } + } + + // Select video input, video standard and tune here. + + memset(&cropcap, 0, sizeof(cropcap)); + + cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if (0 == xioctl(fd, VIDIOC_CROPCAP, &cropcap)) { + crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + crop.c = cropcap.defrect; // reset to default + + if (-1 == xioctl(fd, VIDIOC_S_CROP, &crop)) { + switch (errno) { + case EINVAL: + // Cropping not supported. + break; + default: + // Errors ignored. + break; + } + } + } else { + // Errors ignored. + } + + // Set some parameters...SIZE + + if(!setSize()) + return false; + + if(-1 == xioctl(fd, VIDIOC_S_FMT, &mFormat)) { + cout << "Fail to set fmt." << endl; + return false; + } + + /* Buggy driver paranoia. 
*/ + min = mFormat.fmt.pix.width * 2; + if (mFormat.fmt.pix.bytesperline < min) + mFormat.fmt.pix.bytesperline = min; + min = mFormat.fmt.pix.bytesperline * mFormat.fmt.pix.height; + if (mFormat.fmt.pix.sizeimage < min) + mFormat.fmt.pix.sizeimage = min; + + return true; + + } + + void CameraV4l2::grabCleanse(){ + + // Uninit device + + unsigned int i; + + if(buffers != NULL) { + switch (io) { + + case IO_METHOD_READ: + free(buffers[0].start); + break; + + case IO_METHOD_MMAP: + + for (i = 0; i < n_buffers; ++i) + if (-1 == munmap(buffers[i].start, buffers[i].length)) + errno_exit("munmap"); + break; + + case IO_METHOD_USERPTR: + for (i = 0; i < n_buffers; ++i) + free(buffers[i].start); + break; + } + + free(buffers); + } + + // Close device + + if (-1 == close(fd)) + errno_exit("close"); + + fd = -1; + + } + + bool CameraV4l2::acqStart(){ + + // INIT DEVICE + + unsigned int i; + enum v4l2_buf_type type; + + switch (io) { + case IO_METHOD_READ: + init_read(mFormat.fmt.pix.sizeimage); + break; + + case IO_METHOD_MMAP: + init_mmap(); + break; + + case IO_METHOD_USERPTR: + init_userp(mFormat.fmt.pix.sizeimage); + break; + } + + // START CAPTURING + + switch (io) { + case IO_METHOD_READ: + { + /* Nothing to do. 
*/ + break; + } + case IO_METHOD_MMAP: + { + for (i = 0; i < n_buffers; ++i) + { + struct v4l2_buffer buf; + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + buf.index = i; + + if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) + { + errno_exit("VIDIOC_QBUF"); + return false; + } + } + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == xioctl(fd, VIDIOC_STREAMON, &type)) + { + errno_exit("VIDIOC_STREAMON"); + return false; + } + break; + } + case IO_METHOD_USERPTR: + { + for (i = 0; i < n_buffers; ++i) + { + struct v4l2_buffer buf; + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_USERPTR; + buf.index = i; + buf.m.userptr = (unsigned long)buffers[i].start; + buf.length = buffers[i].length; + + if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) + { + errno_exit("VIDIOC_QBUF"); + return false; + } + } + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == xioctl(fd, VIDIOC_STREAMON, &type)) + { + errno_exit("VIDIOC_STREAMON"); + return false; + } + break; + } + } + + return true; + } + + void CameraV4l2::acqStop(){ + + enum v4l2_buf_type type; + + switch (io) + { + case IO_METHOD_READ: + { + /* Nothing to do. */ + break; + } + case IO_METHOD_MMAP: + case IO_METHOD_USERPTR: + type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == xioctl(fd, VIDIOC_STREAMOFF, &type)) + { + errno_exit("VIDIOC_STREAMOFF"); + } + break; + } + + } + + bool CameraV4l2::grabImage(Frame &newFrame) { + + unsigned char* ImageBuffer = NULL; + + Mat img = Mat(mFormat.fmt.pix.height,mFormat.fmt.pix.width,CV_8UC1, Scalar(0)); + size_t s = mFormat.fmt.pix.width*mFormat.fmt.pix.height; + + bool grabSuccess = false; + + for(;;) { + + fd_set fds; + struct timeval tv; + int r; + + FD_ZERO(&fds); + FD_SET(fd, &fds); + + /* Timeout. 
*/ + tv.tv_sec = 2; + tv.tv_usec = 0; + + r = select(fd + 1, &fds, NULL, NULL, &tv); + + if(-1 == r) { + if (EINTR == errno) + continue; + errno_exit("select"); + } + + if(0 == r) { + fprintf(stderr, "select timeout\n"); + BOOST_LOG_SEV(logger, warning) << "Select timeout !"; + //exit(EXIT_FAILURE); + } + + if(read_frame()) { + grabSuccess = true; + break; + } + /* EAGAIN - continue select loop. */ + } + + if(grabSuccess) { + + ImageBuffer = (unsigned char*)buffers[buf.index].start; + + boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + newFrame.mDate = TimeDate::splitIsoExtendedDate(to_iso_extended_string(time)); + + double fps = 0; + if(getFPS(fps)) + newFrame.mFps = fps; + newFrame.mFormat = MONO8; + newFrame.mSaturatedValue = 255; + newFrame.mFrameNumber = mFrameCounter; + newFrame.mExposure = exp; + newFrame.mGain = gain; + mFrameCounter++; + + if(!convertImage(ImageBuffer, newFrame.mImg)) + grabSuccess = false; + + } + + return grabSuccess; + + } + + bool CameraV4l2::grabSingleImage(Frame &frame, int camID){ + + createDevice(camID); + + if(frame.mHeight > 0 && frame.mWidth > 0) { + + cout << "Setting size to : " << frame.mWidth << "x" << frame.mHeight << endl; + mWidth = frame.mWidth; + mHeight = frame.mHeight; + mCustomSize = true; + + } + + grabInitialization(); + + acqStart(); + + cout << ">> Height : " << mFormat.fmt.pix.height << endl; + cout << ">> Width : " << mFormat.fmt.pix.width << endl; + + if(!setPixelFormat(frame.mFormat)) + return false; + + if(expMin != -1 && expMax != -1) + setExposureTime(frame.mExposure); + if(expMin != -1 && expMax != -1) + setGain(frame.mGain); + + unsigned char* ImageBuffer = NULL; + + Mat img = Mat(mFormat.fmt.pix.height,mFormat.fmt.pix.width,CV_8UC1, Scalar(0)); + size_t s = mFormat.fmt.pix.width*mFormat.fmt.pix.height; + + bool grabSuccess = false; + + for(int i = 0; i< n_buffers; i++) { + + + for(;;) { + + fd_set fds; + struct timeval tv; + int r; + + FD_ZERO(&fds); + FD_SET(fd, 
&fds); + + /* Timeout. */ + int timeout = 2; + + if(frame.mExposure/1000000 > 1) + timeout = timeout + (int)(frame.mExposure/1000000); + + tv.tv_sec = timeout; + tv.tv_usec = 0; + + r = select(fd + 1, &fds, NULL, NULL, &tv); + + if(-1 == r) { + if (EINTR == errno) + continue; + errno_exit("select"); + } + + if(0 == r) { + fprintf(stderr, "select timeout\n"); + exit(EXIT_FAILURE); + } + + if(read_frame()) { + grabSuccess = true; + break; + } + /* EAGAIN - continue select loop. */ + } + } + + if(grabSuccess) { + + ImageBuffer = (unsigned char*)buffers[buf.index].start; + + + boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + frame.mDate = TimeDate::splitIsoExtendedDate(to_iso_extended_string(time)); + + double fps = 0; + if(getFPS(fps)) + frame.mFps = fps; + frame.mSaturatedValue = 255; + frame.mFrameNumber = mFrameCounter; + + cout << "size image buffer : " << sizeof(buffers[buf.index].start) << endl; + if(!convertImage(ImageBuffer, frame.mImg)) + grabSuccess = false; + + } + + acqStop(); + grabCleanse(); + + return grabSuccess; + + } + + bool CameraV4l2::convertImage(unsigned char* buffer, Mat &image) { + + bool res = false; + + if(buffer != NULL) { + + switch(mFormat.fmt.pix.pixelformat) { + + case V4L2_PIX_FMT_GREY : + + { + + image = Mat(mFormat.fmt.pix.height, mFormat.fmt.pix.width, CV_8UC1, Scalar(0)); + memcpy(image.ptr(), buffer, mFormat.fmt.pix.width*mFormat.fmt.pix.height); + res = true; + + } + + break; + + case V4L2_PIX_FMT_YUYV : + + { + unsigned char* bigbuffer = (unsigned char*)malloc(mFormat.fmt.pix.height * mFormat.fmt.pix.width*3*sizeof(char)); + Mat dispimg(mFormat.fmt.pix.height, mFormat.fmt.pix.width, CV_8UC3, bigbuffer); + PixFmtConv::YUYV_to_BGR24(buffer, bigbuffer, mFormat.fmt.pix.width, mFormat.fmt.pix.height, mFormat.fmt.pix.bytesperline); + cvtColor(dispimg,image,CV_BGR2GRAY); + res = true; + free(bigbuffer); + + } + + break; + + case V4L2_PIX_FMT_UYVY : + + { + unsigned char 
bigbuffer[mFormat.fmt.pix.height * mFormat.fmt.pix.width*3]; + PixFmtConv::UYVY_to_BGR24(buffer, bigbuffer, mFormat.fmt.pix.width, mFormat.fmt.pix.height, mFormat.fmt.pix.bytesperline); + Mat dispimg(mFormat.fmt.pix.height, mFormat.fmt.pix.width, CV_8UC3, bigbuffer); + cvtColor(dispimg,image,CV_BGR2GRAY); + res = true; + } + + break; + + case V4L2_PIX_FMT_RGB565 : + + { + + unsigned char bigbuffer[mFormat.fmt.pix.height * mFormat.fmt.pix.width*3]; + PixFmtConv::RGB565_to_BGR24(buffer, bigbuffer, mFormat.fmt.pix.width, mFormat.fmt.pix.height); + Mat dispimg(mFormat.fmt.pix.height, mFormat.fmt.pix.width, CV_8UC3, bigbuffer); + cvtColor(dispimg,image,CV_BGR2GRAY); + res = true; + + } + + break; + + case V4L2_PIX_FMT_BGR24 : + + { + Mat dispimg = Mat(mFormat.fmt.pix.height, mFormat.fmt.pix.width, CV_8UC3, buffer); + cvtColor(dispimg,image,CV_BGR2GRAY); + res = true; + + } + + break; + + case V4L2_PIX_FMT_RGB24 : + + { + Mat dispimg = Mat(mFormat.fmt.pix.height, mFormat.fmt.pix.width, CV_8UC3, buffer); + cvtColor(dispimg,image,CV_BGR2GRAY); + res = true; + + } + + break; + + } + + } + + return res; + + } + + void CameraV4l2::getExposureBounds(double &eMin, double &eMax){ + + struct v4l2_queryctrl queryctrl; + memset(&queryctrl, 0, sizeof(queryctrl)); + queryctrl.id = V4L2_CID_EXPOSURE_ABSOLUTE; + + if (-1 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl)) { + + if (errno != EINVAL) { + + perror("VIDIOC_QUERYCTRL"); + exit(EXIT_FAILURE); + + } else { + + printf(">> V4L2_CID_EXPOSURE_ABSOLUTE is not supported\n"); + eMin = -1; + eMax = -1; + + } + + } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) { + + printf(">> V4L2_CID_EXPOSURE_ABSOLUTE is not supported\n"); + eMin = -1; + eMax = -1; + + } else { + + /*cout << "Name : " << queryctrl.name << endl; + cout << "Min : " << queryctrl.minimum << endl; + cout << "Max : " << queryctrl.maximum << endl; + cout << "Step : " << queryctrl.step << endl; + cout << "Default : " << queryctrl.default_value << endl; + cout << "Flags : " << 
queryctrl.flags << endl;*/ + + eMin = queryctrl.minimum; + eMax = queryctrl.maximum; + + } + + } + + double CameraV4l2::getExposureTime(){ + + struct v4l2_control control; + memset(&control, 0, sizeof(control)); + control.id = V4L2_CID_EXPOSURE_ABSOLUTE; + + if(0 == ioctl(fd, VIDIOC_G_CTRL, &control)) { + + return control.value * 100; + + // Ignore if V4L2_CID_CONTRAST is unsupported + } else if (errno != EINVAL) { + + perror("VIDIOC_G_CTRL"); + + } + + return 0; + + } + + void CameraV4l2::getGainBounds(int &gMin, int &gMax){ + + struct v4l2_queryctrl queryctrl; + memset(&queryctrl, 0, sizeof(queryctrl)); + queryctrl.id = V4L2_CID_GAIN; + + if (-1 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl)) { + + if (errno != EINVAL) { + + perror("VIDIOC_QUERYCTRL"); + exit(EXIT_FAILURE); + + } else { + + printf(">> V4L2_CID_GAIN is not supported\n"); + gainMin = -1; + gainMax = -1; + + } + + } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) { + + printf(">> V4L2_CID_GAIN is not supported\n"); + gainMin = -1; + gainMax = -1; + + } else { + + /*cout << "Name : " << queryctrl.name << endl; + cout << "Min : " << queryctrl.minimum << endl; + cout << "Max : " << queryctrl.maximum << endl; + cout << "Step : " << queryctrl.step << endl; + cout << "Default : " << queryctrl.default_value << endl; + cout << "Flags : " << queryctrl.flags << endl;*/ + + gMin = queryctrl.minimum; + gMax = queryctrl.maximum; + + } + + } + + bool CameraV4l2::getPixelFormat(CamPixFmt &format){ + + /*char fourcc[5] = {0}; + + struct v4l2_format fmt; + memset(&fmt, 0, sizeof(fmt)); + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + fmt.fmt.pix.width = mWidth; + fmt.fmt.pix.height = mHeight; + fmt.fmt.pix.field = V4L2_FIELD_NONE; + + if (-1 == xioctl(fd, VIDIOC_G_FMT, &fmt)) { + perror("Getting Pixel Format"); + return false; + } + + // http://linuxtv.org/downloads/v4l-dvb-apis/V4L2-PIX-FMT-GREY.html + if(fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_GREY) { + + strncpy(fourcc, (char *)&fmt.fmt.pix.pixelformat, 4); + cout << 
"Pixel format : V4L2_PIX_FMT_GREY" << endl; + format = MONO_8; + + // http://linuxtv.org/downloads/v4l-dvb-apis/V4L2-PIX-FMT-Y12.html + }else if(fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_Y12) { + + strncpy(fourcc, (char *)&fmt.fmt.pix.pixelformat, 4); + cout << "Pixel format : V4L2_PIX_FMT_Y12" << endl; + format = MONO_12; + + }*/ + + return true; + } + + bool CameraV4l2::getFrameSizeEnum() { + + bool res = false; + + struct v4l2_frmsizeenum frmsize; + memset(&frmsize, 0, sizeof(frmsize)); + frmsize.pixel_format = mFormat.fmt.pix.pixelformat; + + while(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0) { + + switch(frmsize.type) { + + case V4L2_FRMSIZE_TYPE_DISCRETE : + + cout << "[" << frmsize.index << "] : " << frmsize.discrete.width << "x" << frmsize.discrete.height << endl; + res = true; + + break; + + case V4L2_FRMSIZE_TYPE_CONTINUOUS : + + break; + + case V4L2_FRMSIZE_TYPE_STEPWISE : + + cout << "Min width : " << frmsize.stepwise.min_width << endl; + cout << "Max width : " << frmsize.stepwise.max_width << endl; + cout << "Step width : " << frmsize.stepwise.step_width << endl; + + cout << "Min height : " << frmsize.stepwise.min_height << endl; + cout << "Max height : " << frmsize.stepwise.max_height << endl; + cout << "Step height : " << frmsize.stepwise.step_height << endl; + + break; + + } + + frmsize.index++; + + } + + return res; + + } + + bool CameraV4l2::getFrameSize(int &w, int &h) { + + w = 0; + h = 0; + + struct v4l2_format fmt; + memset(&fmt, 0, sizeof(fmt)); + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + fmt.fmt.pix.field = V4L2_FIELD_NONE; + + if (-1 == xioctl(fd, VIDIOC_G_FMT, &fmt)) { + perror("Getting Pixel Format"); + return false; + } + + h = fmt.fmt.pix.height; + w = fmt.fmt.pix.width; + + return true; + + } + + bool CameraV4l2::getFpsEnum(vector &values){ + + bool res = false; + + struct v4l2_frmivalenum temp; + memset(&temp, 0, sizeof(temp)); + temp.pixel_format = mFormat.fmt.pix.pixelformat; + temp.width = mFormat.fmt.pix.width; + temp.height = 
mFormat.fmt.pix.height; + + ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &temp); + if (temp.type == V4L2_FRMIVAL_TYPE_DISCRETE) { + while (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &temp) != -1) { + values.push_back(float(temp.discrete.denominator)/temp.discrete.numerator); + cout << values.back() << " fps" << endl; + temp.index += 1; + res = true; + } + } + float stepval = 0; + if (temp.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) { + stepval = 1; + } + if (temp.type == V4L2_FRMIVAL_TYPE_STEPWISE || temp.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) { + float minval = float(temp.stepwise.min.numerator)/temp.stepwise.min.denominator; + float maxval = float(temp.stepwise.max.numerator)/temp.stepwise.max.denominator; + if (stepval == 0) { + stepval = float(temp.stepwise.step.numerator)/temp.stepwise.step.denominator; + } + for (float cval = minval; cval <= maxval; cval += stepval) { + cout << 1/cval << " fps" << endl; + values.push_back(1.0/cval); + res = true; + } + } + + return res; + + } + + bool CameraV4l2::getFPS(double &value) { + + struct v4l2_streamparm streamparm; + struct v4l2_fract *tpf; + + streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == ioctl(fd, VIDIOC_G_PARM, &streamparm)) { + cout << "Fail to read fps value." 
<< endl; + return false; + } + + tpf = &streamparm.parm.capture.timeperframe; + + value = (double)tpf->denominator / (double)tpf->numerator; + + return true; + } + + string CameraV4l2::getModelName(){ + + struct v4l2_capability caps = {}; + + // http://linuxtv.org/downloads/v4l-dvb-apis/vidioc-querycap.html + + if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &caps)) { + perror("Querying device's name"); + return ""; + } + + return (char*)caps.card; + + } + + bool CameraV4l2::setExposureTime(double val){ + + if(expMax > 0 && expMin > 0 && val >= expMin && val <= expMax) { + + // ************************ DISABLE AUTO EXPOSURE ***************************** + + struct v4l2_queryctrl queryctrl1; + struct v4l2_control control1; + memset(&queryctrl1, 0, sizeof(queryctrl1)); + queryctrl1.id = V4L2_CID_EXPOSURE_AUTO; + + if(-1 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl1)) { + + if(errno != EINVAL) { + + perror("VIDIOC_QUERYCTRL"); + return false; + + }else { + + printf(">> V4L2_CID_EXPOSURE_AUTO is not supported\n"); + + } + + }else if (queryctrl1.flags & V4L2_CTRL_FLAG_DISABLED) { + + printf(">> V4L2_CID_EXPOSURE_AUTO is not supported\n"); + + }else { + + memset(&control1, 0, sizeof (control1)); + control1.id = V4L2_CID_EXPOSURE_AUTO; + control1.value = V4L2_EXPOSURE_MANUAL; + + if (-1 == ioctl(fd, VIDIOC_S_CTRL, &control1)) { + perror("VIDIOC_S_CTRL"); + return false; + } + + cout << ">> Manual exposure setted." 
<< endl; + + } + + // ************************ SET AUTO EXPOSURE ***************************** + + struct v4l2_queryctrl queryctrl; + struct v4l2_control control; + memset(&queryctrl, 0, sizeof(queryctrl)); + queryctrl.id = V4L2_CID_EXPOSURE_ABSOLUTE; + + if(-1 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl)) { + + if(errno != EINVAL) { + + perror("VIDIOC_QUERYCTRL"); + return false; + + }else { + + printf(">> V4L2_CID_EXPOSURE_ABSOLUTE is not supported\n"); + + } + + }else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) { + + printf(">> V4L2_CID_EXPOSURE_ABSOLUTE is not supported\n"); + + }else { + + memset(&control, 0, sizeof (control)); + control.id = V4L2_CID_EXPOSURE_ABSOLUTE; + + /* + V4L2_CID_EXPOSURE_ABSOLUTE integer + Determines the exposure time of the camera sensor. + The exposure time is limited by the frame interval. + Drivers should interpret the values as 100 µs units, w + here the value 1 stands for 1/10000th of a second, 10000 + for 1 second and 100000 for 10 seconds. + */ + + control.value = val/100; + exp = val; + printf(">> V4L2_CID_EXPOSURE_ABSOLUTE setted to %f (%f with V4L2)\n", val, val/100); + + if (-1 == ioctl(fd, VIDIOC_S_CTRL, &control)) { + perror("VIDIOC_S_CTRL"); + return false; + } + + } + + return true; + + }else { + + if(expMin == -1 && expMax == -1) { + + cout << "Exposure time not supported." 
<< endl; + return true; + + } + + cout << "> Exposure value (" << val << ") is not in range [ " << expMin << " - " << expMax << " ]" << endl; + + } + + return false; + } + + bool CameraV4l2::setGain(int val){ + + if(gainMax > 0 && gainMin > 0 && val >= gainMin && val <= gainMax) { + + struct v4l2_queryctrl queryctrl; + struct v4l2_control control; + memset(&queryctrl, 0, sizeof(queryctrl)); + queryctrl.id = V4L2_CID_GAIN; + + if(-1 == ioctl(fd, VIDIOC_QUERYCTRL, &queryctrl)) { + + if(errno != EINVAL) { + + perror("VIDIOC_QUERYCTRL"); + return false; + + }else { + + printf(">> V4L2_CID_GAIN is not supported\n"); + + } + + }else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) { + + printf(">> V4L2_CID_GAIN is not supported\n"); + + }else { + + memset(&control, 0, sizeof (control)); + control.id = V4L2_CID_GAIN; + control.value = val; + gain = val; + + if (-1 == ioctl(fd, VIDIOC_S_CTRL, &control)) { + perror("VIDIOC_S_CTRL"); + return false; + } + + } + + return true; + + }else { + + if(gainMin == -1 && gainMax == -1) { + + cout << "Gain not supported." 
<< endl; + return true; + + } + + cout << "> Gain value (" << val << ") is not in range [ " << gainMin << " - " << gainMax << " ]" << endl; + + } + + return false; + + } + + bool CameraV4l2::setFPS(double fps){ + + bool res = true; + struct v4l2_frmivalenum temp; + memset(&temp, 0, sizeof(temp)); + temp.pixel_format = mFormat.fmt.pix.pixelformat; + temp.width = mFormat.fmt.pix.width; + temp.height = mFormat.fmt.pix.height; + + ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &temp); + + if (temp.type == V4L2_FRMIVAL_TYPE_DISCRETE) { + + vector frameIntervals; + while (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &temp) != -1) { + + if(fps == (float(temp.discrete.denominator)/temp.discrete.numerator)) { + + struct v4l2_streamparm setfps; + struct v4l2_fract *tpf; + memset (&setfps, 0, sizeof (setfps)); + setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + tpf = &setfps.parm.capture.timeperframe; + + tpf->numerator = temp.discrete.numerator; + //cout << "numerator : " << tpf->numerator << endl; + tpf->denominator = temp.discrete.denominator;//cvRound(fps); + //cout << "denominator : " << tpf->denominator << endl; + //retval=1; + if (ioctl(fd, VIDIOC_S_PARM, &setfps) < 0) { + cout << "Failed to set camera FPS:" << strerror(errno) << endl; + res = false; + break; + } + + break; + + } + + temp.index += 1; + + } + } + + float stepval = 0; + if (temp.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) { + stepval = 1; + cout << "V4L2_FRMIVAL_TYPE_CONTINUOUS" << endl; + struct v4l2_streamparm setfps; + struct v4l2_fract *tpf; + memset (&setfps, 0, sizeof (setfps)); + setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + tpf = &setfps.parm.capture.timeperframe; + + tpf->numerator = 1000; + //cout << "numerator : " << tpf->numerator << endl; + tpf->denominator = fps*1000;//cvRound(fps); + //cout << "denominator : " << tpf->denominator << endl; + //retval=1; + if (ioctl(fd, VIDIOC_S_PARM, &setfps) < 0) { + cout << "Failed to set camera FPS:" << strerror(errno) << endl; + res = false; + + }else{ + + if (!tpf->denominator || 
!tpf->numerator) + printf("Invalid frame rate\n"); + else + printf("Frame rate set to %.3f fps\n", + 1.0 * tpf->denominator / tpf->numerator); + } + } + + if (temp.type == V4L2_FRMIVAL_TYPE_STEPWISE) { + cout << "V4L2_FRMIVAL_TYPE_STEPWISE" << endl; + float minval = float(temp.stepwise.min.numerator)/temp.stepwise.min.denominator; + float maxval = float(temp.stepwise.max.numerator)/temp.stepwise.max.denominator; + if (stepval == 0) { + stepval = float(temp.stepwise.step.numerator)/temp.stepwise.step.denominator; + } + /*for (float cval = minval; cval <= maxval; cval += stepval) { + cout << 1/cval << " fps" << endl; + + }*/ + + + + } + + return res; + + } + + bool CameraV4l2::setPixelFormat(CamPixFmt depth){ + + struct v4l2_fmtdesc fmtdesc = {0}; + fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + char fourcc[5] = {0}; + bool fmtFound = false; + char c, e; + mFormat.fmt.pix.field = V4L2_FIELD_NONE; + EParser fmt; + string fstring = fmt.getStringEnum(depth); + + while (0 == xioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) { + + strncpy(fourcc, (char *)&fmtdesc.pixelformat, 4); + + if(string(fourcc) == fstring) { + + fmtFound = true; + + switch(depth) { + + case MONO8 : + + { + + mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_GREY; + + } + + break; + + case GREY : + + { + + mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_GREY; + + } + + break; + + case YUYV : + + { + + mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; + + } + + break; + + case UYVY : + + { + + mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; + + } + + break; + + case RGB565 : + + { + + mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB565; + + } + + break; + + case BGR3 : + + { + + mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_BGR24; + + } + + break; + + case RGB3 : + + { + + mFormat.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24; + + } + + break; + + } + + + break; + } + + fmtdesc.index++; + } + + if(!fmtFound) { + + BOOST_LOG_SEV(logger, critical) << ">> FORMAT " << fstring << " NOT SUPPORTED !"; + return false; + } + + 
//strncpy(fourcc, (char *)&mFormat.fmt.pix.pixelformat, 4); + + return true; + + } + + void CameraV4l2::getAvailablePixelFormats(){ + + struct v4l2_fmtdesc fmtdesc = {0}; + fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + char fourcc[5] = {0}; + vector pixfmt; + char c, e; + struct v4l2_format pfmt; + memset(&pfmt, 0, sizeof(pfmt)); + pfmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + pfmt.fmt.pix.field = V4L2_FIELD_NONE; + + cout << ">> Device pixel formats :" << endl; + + while (0 == xioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) { + + strncpy(fourcc, (char *)&fmtdesc.pixelformat, 4); + + c = fmtdesc.flags & 1? 'C' : ' '; + e = fmtdesc.flags & 2? 'E' : ' '; + //printf(" %s : %c%c %s\n", fourcc, c, e, fmtdesc.description); + string fmt = string(fourcc); + std::transform(fmt.begin(), fmt.end(),fmt.begin(), ::toupper); + pixfmt.push_back(fmt); + cout << "- " << fmt << endl; + fmtdesc.index++; + } + + // Compare found pixel formats to currently formats supported by freeture + + cout << endl << ">> Available pixel formats :" << endl; + EParser fmt; + + for( int i = 0; i != pixfmt.size(); i++ ) { + + if(fmt.isEnumValue(pixfmt.at(i))) { + + cout << "- " << pixfmt.at(i) << " available --> ID : " << fmt.parseEnum(pixfmt.at(i)) << endl; + + } + + } + + } + + void CameraV4l2::errno_exit (const char *s) { + fprintf(stderr, "%s error %d, %s\n", s, errno, strerror(errno)); + exit(EXIT_FAILURE); + } + + int CameraV4l2::xioctl (int fh, int request, void *arg) { + int r; + + do + { + r = ioctl(fh, request, arg); + } while (-1 == r && EINTR == errno); + + return r; + } + + int CameraV4l2::read_frame (void) { + //struct v4l2_buffer buf; + unsigned int i; + + switch (io) + { + case IO_METHOD_READ: + { + + if (-1 == read(fd, buffers[0].start, buffers[0].length)) + { + switch (errno) + { + case EAGAIN: + return 0; + + case EIO: + /* Could ignore EIO, see spec. 
*/ + + /* fall through */ + + default: + errno_exit("read"); + } + } + + break; + } + case IO_METHOD_MMAP: + { + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + + if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) + { + switch (errno) + { + case EAGAIN: + return 0; + + case EIO: + /* Could ignore EIO, see spec. */ + + /* fall through */ + + default: + errno_exit("VIDIOC_DQBUF"); + } + } + + assert(buf.index < n_buffers); + + if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) + errno_exit("VIDIOC_QBUF"); + break; + } + case IO_METHOD_USERPTR: + { + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_USERPTR; + + if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) + { + switch (errno) + { + case EAGAIN: + return 0; + + case EIO: + /* Could ignore EIO, see spec. */ + + /* fall through */ + + default: + { + errno_exit("VIDIOC_DQBUF"); + } + } + } + + for (i = 0; i < n_buffers; ++i) + { + if (buf.m.userptr == (unsigned long)buffers[i].start + && buf.length == buffers[i].length) + break; + } + assert(i < n_buffers); + + if (-1 == xioctl(fd, VIDIOC_QBUF, &buf)) + { + errno_exit("VIDIOC_QBUF"); + } + break; + } + } + + return 1; + } + + void CameraV4l2::init_read (unsigned int buffer_size) { + buffers = (buffer*)(calloc(1, sizeof(*buffers))); + + if (!buffers) + { + fprintf(stderr, "Out of memory\n"); + exit(EXIT_FAILURE); + } + + buffers[0].length = buffer_size; + buffers[0].start = malloc(buffer_size); + + if (!buffers[0].start) + { + fprintf(stderr, "Out of memory\n"); + exit(EXIT_FAILURE); + } + } + + void CameraV4l2::init_mmap (void) { + + struct v4l2_requestbuffers req; + + memset(&req, 0, sizeof(req)); + + req.count = 4; + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req.memory = V4L2_MEMORY_MMAP; + + if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req)) + { + if (EINVAL == errno) + { + fprintf(stderr, "%s does not support " + "memory mapping\n", mDeviceName); + exit(EXIT_FAILURE); + } + else + { + 
errno_exit("VIDIOC_REQBUFS"); + } + } + + if (req.count < 2) \ + { + fprintf(stderr, "Insufficient buffer memory on %s\n", + mDeviceName); + exit(EXIT_FAILURE); + } + + buffers = (buffer*)calloc(req.count, sizeof(*buffers)); + + if (!buffers) + { + fprintf(stderr, "Out of memory\n"); + exit(EXIT_FAILURE); + } + + for (n_buffers = 0; n_buffers < req.count; ++n_buffers) + { + struct v4l2_buffer buf; + + memset(&buf, 0, sizeof(buf)); + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + buf.index = n_buffers; + + if (-1 == xioctl(fd, VIDIOC_QUERYBUF, &buf)) + errno_exit("VIDIOC_QUERYBUF"); + + buffers[n_buffers].length = buf.length; + + buffers[n_buffers].start = + mmap(NULL /* start anywhere */, + buf.length, + PROT_READ | PROT_WRITE /* required */, + MAP_SHARED /* recommended */, + fd, buf.m.offset); + + if (MAP_FAILED == buffers[n_buffers].start) + errno_exit("mmap"); + } + + } + + void CameraV4l2::init_userp (unsigned int buffer_size) { + + struct v4l2_requestbuffers req; + + memset(&req, 0, sizeof(req)); + + req.count = 4; + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req.memory = V4L2_MEMORY_USERPTR; + + if (-1 == xioctl(fd, VIDIOC_REQBUFS, &req)) + { + if (EINVAL == errno) + { + fprintf(stderr, "%s does not support " + "user pointer i/o\n", mDeviceName); + exit(EXIT_FAILURE); + } + else + { + errno_exit("VIDIOC_REQBUFS"); + } + } + + buffers = (buffer*)calloc(4, sizeof(*buffers)); + + if (!buffers) + { + fprintf(stderr, "Out of memory\n"); + exit(EXIT_FAILURE); + } + + for (n_buffers = 0; n_buffers < 4; ++n_buffers) + { + buffers[n_buffers].length = buffer_size; + buffers[n_buffers].start = malloc(buffer_size); + + if (!buffers[n_buffers].start) + { + fprintf(stderr, "Out of memory\n"); + exit(EXIT_FAILURE); + } + } + } + +#endif diff --git a/CameraV4l2.h b/CameraV4l2.h new file mode 100644 index 0000000..d6ffc02 --- /dev/null +++ b/CameraV4l2.h @@ -0,0 +1,190 @@ +/* CameraV4l2.h + 
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau -- FRIPON-GEOPS-UPSUD +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 17/08/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CameraV4l2.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 17/08/2015 +*/ + +#pragma once + +#include "config.h" + +#ifdef LINUX + + #include "opencv2/highgui/highgui.hpp" + #include + + #include + #include + #include "Frame.h" + #include "TimeDate.h" + #include "Camera.h" + #include + + #include + #include + #include + #include + + + #include /* low-level i/o */ + #include + #include + #include + #include + #include + #include + #include + + #include + + #define BOOST_LOG_DYN_LINK 1 + + #include "EParser.h" + #include + #include + #include + #include + #include + #include + #include + #include + #include + #include + #include "ELogSeverityLevel.h" + #include "PixFmtConv.h" + #include + + using namespace cv; + using namespace std; + + class CameraV4l2: public Camera { + + private: + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init{ + + public: + + Init(){ + + logger.add_attribute("ClassName", boost::log::attributes::constant("CameraV4l2")); + + } + + 
}initializer; + + const char* mDeviceName; + int fd; + double expMin, expMax, exp; + int gainMin, gainMax, gain; + int mWidth, mHeight; + int mFrameCounter; + struct v4l2_format mFormat; + bool mCustomSize; + + public : + + void init_userp (unsigned int buffer_size); + void init_mmap (void); + void init_read (unsigned int buffer_size); + int read_frame (void); + void errno_exit (const char *s); + int xioctl (int fh, int request, void *arg); + + + CameraV4l2(); + + ~CameraV4l2(); + + bool getInfos(); + + vector> getCamerasList(); + + bool listCameras(); + + bool createDevice(int id); + + bool setSize(int width, int height, bool customSize); + + bool grabInitialization(); + + void grabCleanse(); + + bool acqStart(); + + void acqStop(); + + bool grabImage(Frame& newFrame); + + bool grabSingleImage(Frame &frame, int camID); + + bool getDeviceNameById(int id, string &device); + + bool getCameraName(); + + void getExposureBounds(double &eMin, double &eMax); + + void getGainBounds(int &gMin, int &gMax); + + bool getPixelFormat(CamPixFmt &format); + + bool getFrameSize(int &w, int &h); + + bool getFrameSizeEnum(); + + bool getFPS(double &value); + + bool getFpsEnum(vector &values); + + string getModelName(); + + double getExposureTime(); + + bool setExposureTime(double exp); + + bool setGain(int gain); + + bool setFPS(double fps); + + bool setPixelFormat(CamPixFmt depth); + + void getAvailablePixelFormats(); + + + private : + + bool convertImage(unsigned char* buffer, Mat &image); + + bool setSize(); + + }; + +#endif diff --git a/CameraVideo.cpp b/CameraVideo.cpp new file mode 100644 index 0000000..c2321b4 --- /dev/null +++ b/CameraVideo.cpp @@ -0,0 +1,155 @@ +/* + CameraVideo.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or 
modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 20/07/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CameraVideo.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 13/06/2014 +* \brief Acquisition thread with video in input. +*/ + +#include "CameraVideo.h" + +boost::log::sources::severity_logger< LogSeverityLevel > CameraVideo::logger; + +CameraVideo::Init CameraVideo::initializer; + +CameraVideo::CameraVideo(vector videoList, bool verbose):mVideoID(0), mFrameWidth(0), mFrameHeight(0), mReadDataStatus(false){ + + mVideoList = videoList; + + // Open the video file for reading. 
+ if(mVideoList.size()>0) + mCap = VideoCapture(videoList.front()); + else + throw "No video path in input."; + + mExposureAvailable = false; + mGainAvailable = false; + mInputDeviceType = VIDEO; + mVerbose = verbose; + +} + +CameraVideo::~CameraVideo(void){ + +} + +bool CameraVideo::grabInitialization(){ + + if(!mCap.isOpened()) { + + if(mVerbose) BOOST_LOG_SEV(logger,fail) << "Cannot open the video file"; + if(mVerbose) cout << "Cannot open the video file" << endl; + return false; + } + + return true; + +} + +bool CameraVideo::getStopStatus(){ + + return mReadDataStatus; + +} + +bool CameraVideo::getDataSetStatus(){ + + if(mVideoID == mVideoList.size()) + return false; + else + return true; +} + +bool CameraVideo::loadNextDataSet(string &location){ + + if(mVideoID != 0){ + + cout << "Change video : " << mVideoID << " - Path : " << mVideoList.at(mVideoID) << endl; + + mCap = VideoCapture(mVideoList.at(mVideoID)); + + if(!mCap.isOpened()){ + + cout << "Cannot open the video file" << endl; + return false; + + }else{ + + cout << "Success to open the video file" << endl; + + } + + mFrameHeight = mCap.get(CV_CAP_PROP_FRAME_HEIGHT); + + mFrameWidth = mCap.get(CV_CAP_PROP_FRAME_WIDTH); + + mReadDataStatus = false; + + } + + return true; + +} + +bool CameraVideo::createDevice(int id) { + return true; +} + +bool CameraVideo::grabImage(Frame &img){ + + Mat frame; + + if(mCap.read(frame)) { + + //BGR (3 channels) to G (1 channel) + cvtColor(frame, frame, CV_BGR2GRAY); + + boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + + Frame f = Frame(frame, 0, 0, to_iso_extended_string(time)); + + img = f; + img.mFrameNumber = mCap.get(CV_CAP_PROP_POS_FRAMES); + img.mFrameRemaining = mCap.get(CV_CAP_PROP_FRAME_COUNT) - mCap .get(CV_CAP_PROP_POS_FRAMES); + return true; + + } + + if(mCap.get(CV_CAP_PROP_FRAME_COUNT) - mCap .get(CV_CAP_PROP_POS_FRAMES) <=0) { + + mVideoID++; + mReadDataStatus = true; + + } + + return false; +} + + diff --git 
a/CameraVideo.h b/CameraVideo.h new file mode 100644 index 0000000..a827942 --- /dev/null +++ b/CameraVideo.h @@ -0,0 +1,141 @@ +/* + CameraVideo.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 20/10/2014 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CameraVideo.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 03/06/2014 +* \brief Acquisition thread with video in input. 
+*/ + +#pragma once +#include "config.h" + +#include "opencv2/highgui/highgui.hpp" +#include + +#ifdef LINUX +#define BOOST_LOG_DYN_LINK 1 +#endif + +#include "Frame.h" +#include "SaveImg.h" +#include "TimeDate.h" +#include "Conversion.h" +#include "Camera.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "ELogSeverityLevel.h" +#include +#include + +using namespace boost::filesystem; +using namespace cv; +using namespace std; + +class CameraVideo : public Camera{ + + private: + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init{ + + public: + + Init(){ + + logger.add_attribute("ClassName", boost::log::attributes::constant("CameraVideo")); + + } + + }initializer; + + int mFrameWidth; + int mFrameHeight; + VideoCapture mCap; + bool mReadDataStatus; + int mVideoID; + vector mVideoList; + + public: + + CameraVideo(vector videoList, bool verbose); + + ~CameraVideo(void); + + bool createDevice(int id); + + bool acqStart() {return true;}; + + bool listCameras() {return true;}; + + bool grabImage(Frame &img); + + bool grabInitialization(); + + bool getStopStatus(); + + /** + * Get data status : Is there another video to use in input ? + * + * @return If there is still a video to load in input. + */ + bool getDataSetStatus(); + + /** + * Load next video if there is. + * + * @return Success status to load next data set. 
+ */ + bool loadNextDataSet(string &location); + + bool getFPS(double &value) {value = 0; return false;}; + + bool setExposureTime(double exp){return true;}; + + bool setGain(int gain) {return true;}; + + bool setFPS(double fps){return true;}; + + bool setPixelFormat(CamPixFmt format){return true;}; + + bool setSize(int width, int height, bool customSize) {return true;}; + +}; + diff --git a/CameraWindows.cpp b/CameraWindows.cpp new file mode 100644 index 0000000..ca26d36 --- /dev/null +++ b/CameraWindows.cpp @@ -0,0 +1,290 @@ +/* + CameraWindows.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . 
+* +* Last modified: 02/10/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CameraWindows.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 02/10/2015 +*/ + +#include "CameraWindows.h" + +#ifdef WINDOWS + +CameraWindows::CameraWindows() { + + mVideoInput.setVerbose(false); + mExposureAvailable = false; + mGainAvailable = false; + mFrameCounter = 0; + mInputDeviceType = CAMERA; + mDevNumber = -1; + mBuffer = NULL; +} + +CameraWindows::~CameraWindows() +{ + if(mBuffer != NULL) + delete[] mBuffer; +} + +vector> CameraWindows::getCamerasList() { + + vector> camerasList; + + int nbCamFound = mVideoInput.listDevices(true); + + if(nbCamFound > 0) { + + for(int i = 0; i < nbCamFound; i++) { + + pair c; + c.first = i; + c.second = "NAME[" + string(mVideoInput.getDeviceName(i)) + "] SDK[VI]"; + camerasList.push_back(c); + + } + + } + + return camerasList; + +} + +bool CameraWindows::setSize(int width, int height, bool customSize) { + + if(customSize) + return mVideoInput.setupDevice(mDevNumber,width,height); + else + return mVideoInput.setupDevice(mDevNumber,640,480); + +} + + bool CameraWindows::grabSingleImage(Frame &frame, int camID) { + + int numDevices = mVideoInput.listDevices(true); + + if(frame.mWidth > 0 && frame.mHeight > 0) { + if(!mVideoInput.setupDevice(camID, frame.mWidth, frame.mHeight)) + return false; + }else{ + if(!mVideoInput.setupDevice(camID, 640, 480)) + return false; + } + + // As requested width and height can not always be accomodated make sure to check the size once the device is setup + mWidth = mVideoInput.getWidth(camID); + mHeight = mVideoInput.getHeight(camID); + mSize = mVideoInput.getSize(camID); + cout << ">> Size setted to : " << mWidth << "x" << mHeight << endl; + + // Create the buffer where the video will be captured + mBuffer = new unsigned char[mSize]; + + // Disable autofocus and set focus to 0 + // mVideoInput.setVideoSettingCamera(camID, 
CameraControl_Focus, mDefaultFocus, CameraControl_Flags_Manual); + + setPixelFormat(frame.mFormat); + setExposureTime(frame.mExposure); + setGain(frame.mGain); + + bool success = mVideoInput.getPixels(camID, mBuffer, false, true); + + if(success) { + + cv::Mat image( mHeight, mWidth, CV_8UC3, mBuffer ); + Mat img; + cv::cvtColor(image, img, CV_BGR2GRAY); + boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + string acquisitionDate = to_iso_extended_string(time); + frame = Frame(img, 0, 0, acquisitionDate); + frame.mFps = 0; + frame.mFormat = MONO8; + frame.mSaturatedValue = 255; + frame.mFrameNumber = 0; + mVideoInput.stopDevice(camID); + return true; + + } + + std::cout << "Error loading frame from camera (Windows)." << std::endl; + mVideoInput.stopDevice(camID); + return false; + +}; + +bool CameraWindows::createDevice(int id){ + + mDevNumber = id; + return true; + +} + +bool CameraWindows::setPixelFormat(CamPixFmt format){ + cout << ">> (WARNING) Can't set format with VI." << endl; + return true; +} + +void CameraWindows::getExposureBounds(double &eMin, double &eMax){ + eMin = -1; + eMax = -1; +} + +void CameraWindows::getGainBounds(int &gMin, int &gMax){ + gMin = -1; + gMax = -1; +} + +bool CameraWindows::getFPS(double &value){ + value = 0; + return false; +} + +bool CameraWindows::setExposureTime(double value){ + cout << ">> (WARNING) Can't set exposure time with VI." << endl; + return true; +} + +bool CameraWindows::setGain(int value){ + cout << ">> (WARNING) Can't set gain with VI." << endl; + return true; +} + +bool CameraWindows::setFPS(double value){ + + // If you want to capture at a different frame rate (default is 30) specify it here, you are not guaranteed to get this fps though. 
+ // Call before setupDevice + // directshow will try and get the closest possible framerate to what is requested + mVideoInput.setIdealFramerate(mDevNumber, (int)value); + + return true; + +} + +bool CameraWindows::setFpsToLowerValue(){ + return false; +} + +bool CameraWindows::grabInitialization() { + + // As requested width and height can not always be accomodated make sure to check the size once the device is setup + mWidth = mVideoInput.getWidth(mDevNumber); + mHeight = mVideoInput.getHeight(mDevNumber); + mSize = mVideoInput.getSize(mDevNumber); + cout << "Default size : " << mWidth << "x" << mHeight << endl; + + // Create the buffer where the video will be captured + mBuffer = new unsigned char[mSize]; + + // Disable autofocus and set focus to 0 + mVideoInput.setVideoSettingCamera(mDevNumber, CameraControl_Focus, mDefaultFocus, CameraControl_Flags_Manual); + + + //long current_value,min_value,max_value,stepping_delta,flags,defaultValue; + + //mVideoInput.getVideoSettingCamera(mDevNumber,mVideoInput.propBrightness ,min_value,max_value,stepping_delta,current_value,flags,defaultValue); + /*cout << "min: "<< min_value << endl; + cout << "max: "<< max_value << endl; + cout << "flags: "<< flags << endl; + cout << "SteppingDelta: "<< stepping_delta << endl; + cout << "currentValue: "<< current_value << endl; + cout << "defaultValue: "<< defaultValue << endl;*/ + //mVideoInput.showSettingsWindow(mDevNumber); + + return true; +} + +bool CameraWindows::acqStart(){return true;}; + +bool CameraWindows::grabImage(Frame &newFrame){ + + bool success = mVideoInput.getPixels(mDevNumber, mBuffer, false, true); + + if(success) { + + cv::Mat image( mHeight, mWidth, CV_8UC3, mBuffer ); + Mat img; + cv::cvtColor(image, img, CV_BGR2GRAY); + boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + string acquisitionDate = to_iso_extended_string(time); + newFrame = Frame(img, 0, 0, acquisitionDate); + newFrame.mFps = 0; + newFrame.mFormat = MONO8; + 
newFrame.mSaturatedValue = 255; + newFrame.mFrameNumber = mFrameCounter; + mFrameCounter++; + return true; + + } + + std::cout << "Error loading frame from camera (Windows)." << std::endl; + return false; + +} + +void CameraWindows::acqStop(){ + mVideoInput.stopDevice(mDevNumber); +} + +void CameraWindows::grabCleanse(){ + +} + +bool CameraWindows::getPixelFormat(CamPixFmt &format){ + return false; +} + +// Return exposure time in seconds. +double CameraWindows::getExposureTime() { + + long min = 0, max = 0, SteppingDelta = 0 , currentValue = 0, flags = 0, defaultValue = 0; + + // https://msdn.microsoft.com/en-us/library/dd318253(v=vs.85).aspx + if(mVideoInput.getVideoSettingCamera(mDevNumber, CameraControl_Exposure, min, max, SteppingDelta, currentValue, flags, defaultValue)) { + + double e = 0.0; + + if(currentValue >= 0) { + + e = pow(2,currentValue); + + } else { + + e = 1.0 / pow(2,abs(currentValue)); + + } + + return e; + + } + + return 0.0; + +} + +#endif diff --git a/CameraWindows.h b/CameraWindows.h new file mode 100644 index 0000000..2963cbd --- /dev/null +++ b/CameraWindows.h @@ -0,0 +1,129 @@ +/* + CameraWindows.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . 
*
* Last modified: 02/10/2015
*
*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/

/**
* \file CameraWindows.h
* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
* \version 1.0
* \date 02/10/2015
*/

#pragma once

#include "config.h"

#ifdef WINDOWS

// NOTE(review): the system header name after this #include was lost in
// extraction (angle-bracket content stripped) — TODO restore.
#include
#include "opencv2/highgui/highgui.hpp"
#include "videoInput.h"
#include "Frame.h"
#include "Camera.h"

// DirectShow (videoInput library) implementation of the Camera interface.
class CameraWindows: public Camera {

    private :

        int mDevNumber;             // Device index passed to videoInput calls.
        videoInput mVideoInput;     // DirectShow capture wrapper.
        int mWidth;                 // Negotiated frame width (pixels).
        int mHeight;                // Negotiated frame height (pixels).
        int mSize;                  // Capture buffer size in bytes.
        unsigned char * mBuffer;    // Raw capture buffer.
        int mFrameCounter;          // Count of frames grabbed so far.

        // Mirrors the Win32 CameraControlProperty enumeration — the
        // enumerator order (hence values) must not be changed.
        // see : http://msdn.microsoft.com/en-us/library/dd318253(v=vs.85).aspx
        // and : http://msdn.microsoft.com/en-us/library/dd389148(v=vs.85).aspx
        typedef enum {

            CameraControl_Pan,
            CameraControl_Tilt,
            CameraControl_Roll,
            CameraControl_Zoom,
            CameraControl_Exposure,
            CameraControl_Iris,
            CameraControl_Focus

        }CameraControlProperty;

        // Mirrors the Win32 CameraControlFlags enumeration.
        // see : http://msdn.microsoft.com/en-us/library/dd318251(v=vs.85).aspx
        typedef enum {

            CameraControl_Flags_Auto = 0x0001,
            CameraControl_Flags_Manual = 0x0002

        }CameraControlFlags;

        // Focus value applied at initialization (autofocus disabled).
        static const long mDefaultFocus = 0;

    public:

        CameraWindows();

        ~CameraWindows();

        // NOTE(review): template arguments of this return type were lost in
        // extraction (presumably a vector of (id, name) pairs — TODO confirm).
        vector> getCamerasList();

        // Grab a single frame from camera camID (one-shot capture).
        bool grabSingleImage(Frame &frame, int camID);

        // Create/select the capture device with the given index.
        bool createDevice(int id);

        bool setPixelFormat(CamPixFmt format);

        void getExposureBounds(double &eMin, double &eMax);

        void getGainBounds(int &gMin, int &gMax);

        bool getFPS(double &value);

        bool setExposureTime(double value);

        bool setGain(int value);

        bool setFPS(double value);

        bool setSize(int width, int height, bool customSize);

        bool setFpsToLowerValue();

        bool grabInitialization();

        bool acqStart();

        bool grabImage(Frame &newFrame);

        void acqStop();

        void grabCleanse();

        bool getPixelFormat(CamPixFmt &format);

        double getExposureTime();

};
#endif

diff --git a/CfgLoader.cpp b/CfgLoader.cpp
new file
mode 100644 index 0000000..d919fff --- /dev/null +++ b/CfgLoader.cpp @@ -0,0 +1,196 @@ +/* + CfgLoader.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 20/10/2014 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CfgLoader.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 13/06/2014 +* \brief Methods to fetch parameters from configuration file. +*/ + +#include "CfgLoader.h" + +CfgLoader::CfgLoader(void){} + +void CfgLoader::Clear(){ + + mData.clear(); + +} + +bool CfgLoader::Load(const string& file){ + + ifstream inFile(file.c_str()); + + if (!inFile.good()){ + return false; + } + + string prevKey = ""; + + while (inFile.good() && ! inFile.eof()){ + + string line; + getline(inFile, line); + + // filter out comments + if (!line.empty()){ + + int pos = line.find('#'); + + if (pos != string::npos){ + + line = line.substr(0, pos); + + } + } + + // split line into key and value + if (!line.empty()){ + + int pos = line.find('='); + + // "=" not found. 
+ if (pos != string::npos){ + + string key = Trim(line.substr(0, pos)); + string value = Trim(line.substr(pos + 1)); + + if (!key.empty() && !value.empty()){ + + prevKey = key; + mData[key] = value; + + } + + }else if(line.size() > 1 && !prevKey.empty()){ + + mData[prevKey] += Trim(line); + + } + } + } + + return true; +} + +bool CfgLoader::Contains(const string& key) const{ + + return mData.find(key) != mData.end(); +} + +bool CfgLoader::Get(const string& key, string& value) const{ + + map::const_iterator iter = mData.find(key); + + if(iter != mData.end()){ + + value = iter->second; + return true; + + }else{ + + return false; + } +} + +bool CfgLoader::Get(const string& key, int& value) const{ + + string str; + + if(Get(key, str)){ + + value = atoi(str.c_str()); + return true; + + }else{ + + return false; + } +} + +bool CfgLoader::Get(const string& key, long& value) const{ + + string str; + + if(Get(key, str)){ + + value = atol(str.c_str()); + return true; + + }else{ + + return false; + } +} + +bool CfgLoader::Get(const string& key, double& value) const{ + + string str; + + if(Get(key, str)){ + + value = atof(str.c_str()); + return true; + + }else{ + + return false; + } +} + +bool CfgLoader::Get(const string& key, bool& value) const{ + + string str; + + if(Get(key, str)){ + + value = (str == "true"); + return true; + + }else{ + + return false; + } +} + +string CfgLoader::Trim(const string& str){ + + int first = str.find_first_not_of(" \t"); + + if(first != string::npos){ + + int last = str.find_last_not_of(" \t"); + + return str.substr(first, last - first + 1); + + }else{ + + return ""; + } +} diff --git a/CfgLoader.h b/CfgLoader.h new file mode 100644 index 0000000..01d0ea4 --- /dev/null +++ b/CfgLoader.h @@ -0,0 +1,138 @@ +/* + CfgLoader.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public 
License
*
* FreeTure is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* FreeTure is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with FreeTure. If not, see .
*
* Last modified: 20/10/2014
*
*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/

/**
* \file CfgLoader.h
* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
* \version 1.0
* \date 03/06/2014
* \brief Load parameters from a configuration file.
*/

#pragma once

// NOTE(review): the system header names after these #include directives were
// lost in extraction (angle-bracket content stripped) — TODO restore.
#include
#include
#include
#include
#include

using namespace std;

// Simple "key = value" configuration file reader with typed accessors.
class CfgLoader{

    private :

        // NOTE(review): template arguments lost in extraction — presumably a
        // map from parameter name to its raw string value (TODO confirm).
        map mData; // Container.

    public :

        /**
        * Constructor.
        *
        */
        CfgLoader(void);

        /**
        * Clear all values
        *
        */
        void Clear();

        /**
        * Load parameters name and value from configuration file.
        *
        * @param file Path of the configuration file.
        * @return Success status to load parameters.
        */
        bool Load(const string& file);

        /**
        * Check if value associated with given key exists.
        *
        * @param key Freeture's parameter.
        * @return Key has a value or not.
        */
        bool Contains(const string& key) const;

        /**
        * Get string value associated with given key
        *
        * @param key Freeture's parameter.
        * @param value Key's value.
        * @return Success to get value associated with given key.
        */
        bool Get(const string& key, string& value) const;

        /**
        * Get int value associated with given key
        *
        * @param key Freeture's parameter.
        * @param value Key's value.
        * @return Success to get value associated with given key.
        */
        bool Get(const string& key, int& value) const;

        /**
        * Get long value associated with given key
        *
        * @param key Freeture's parameter.
        * @param value Key's value.
        * @return Success to get value associated with given key.
        */
        bool Get(const string& key, long& value) const;

        /**
        * Get double value associated with given key
        *
        * @param key Freeture's parameter.
        * @param value Key's value.
        * @return Success to get value associated with given key.
        */
        bool Get(const string& key, double& value) const;

        /**
        * Get bool value associated with given key
        *
        * @param key Freeture's parameter.
        * @param value Key's value.
        * @return Success to get value associated with given key.
        */
        bool Get(const string& key, bool& value) const;

    private :

        /**
        * Remove spaces in configuration file's lines.
        *
        * @param str Configuration file's line.
        * @return String without space.
        */
        static string Trim(const string& str);

};

diff --git a/CfgParam.cpp b/CfgParam.cpp
new file mode 100644
index 0000000..ef85677
--- /dev/null
+++ b/CfgParam.cpp
/*
                            CfgParam.cpp

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
*
* This file is part of: freeture
*
* Copyright: (C) 2014-2016 Yoan Audureau, Chiara Marmo
* FRIPON-GEOPS-UPSUD-CNRS
*
* License: GNU General Public License
*
* FreeTure is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* FreeTure is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with FreeTure. If not, see .
+* +* Last modified: 13/05/2016 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CfgParam.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 13/06/2014 +* \brief Get parameters from configuration file. +*/ + +#include "CfgParam.h" + +boost::log::sources::severity_logger< LogSeverityLevel > CfgParam::logger; + +CfgParam::Init CfgParam::initializer; + +CfgParam::CfgParam(string cfgFilePath) { + + // Initialize parameters. + + showErrors = false; + + pair var1(-1, false); + pair,string> var2(var1, ""); + param.DEVICE_ID = var2; + param.data.status = false; + param.camInput.status = false; + param.det.status = false; + param.fitskeys.status = false; + param.framesInput.status = false; + param.log.status = false; + param.vidInput.status = false; + param.st.status = false; + param.station.status = false; + param.mail.status = false; + + param.data.DATA_PATH = "./"; + param.data.FITS_COMPRESSION = false; + param.data.FITS_COMPRESSION_METHOD = "[compress]"; + + param.log.LOG_ARCHIVE_DAY = 5; + param.log.LOG_PATH = "./"; + param.log.LOG_SEVERITY = notification; + param.log.LOG_SIZE_LIMIT = 50; + + vector finput, vinput; + param.framesInput.INPUT_FRAMES_DIRECTORY_PATH = finput; + param.vidInput.INPUT_VIDEO_PATH = vinput; + param.framesInput.INPUT_TIME_INTERVAL = 0; + param.vidInput.INPUT_TIME_INTERVAL = 0; + + param.camInput.ACQ_DAY_EXPOSURE = 0; + param.camInput.ACQ_DAY_GAIN = 0; + param.camInput.ACQ_FORMAT = MONO8; + param.camInput.ACQ_FPS = 30; + param.camInput.ACQ_HEIGHT = 480; + param.camInput.ACQ_NIGHT_EXPOSURE = 0; + param.camInput.ACQ_NIGHT_GAIN = 0; + param.camInput.ACQ_RES_CUSTOM_SIZE = false; + param.camInput.ACQ_WIDTH = 640; + param.camInput.ephem.EPHEMERIS_ENABLED = false; + param.camInput.ephem.SUNRISE_DURATION = 3600; + vectorsunrisetime, sunsettime; + sunrisetime.push_back(7); + sunrisetime.push_back(0); + sunsettime.push_back(22); + sunsettime.push_back(0); + 
param.camInput.ephem.SUNRISE_TIME = sunrisetime; + param.camInput.ephem.SUNSET_DURATION = 3600; + param.camInput.ephem.SUNSET_TIME = sunsettime; + param.camInput.EXPOSURE_CONTROL_FREQUENCY = 300; + param.camInput.EXPOSURE_CONTROL_SAVE_IMAGE = false; + param.camInput.EXPOSURE_CONTROL_SAVE_INFOS = false; + param.camInput.regcap.ACQ_REGULAR_ENABLED = false; + param.camInput.schcap.ACQ_SCHEDULE_ENABLED = false; + param.camInput.SHIFT_BITS = false; + + param.det.ACQ_BUFFER_SIZE = 15; + param.det.ACQ_MASK_ENABLED = false; + param.det.DET_DEBUG = false; + param.det.DET_DEBUG_UPDATE_MASK = false; + param.det.DET_DOWNSAMPLE_ENABLED = true; + param.det.DET_ENABLED = false; + + param.st.STACK_ENABLED = false; + + param.mail.MAIL_DETECTION_ENABLED = false; + + param.station.STATION_NAME = "STATION"; + param.station.SITEELEV = 0.0; + param.station.SITELAT = 0.0; + param.station.SITELONG = 0.0; + + // Load parameters. + + boost::filesystem::path pcfg(cfgFilePath); + if(boost::filesystem::exists(pcfg)) { + if(cfg.Load(cfgFilePath)) { + loadDeviceID(); + loadDataParam(); + loadLogParam(); + + if(param.DEVICE_ID.first.second) { + + // Get input type according to device number. + Device *device = new Device(); + device->setVerbose(false); + device->listDevices(false); + inputType = device->getDeviceType(device->getDeviceSdk(param.DEVICE_ID.first.first)); + delete device; + + switch(inputType) { + + case VIDEO : + loadVidParam(); + break; + + case SINGLE_FITS_FRAME : + loadFramesParam(); + break; + + // camera + case CAMERA : + loadCamParam(); + break; + + } + + + } + + loadDetParam(); + loadStackParam(); + loadStationParam(); + loadFitskeysParam(); + loadMailParam(); + }else{ + emsg.push_back("Fail to load configuration file."); + cout << "Fail to load configuration file." 
<< endl; + } + }else{ + emsg.push_back("Configuration file path not exists : " + cfgFilePath); + cout << "Configuration file path not exists : " << cfgFilePath << endl; + } +} + +void CfgParam::loadDeviceID() { + + pair var1; + pair,string> var2; + param.DEVICE_ID = var2; + + Device *device = new Device(); + device->setVerbose(false); + device->listDevices(false); + + int cId; + string cString; + bool failIntId = false, failStringId = false; + string failmsg = "- CAMERA_ID : "; + + if(!cfg.Get("CAMERA_ID", cId)) { + failIntId = true; + failmsg += "Fail to get value. Probably not defined.\n"; + } + + if(!cfg.Get("CAMERA_ID", cString)) { + failStringId = true; + failmsg += "Fail to get value. Probably not defined.\n"; + }else{ + try{ + EParser cam_string; + CamSdkType cType = cam_string.parseEnum("CAMERA_ID", cString); + + if(cType == VIDEOFILE) { + cId = device->mNbDev - 2; + }else if(cType == FRAMESDIR){ + cId = device->mNbDev - 1; + }else{ + failmsg += "Not correct input.\n"; + failStringId = true; + } + + }catch (std::exception &ex) { + failmsg += string(ex.what()); + failStringId = true; + } + } + + if(failIntId && failStringId) { + param.DEVICE_ID.second = failmsg; + delete device; + return; + } + + if(device->mNbDev < 0 || cId > (device->mNbDev - 1)){ + param.DEVICE_ID.second = "- CAMERA_ID's value not exist."; + delete device; + return; + } + + param.DEVICE_ID.first.first = cId; + param.DEVICE_ID.first.second = true; + param.DEVICE_ID.second = ""; + + delete device; + +} + +void CfgParam::loadDataParam() { + + bool e = false; + + if(!cfg.Get("DATA_PATH", param.data.DATA_PATH)) { + param.data.errormsg.push_back("- DATA_PATH : Fail to get value."); + e = true; + }else{ + + namespace fs = boost::filesystem; + path p(param.data.DATA_PATH); + + if(!fs::exists(p)){ + if(!fs::create_directory(p)){ + e = true; + param.data.errormsg.push_back("- DATA_PATH : Can't create Data Path directory."); + } + } + } + + if(!cfg.Get("FITS_COMPRESSION", 
param.data.FITS_COMPRESSION)) { + param.data.errormsg.push_back("- FITS_COMPRESSION : Fail to get value."); + e = true; + }else{ + + param.data.FITS_COMPRESSION_METHOD = ""; + + if(param.data.FITS_COMPRESSION) { + if(!cfg.Get("FITS_COMPRESSION_METHOD", param.data.FITS_COMPRESSION_METHOD)) { + param.data.errormsg.push_back("- FITS_COMPRESSION_METHOD : Fail to get value."); + e = true; + } + } + } + + if(!e) param.data.status = true; +} + +void CfgParam::loadLogParam() { + + bool e = false; + + if(!cfg.Get("LOG_PATH", param.log.LOG_PATH)) { + param.log.errormsg.push_back("- LOG_PATH : Fail to get value."); + e = true; + }else{ + + namespace fs = boost::filesystem; + path p(param.log.LOG_PATH); + + if(!fs::exists(p)){ + if(!fs::create_directory(p)){ + e = true; + param.log.errormsg.push_back("- LOG_PATH : Can't create Log Path directory."); + } + } + } + + if(!cfg.Get("LOG_ARCHIVE_DAY", param.log.LOG_ARCHIVE_DAY)) { + param.log.errormsg.push_back("- LOG_ARCHIVE_DAY : Fail to get value."); + e = true; + } + + if(!cfg.Get("LOG_SIZE_LIMIT", param.log.LOG_SIZE_LIMIT)) { + param.log.errormsg.push_back("- LOG_SIZE_LIMIT : Fail to get value."); + e = true; + } + + string log_severity; + EParser log_sev; + if(!cfg.Get("LOG_SEVERITY", log_severity)) { + param.log.errormsg.push_back("- LOG_SEVERITY : Fail to get value."); + e = true; + } + + try { + param.log.LOG_SEVERITY = log_sev.parseEnum("LOG_SEVERITY", log_severity); + }catch (std::exception &ex) { + param.log.errormsg.push_back("- LOG_SEVERITY : " + string(ex.what())); + e = true; + } + + if(!e) param.log.status = true; +} + +void CfgParam::loadFramesParam() { + + bool e = false; + + if(!cfg.Get("INPUT_TIME_INTERVAL", param.framesInput.INPUT_TIME_INTERVAL)) { + param.framesInput.errormsg.push_back("- INPUT_TIME_INTERVAL : Fail to get value."); + //cout << "- INPUT_FRAMES_DIRECTORY_PATH : Fail to get value." 
<< endl; + e = true; + } + + string inputPaths; + if(!cfg.Get("INPUT_FRAMES_DIRECTORY_PATH", inputPaths)) { + param.framesInput.errormsg.push_back("- INPUT_FRAMES_DIRECTORY_PATH : Fail to get value."); + //cout << "- INPUT_FRAMES_DIRECTORY_PATH : Fail to get value." << endl; + e = true; + } + + typedef boost::tokenizer > tokenizer; + boost::char_separator sep(","); + tokenizer tokens(inputPaths, sep); + + for(tokenizer::iterator tok_iter = tokens.begin();tok_iter != tokens.end(); ++tok_iter){ + boost::filesystem::path p_input_frames_dir(*tok_iter); + if(!boost::filesystem::exists(p_input_frames_dir)) { + param.framesInput.errormsg.push_back("- INPUT_FRAMES_DIRECTORY_PATH : " + *tok_iter + " not exists."); + e = true; + }else{ + param.framesInput.INPUT_FRAMES_DIRECTORY_PATH.push_back(*tok_iter); + } + } + + if(!e) param.framesInput.status = true; +} + +void CfgParam::loadVidParam() { + + bool e = false; + + if(!cfg.Get("INPUT_TIME_INTERVAL", param.vidInput.INPUT_TIME_INTERVAL)) { + param.vidInput.errormsg.push_back("- INPUT_TIME_INTERVAL : Fail to get value."); + //cout << "- INPUT_FRAMES_DIRECTORY_PATH : Fail to get value." 
<< endl; + e = true; + } + + string input_video_path; + if(!cfg.Get("INPUT_VIDEO_PATH", input_video_path)) { + param.vidInput.errormsg.push_back("- INPUT_VIDEO_PATH : Fail to get value."); + e = true; + } + + typedef boost::tokenizer > tokenizer; + boost::char_separator sep(","); + tokenizer tokens(input_video_path, sep); + + for(tokenizer::iterator tok_iter = tokens.begin();tok_iter != tokens.end(); ++tok_iter){ + boost::filesystem::path p_input_video_path(*tok_iter); + if(!is_regular_file(p_input_video_path)) { + param.vidInput.errormsg.push_back("- INPUT_VIDEO_PATH : " + *tok_iter + " not exists."); + e = true; + }else{ + param.vidInput.INPUT_VIDEO_PATH.push_back(*tok_iter); + } + } + + if(!e) param.vidInput.status = true; + +} + +void CfgParam::loadCamParam() { + + bool e = false; + + if(!param.DEVICE_ID.first.second) { + + loadDeviceID(); + if(!param.DEVICE_ID.first.second) { + return; + } + } + + Device *device = new Device(); + device->setVerbose(false); + device->listDevices(false); + + if(!device->createCamera(param.DEVICE_ID.first.first, true)) { + delete device; + return; + } + + if(!cfg.Get("ACQ_FPS", param.camInput.ACQ_FPS)) { + param.camInput.errormsg.push_back("- ACQ_FPS : Fail to get value."); + e = true; + } + + //------------------------------------------------------------------- + + string pixfmt; + if(!cfg.Get("ACQ_FORMAT", pixfmt)) { + param.camInput.errormsg.push_back("- ACQ_FORMAT : Fail to get value."); + e = true; + }else { + try { + EParser camPixFmt; + param.camInput.ACQ_FORMAT = camPixFmt.parseEnum("ACQ_FORMAT", pixfmt); + }catch (std::exception &ex) { + param.camInput.errormsg.push_back("- ACQ_FORMAT : " + string(ex.what())); + e = true; + } + } + + //------------------------------------------------------------------- + + if(!cfg.Get("ACQ_RES_CUSTOM_SIZE", param.camInput.ACQ_RES_CUSTOM_SIZE)) { + param.camInput.errormsg.push_back("- ACQ_RES_CUSTOM_SIZE : Fail to get value."); + e = true; + }else{ + + 
if(param.camInput.ACQ_RES_CUSTOM_SIZE) { + + string acq_res_custome_size; + if(!cfg.Get("ACQ_RES_SIZE", acq_res_custome_size)) { + param.camInput.errormsg.push_back("- ACQ_RES_SIZE : Fail to get value."); + e = true; + }else { + + if(acq_res_custome_size.find("x") != std::string::npos) { + + string width = acq_res_custome_size.substr(0,acq_res_custome_size.find("x")); + string height = acq_res_custome_size.substr(acq_res_custome_size.find("x")+1,string::npos); + int mSizeWidth = atoi(width.c_str()); + int mSizeHeight = atoi(height.c_str()); + + if(mSizeHeight <= 0) { + param.camInput.errormsg.push_back("- ACQ_RES_SIZE : Height value is not correct."); + e = true; + }else{ + param.camInput.ACQ_HEIGHT = mSizeHeight; + } + + if(mSizeWidth <= 0) { + param.camInput.errormsg.push_back("- ACQ_RES_SIZE : Width value is not correct."); + e = true; + }else{ + param.camInput.ACQ_WIDTH = mSizeWidth; + } + + }else { + param.camInput.errormsg.push_back("- ACQ_RES_SIZE : Format is not correct. It must be : WxH."); + e = true; + } + } + }else { + + param.camInput.ACQ_HEIGHT = 480; + param.camInput.ACQ_WIDTH = 640; + + } + } + + //------------------------------------------------------------------- + + if(!cfg.Get("SHIFT_BITS", param.camInput.SHIFT_BITS)) { + param.camInput.errormsg.push_back("- SHIFT_BITS : Fail to get value."); + e = true; + } + + int ming= -1, maxg = -1; + double mine = -1, maxe = -1; + device->getCameraGainBounds(ming, maxg); + device->getCameraExposureBounds(mine, maxe); + + //------------------------------------------------------------------- + + if(!cfg.Get("ACQ_NIGHT_EXPOSURE", param.camInput.ACQ_NIGHT_EXPOSURE)) { + param.camInput.errormsg.push_back("- ACQ_NIGHT_EXPOSURE : Fail to get value."); + e = true; + }else{ + + if(mine != -1 && maxe != -1) { + if(param.camInput.ACQ_NIGHT_EXPOSURE < mine || param.camInput.ACQ_NIGHT_EXPOSURE > maxe) { + param.camInput.errormsg.push_back("- ACQ_NIGHT_EXPOSURE : Value <" + + 
Conversion::intToString(param.camInput.ACQ_NIGHT_EXPOSURE) + + "> is not correct. \nAvailable range is from " + + Conversion::intToString(mine) + " to " + + Conversion::intToString(maxe)); + e = true; + } + } + } + + //------------------------------------------------------------------- + + if(!cfg.Get("ACQ_NIGHT_GAIN", param.camInput.ACQ_NIGHT_GAIN)) { + param.camInput.errormsg.push_back("- ACQ_NIGHT_GAIN : Fail to get value."); + e = true; + }else{ + + if(ming != -1 && maxg != -1) { + if(param.camInput.ACQ_NIGHT_GAIN < ming || param.camInput.ACQ_NIGHT_GAIN > maxg) { + param.camInput.errormsg.push_back("- ACQ_NIGHT_GAIN : Value <" + + Conversion::intToString(param.camInput.ACQ_NIGHT_GAIN) + + "> is not correct. \nAvailable range is from " + + Conversion::intToString(ming) + " to " + + Conversion::intToString(maxg)); + e = true; + } + } + } + + //------------------------------------------------------------------- + + if(!cfg.Get("ACQ_DAY_EXPOSURE", param.camInput.ACQ_DAY_EXPOSURE)) { + param.camInput.errormsg.push_back("- ACQ_DAY_EXPOSURE : Fail to get value."); + e = true; + }else{ + + if(mine != -1 && maxe != -1) { + if(param.camInput.ACQ_DAY_EXPOSURE < mine || param.camInput.ACQ_DAY_EXPOSURE > maxe) { + param.camInput.errormsg.push_back("- ACQ_DAY_EXPOSURE : Value <" + + Conversion::intToString(param.camInput.ACQ_DAY_EXPOSURE) + + "> is not correct. \nAvailable range is from " + + Conversion::intToString(mine) + " to " + + Conversion::intToString(maxe)); + e = true; + } + } + } + + //------------------------------------------------------------------- + + if(!cfg.Get("ACQ_DAY_GAIN", param.camInput.ACQ_DAY_GAIN)) { + param.camInput.errormsg.push_back("- ACQ_DAY_GAIN : Fail to get value."); + e = true; + }else{ + + if(ming != -1 && maxg != -1) { + if(param.camInput.ACQ_DAY_GAIN < ming || param.camInput.ACQ_DAY_GAIN > maxg) { + param.camInput.errormsg.push_back("- ACQ_DAY_GAIN : Value <" + + Conversion::intToString(param.camInput.ACQ_DAY_GAIN) + + "> is not correct. 
\nAvailable range is from " + + Conversion::intToString(ming) + " to " + + Conversion::intToString(maxg)); + e = true; + } + } + } + + //------------------------------------------------------------------- + + if(!cfg.Get("EXPOSURE_CONTROL_FREQUENCY", param.camInput.EXPOSURE_CONTROL_FREQUENCY)) { + param.camInput.errormsg.push_back("- EXPOSURE_CONTROL_FREQUENCY : Fail to get value."); + e = true; + } + + //------------------------------------------------------------------- + + if(!cfg.Get("EXPOSURE_CONTROL_SAVE_IMAGE", param.camInput.EXPOSURE_CONTROL_SAVE_IMAGE)) { + param.camInput.errormsg.push_back("- EXPOSURE_CONTROL_SAVE_IMAGE : Fail to get value."); + e = true; + } + + //------------------------------------------------------------------- + + if(!cfg.Get("EXPOSURE_CONTROL_SAVE_INFOS", param.camInput.EXPOSURE_CONTROL_SAVE_INFOS)) { + param.camInput.errormsg.push_back("- EXPOSURE_CONTROL_SAVE_INFOS : Fail to get value."); + e = true; + } + + //------------------------------------------------------------------- + + if(!cfg.Get("EPHEMERIS_ENABLED", param.camInput.ephem.EPHEMERIS_ENABLED)) { + param.camInput.errormsg.push_back("- EPHEMERIS_ENABLED : Fail to get value."); + e = true; + } + + //------------------------------------------------------------------- + if(param.camInput.ephem.EPHEMERIS_ENABLED) { + if(!cfg.Get("SUN_HORIZON_1", param.camInput.ephem.SUN_HORIZON_1)) { + param.camInput.errormsg.push_back("- SUN_HORIZON_1 : Fail to get value."); + e = true; + } + + //------------------------------------------------------------------- + + if(!cfg.Get("SUN_HORIZON_2", param.camInput.ephem.SUN_HORIZON_2)) { + param.camInput.errormsg.push_back("- SUN_HORIZON_2 : Fail to get value."); + e = true; + } + } + + //------------------------------------------------------------------- + + if(!param.camInput.ephem.EPHEMERIS_ENABLED) { + + string sunrise_time; + if(!cfg.Get("SUNRISE_TIME", sunrise_time)) { + param.camInput.errormsg.push_back("- SUNRISE_TIME : Fail to get 
value."); + e = true; + }else{ + + if(sunrise_time.find(":") != std::string::npos) { + + typedef boost::tokenizer > tokenizer; + boost::char_separator sep(":"); + tokenizer tokens(sunrise_time, sep); + + param.camInput.ephem.SUNRISE_TIME.clear(); + for(tokenizer::iterator tok_iter = tokens.begin();tok_iter != tokens.end(); ++tok_iter){ + param.camInput.ephem.SUNRISE_TIME.push_back(atoi((*tok_iter).c_str())); + } + + if(param.camInput.ephem.SUNRISE_TIME.size() == 2) { + if(param.camInput.ephem.SUNRISE_TIME.at(0) < 0 || param.camInput.ephem.SUNRISE_TIME.at(0) >= 24) { + param.camInput.errormsg.push_back("- SUNRISE_TIME : Hours value must be between 0 - 23"); + e = true; + } + + if(param.camInput.ephem.SUNRISE_TIME.at(1) < 0 || param.camInput.ephem.SUNRISE_TIME.at(0) >= 60) { + param.camInput.errormsg.push_back("- SUNRISE_TIME : Minutes value must be between 0 - 59"); + e = true; + } + } + + }else { + param.camInput.errormsg.push_back("- SUNRISE_TIME : Format is not correct. It must be : HH:MM"); + e = true; + } + } + + //------------------------------------------------------------------- + + string sunset_time; + if(!cfg.Get("SUNSET_TIME", sunset_time)) { + param.camInput.errormsg.push_back("- SUNSET_TIME : Fail to get value."); + e = true; + }else{ + + if(sunset_time.find(":") != std::string::npos) { + + typedef boost::tokenizer > tokenizer; + boost::char_separator sep(":"); + tokenizer tokens(sunset_time, sep); + + param.camInput.ephem.SUNSET_TIME.clear(); + for(tokenizer::iterator tok_iter = tokens.begin();tok_iter != tokens.end(); ++tok_iter) + param.camInput.ephem.SUNSET_TIME.push_back(atoi((*tok_iter).c_str())); + + if(param.camInput.ephem.SUNSET_TIME.size() == 2) { + if(param.camInput.ephem.SUNSET_TIME.at(0) < 0 || param.camInput.ephem.SUNSET_TIME.at(0) >= 24) { + param.camInput.errormsg.push_back("- SUNSET_TIME : Hours value must be between 0 - 23"); + e = true; + } + + if(param.camInput.ephem.SUNSET_TIME.at(1) < 0 || param.camInput.ephem.SUNSET_TIME.at(0) >= 
60) { + param.camInput.errormsg.push_back("- SUNSET_TIME : Minutes value must be between 0 - 59"); + e = true; + } + } + + }else { + param.camInput.errormsg.push_back("- SUNSET_TIME : Format is not correct. It must be : HH:MM"); + e = true; + } + } + + //------------------------------------------------------------------- + + if(!cfg.Get("SUNSET_DURATION", param.camInput.ephem.SUNSET_DURATION)) { + param.camInput.errormsg.push_back("- SUNSET_DURATION : Fail to get value."); + e = true; + } + + //------------------------------------------------------------------- + + if(!cfg.Get("SUNRISE_DURATION", param.camInput.ephem.SUNRISE_DURATION)) { + param.camInput.errormsg.push_back("- SUNRISE_DURATION : Fail to get value."); + e = true; + } + } + + //------------------------------------------------------------------- + + if(!cfg.Get("ACQ_REGULAR_ENABLED", param.camInput.regcap.ACQ_REGULAR_ENABLED)) { + param.camInput.errormsg.push_back("- ACQ_REGULAR_ENABLED : Fail to get value."); + e = true; + }else { + if(param.camInput.regcap.ACQ_REGULAR_ENABLED) { + //------------------------------------------------------------------- + + string reg_mode; + if(!cfg.Get("ACQ_REGULAR_MODE", reg_mode)) { + e = true; + param.camInput.errormsg.push_back("- ACQ_REGULAR_MODE : Fail to load value."); + }else { + try { + EParser regMode; + param.camInput.regcap.ACQ_REGULAR_MODE = regMode.parseEnum("ACQ_REGULAR_MODE", reg_mode); + }catch (std::exception &ex) { + e = true; + param.camInput.errormsg.push_back("- ACQ_REGULAR_MODE : " + string(ex.what())); + } + } + + //------------------------------------------------------------------- + + { + + string img_prefix; + if(!cfg.Get("ACQ_REGULAR_PRFX", img_prefix)) { + e = true; + param.camInput.errormsg.push_back("- ACQ_REGULAR_PRFX : Fail to load value."); + }else { + param.camInput.regcap.ACQ_REGULAR_PRFX = img_prefix; + } + } + + //------------------------------------------------------------------- + + { + + string img_output; + 
if(!cfg.Get("ACQ_REGULAR_OUTPUT", img_output)) { + e = true; + param.camInput.errormsg.push_back("- ACQ_REGULAR_OUTPUT : Fail to load value."); + }else { + try { + EParser imgOutput; + param.camInput.regcap.ACQ_REGULAR_OUTPUT = imgOutput.parseEnum("ACQ_REGULAR_OUTPUT", img_output); + }catch (std::exception &ex) { + e = true; + param.camInput.errormsg.push_back("- ACQ_REGULAR_OUTPUT : " + string(ex.what())); + } + } + } + + //------------------------------------------------------------------- + + string regAcqParam; + if(!cfg.Get("ACQ_REGULAR_CFG", regAcqParam)) { + e = true; + param.camInput.errormsg.push_back("- ACQ_REGULAR_CFG : Fail to load value."); + }else { + std::transform(regAcqParam.begin(), regAcqParam.end(),regAcqParam.begin(), ::toupper); + + typedef boost::tokenizer > tokenizer1; + boost::char_separator sep1("HMSEGFN"); + tokenizer1 tokens1(regAcqParam, sep1); + + vector res1; + for(tokenizer1::iterator tokIter = tokens1.begin();tokIter != tokens1.end(); ++tokIter) + res1.push_back(*tokIter); + + if(res1.size() == 7) { + + // Get regular acquisition time interval. + if(atoi(res1.at(0).c_str())< 0 || atoi(res1.at(0).c_str()) >= 24) { + e = true; + param.camInput.errormsg.push_back("- ACQ_REGULAR_CFG : Hours can't have the value <" + res1.at(0) + ">.\nAvailable range is from 0 to 23."); + } + + if(atoi(res1.at(1).c_str())< 0 || atoi(res1.at(1).c_str()) >= 60) { + e = true; + param.camInput.errormsg.push_back("- ACQ_REGULAR_CFG : Minutes can't have the value <" + res1.at(1) + ">.\nAvailable range is from 0 to 23."); + } + + if(atoi(res1.at(2).c_str())< 0 || atoi(res1.at(2).c_str()) >= 60) { + e = true; + param.camInput.errormsg.push_back("- ACQ_REGULAR_CFG : Seconds can't have the value <" + res1.at(2) + ">.\nAvailable range is from 0 to 23."); + } + + param.camInput.regcap.ACQ_REGULAR_CFG.interval = atoi(res1.at(0).c_str()) * 3600 + atoi(res1.at(1).c_str()) * 60 + atoi(res1.at(2).c_str()); + + // Get regular acquisition exposure time. 
+ param.camInput.regcap.ACQ_REGULAR_CFG.exp = atoi(res1.at(3).c_str()); + + if(mine != -1 && maxe != -1) { + if(param.camInput.regcap.ACQ_REGULAR_CFG.exp < mine || param.camInput.regcap.ACQ_REGULAR_CFG.exp > maxe) { + param.camInput.errormsg.push_back("- ACQ_REGULAR_CFG : Exposure value <" + + Conversion::intToString(param.camInput.regcap.ACQ_REGULAR_CFG.exp) + + "> is not correct. \nAvailable range is from " + + Conversion::intToString(mine) + " to " + + Conversion::intToString(maxe)); + e = true; + } + } + + // Get regular acquisition gain. + param.camInput.regcap.ACQ_REGULAR_CFG.gain = atoi(res1.at(4).c_str()); + + if(ming != -1 && maxg != -1) { + if(param.camInput.regcap.ACQ_REGULAR_CFG.gain < ming || param.camInput.regcap.ACQ_REGULAR_CFG.gain > maxg) { + param.camInput.errormsg.push_back("- ACQ_REGULAR_CFG : Gain value <" + + Conversion::intToString(param.camInput.regcap.ACQ_REGULAR_CFG.gain) + + "> is not correct. \nAvailable range is from " + + Conversion::intToString(ming) + " to " + + Conversion::intToString(maxg)); + e = true; + } + } + + // Get regular acquisition repetition. + param.camInput.regcap.ACQ_REGULAR_CFG.rep = atoi(res1.at(6).c_str()); + + // Get regular acquisition format. + param.camInput.regcap.ACQ_REGULAR_CFG.fmt = static_cast(atoi(res1.at(5).c_str())); + EParser fmt; + if(fmt.getStringEnum(param.camInput.regcap.ACQ_REGULAR_CFG.fmt) == ""){ + e = true; + param.camInput.errormsg.push_back("- ACQ_REGULAR_CFG : Fail to extract pixel format on " + regAcqParam + ". 
Check if index <" + res1.at(5) + "> exits."); + } + } + } + } + } + + //------------------------------------------------------------------- + + if(!cfg.Get("ACQ_SCHEDULE_ENABLED", param.camInput.schcap.ACQ_SCHEDULE_ENABLED)) { + param.camInput.errormsg.push_back("- ACQ_SCHEDULE_ENABLED : Fail to get value."); + e = true; + }else{ + + if(param.camInput.schcap.ACQ_SCHEDULE_ENABLED) { + + if(!param.camInput.regcap.ACQ_REGULAR_ENABLED) { + //------------------------------------------------------------------- + + { + string img_output; + if(!cfg.Get("ACQ_SCHEDULE_OUTPUT", img_output)) { + e = true; + param.camInput.errormsg.push_back("- ACQ_SCHEDULE_OUTPUT : Fail to load value."); + }else { + try { + EParser imgOutput; + param.camInput.schcap.ACQ_SCHEDULE_OUTPUT = imgOutput.parseEnum("ACQ_SCHEDULE_OUTPUT", img_output); + }catch (std::exception &ex) { + e = true; + param.camInput.errormsg.push_back("- ACQ_SCHEDULE_OUTPUT : " + string(ex.what())); + } + } + } + + //------------------------------------------------------------------- + + { + string sACQ_SCHEDULE; + if(!cfg.Get("ACQ_SCHEDULE", sACQ_SCHEDULE)) { + e = true; + param.camInput.errormsg.push_back("- ACQ_SCHEDULE : Fail to load value."); + }else { + + vector sch1; + + typedef boost::tokenizer > tokenizer; + boost::char_separator sep(","); + tokenizer tokens(sACQ_SCHEDULE, sep); + + for(tokenizer::iterator tok_iter = tokens.begin();tok_iter != tokens.end(); ++tok_iter) { + string s = *tok_iter; + std::transform(s.begin(), s.end(),s.begin(), ::toupper); + sch1.push_back(s); + } + + for(int i = 0; i < sch1.size(); i++) { + + typedef boost::tokenizer > tokenizer_; + boost::char_separator sep_("HMSEGFN"); + tokenizer tokens_(sch1.at(i), sep_); + + vector sp; + + for(tokenizer::iterator tok_iter_ = tokens_.begin();tok_iter_ != tokens_.end(); ++tok_iter_) + sp.push_back(*tok_iter_); + + if(sp.size() == 7) { + + scheduleParam spa; + bool status = true; + spa.hours = atoi(sp.at(0).c_str()); + if(spa.hours< 0 || spa.hours 
>= 24) { + e = true; + status = false; + param.camInput.errormsg.push_back("- ACQ_SCHEDULE : In " + sch1.at(i) + ". Hours can't have the value <" + Conversion::intToString(spa.hours) + ">.\nAvailable range is from 0 to 23."); + } + spa.min = atoi(sp.at(1).c_str()); + if(spa.min< 0 || spa.min >= 60) { + e = true; + status = false; + param.camInput.errormsg.push_back("- ACQ_SCHEDULE : In " + sch1.at(i) + ". Minutes can't have the value <" + Conversion::intToString(spa.min) + ">.\nAvailable range is from 0 to 59."); + } + spa.sec = atoi(sp.at(2).c_str()); + if(spa.sec< 0 || spa.sec >= 60) { + e = true; + status = false; + param.camInput.errormsg.push_back("- ACQ_SCHEDULE : In " + sch1.at(i) + ". Seconds can't have the value <" + Conversion::intToString(spa.sec) + ">.\nAvailable range is from 0 to 59."); + } + spa.exp = atoi(sp.at(3).c_str()); + + if(mine != -1 && maxe != -1) { + if(spa.exp < mine || spa.exp > maxe) { + param.camInput.errormsg.push_back("- ACQ_SCHEDULE : In " + sch1.at(i) + ". Exposure value <" + + Conversion::intToString(spa.exp) + + "> is not correct. \nAvailable range is from " + + Conversion::intToString(mine) + " to " + + Conversion::intToString(maxe)); + e = true; + status = false; + } + } + + spa.gain = atoi(sp.at(4).c_str()); + + if(ming != -1 && maxg != -1) { + if(spa.gain < ming || spa.gain > maxg) { + param.camInput.errormsg.push_back("- ACQ_SCHEDULE : In " + sch1.at(i) + ". Gain value <" + + Conversion::intToString(spa.gain) + + "> is not correct. 
\nAvailable range is from " + + Conversion::intToString(ming) + " to " + + Conversion::intToString(maxg)); + e = true; + status = false; + } + } + + spa.rep = atoi(sp.at(6).c_str()); + if(spa.rep< 0 || spa.rep >= 60) { + e = true; + status = false; + param.camInput.errormsg.push_back("- ACQ_SCHEDULE : One repetition must be defined at least."); + } + spa.fmt = static_cast(atoi(sp.at(5).c_str())); + EParser fmt; + if(fmt.getStringEnum(spa.fmt) == ""){ + e = true; + status = false; + param.camInput.errormsg.push_back("- ACQ_SCHEDULE : Fail to extract pixel format for : " + sch1.at(i) + ". Index <" + sp.at(5) + "> not exist."); + } + + if(status) + param.camInput.schcap.ACQ_SCHEDULE.push_back(spa); + + } + } + + // Order scheduled acquisition + + if(param.camInput.schcap.ACQ_SCHEDULE.size() != 0){ + + // Sort time in list. + vector tempSchedule; + + do{ + + int minH; int minM; int minS; bool init = false; + + vector::iterator it; + vector::iterator it_select; + + for(it = param.camInput.schcap.ACQ_SCHEDULE.begin(); it != param.camInput.schcap.ACQ_SCHEDULE.end(); ++it){ + + if(!init){ + + minH = (*it).hours; + minM = (*it).min; + minS = (*it).sec; + it_select = it; + init = true; + + }else{ + + if((*it).hours < minH){ + + minH = (*it).hours; + minM = (*it).min; + minS = (*it).sec; + it_select = it; + + }else if((*it).hours == minH){ + + if((*it).min < minM){ + + minH = (*it).hours; + minM = (*it).min; + minS = (*it).sec; + it_select = it; + + }else if((*it).min == minM){ + + if((*it).sec < minS){ + + minH = (*it).hours; + minM = (*it).min; + minS = (*it).sec; + it_select = it; + + } + + } + + } + + } + + } + + if(init){ + + tempSchedule.push_back((*it_select)); + //cout << "-> " << (*it_select).hours << "H " << (*it_select).min << "M " << (*it_select).sec << "S " << endl; + param.camInput.schcap.ACQ_SCHEDULE.erase(it_select); + + } + + }while(param.camInput.schcap.ACQ_SCHEDULE.size() != 0); + + param.camInput.schcap.ACQ_SCHEDULE = tempSchedule; + + } + } + } + + }else{ 
+ e = true; + param.camInput.errormsg.push_back("- ACQ_SCHEDULE : Disable ACQ_REGULAR_ENABLED to use ACQ_SCHEDULE."); + } + } + } + + if(device != NULL) { + delete device; + } + + if(!e) param.camInput.status = true; +} + +void CfgParam::loadDetParam() { + + bool e = false; + + if(!cfg.Get("ACQ_BUFFER_SIZE", param.det.ACQ_BUFFER_SIZE)) { + e = true; + param.det.errormsg.push_back("- ACQ_BUFFER_SIZE : Fail to load value."); + } + + if(!cfg.Get("ACQ_MASK_ENABLED", param.det.ACQ_MASK_ENABLED)) { + e = true; + param.det.errormsg.push_back("- ACQ_MASK_ENABLED : Fail to load value."); + }else{ + + if(param.det.ACQ_MASK_ENABLED) { + + if(!cfg.Get("ACQ_MASK_PATH", param.det.ACQ_MASK_PATH)) { + e = true; + param.det.errormsg.push_back("- ACQ_MASK_PATH : Fail to load value."); + }else { + Mat tempmask = imread(param.det.ACQ_MASK_PATH, CV_LOAD_IMAGE_GRAYSCALE); + + if(!tempmask.data) { + e = true; + param.det.errormsg.push_back("- MASK : Fail to load the mask image. No data."); + // Add test to compare mask size to a capture from camera or video or frame file + }else { + + tempmask.copyTo(param.det.MASK); + + if(param.DEVICE_ID.first.second) { + + Device *device = new Device(); + device->setVerbose(false); + device->listDevices(false); + inputType = device->getDeviceType(device->getDeviceSdk(param.DEVICE_ID.first.first)); + + switch(inputType) { + + case VIDEO : + { + if(vidParamIsCorrect()) { + + for(int i = 0; i < param.vidInput.INPUT_VIDEO_PATH.size(); i++) { + VideoCapture cap = VideoCapture(param.vidInput.INPUT_VIDEO_PATH.at(i)); + if(cap.isOpened()) { + if(cap.get(CV_CAP_PROP_FRAME_HEIGHT) != tempmask.rows) { + e = true; + param.det.errormsg.push_back("- ACQ_MASK_PATH : Mask's height (" + + Conversion::intToString(tempmask.rows) + + ") is not correct with " + param.vidInput.INPUT_VIDEO_PATH.at(i) + " (" + + Conversion::intToString(cap.get(CV_CAP_PROP_FRAME_HEIGHT)) + ")"); + } + + if(cap.get(CV_CAP_PROP_FRAME_WIDTH) != tempmask.cols) { + e = true; + 
param.det.errormsg.push_back("- ACQ_MASK_PATH : Mask's width (" + + Conversion::intToString(tempmask.cols) + + ") is not correct with " + param.vidInput.INPUT_VIDEO_PATH.at(i) + " (" + + Conversion::intToString(cap.get(CV_CAP_PROP_FRAME_WIDTH)) + ")"); + } + }else{ + e = true; + param.det.errormsg.push_back("- ACQ_MASK_PATH : Check mask's size. Fail to open " + param.vidInput.INPUT_VIDEO_PATH.at(i)); + } + } + + }else{ + e = true; + param.det.errormsg.push_back("- ACQ_MASK_PATH : Check mask's size. Video parameters loading failed."); + } + } + break; + + case SINGLE_FITS_FRAME : + + { + if(framesParamIsCorrect()) { + for(int i = 0; i < param.framesInput.INPUT_FRAMES_DIRECTORY_PATH.size(); i++) { + // Search a fits file. + bool fitsfilefound = false; + string filefound = ""; + path p(param.framesInput.INPUT_FRAMES_DIRECTORY_PATH.at(i)); + for(directory_iterator file(p);file!= directory_iterator(); ++file){ + path curr(file->path()); + if(is_regular_file(curr)) { + if(file->path().string().find(".fit") != std::string::npos) { + fitsfilefound = true; + filefound = file->path().string(); + break; + } + } + } + + if(fitsfilefound) { + Fits2D f(filefound); + int h = 0,w = 0; + + if(!f.readIntKeyword("NAXIS1", w)){ + e = true; + param.det.errormsg.push_back("- ACQ_MASK_PATH : Check mask's size. Fail to read NAXIS1. " + param.framesInput.INPUT_FRAMES_DIRECTORY_PATH.at(i)); + } + + if(!f.readIntKeyword("NAXIS2", h)){ + e = true; + param.det.errormsg.push_back("- ACQ_MASK_PATH : Check mask's size. Fail to read NAXIS2. 
" + param.framesInput.INPUT_FRAMES_DIRECTORY_PATH.at(i)); + } + + if(h!=0 && w!=0) { + + if(h != tempmask.rows) { + e = true; + param.det.errormsg.push_back("- ACQ_MASK_PATH : Mask's height (" + + Conversion::intToString(tempmask.rows) + + ") is not correct with " + param.framesInput.INPUT_FRAMES_DIRECTORY_PATH.at(i) + " (" + + Conversion::intToString(h) + ")"); + } + + if(w != tempmask.cols) { + e = true; + param.det.errormsg.push_back("- ACQ_MASK_PATH : Mask's width (" + + Conversion::intToString(tempmask.cols) + + ") is not correct with " + param.framesInput.INPUT_FRAMES_DIRECTORY_PATH.at(i) + " (" + + Conversion::intToString(w) + ")"); + } + } + + }else{ + e = true; + param.det.errormsg.push_back("- ACQ_MASK_PATH : Check mask's size. No fits file found in " + param.framesInput.INPUT_FRAMES_DIRECTORY_PATH.at(i)); + } + } + } + } + + break; + + case CAMERA : + { + /*if(camParamIsCorrect()) { + + }*/ + } + break; + + default : + e = true; + param.det.errormsg.push_back("- ACQ_MASK_PATH : Fail to create device to check mask's size."); + + } + + delete device; + + }else { + e = true; + param.det.errormsg.push_back("- ACQ_MASK_PATH : Fail to create device to check mask's size. 
CAMERA_ID not loaded."); + } + } + } + } + } + + if(!cfg.Get("DET_ENABLED", param.det.DET_ENABLED)) { + e = true; + param.det.errormsg.push_back("- DET_ENABLED : Fail to load value."); + } + + string det_mode; + if(!cfg.Get("DET_MODE", det_mode)) { + e = true; + param.det.errormsg.push_back("- DET_MODE : Fail to load value."); + }else { + try { + EParser detMode; + param.det.DET_MODE = detMode.parseEnum("DET_MODE", det_mode); + }catch (std::exception &ex) { + e = true; + param.det.errormsg.push_back("- DET_MODE : " + string(ex.what())); + } + } + + if(!cfg.Get("DET_DEBUG", param.det.DET_DEBUG)) { + e = true; + param.det.errormsg.push_back("- DET_DEBUG : Fail to load value."); + }else{ + + if(param.det.DET_DEBUG){ + + if(!cfg.Get("DET_DEBUG_PATH", param.det.DET_DEBUG_PATH)) { + e = true; + param.det.errormsg.push_back("- DET_DEBUG_PATH : Fail to load value."); + }else{ + + namespace fs = boost::filesystem; + path p(param.det.DET_DEBUG_PATH); + + if(!fs::exists(p)){ + if(!fs::create_directory(p)){ + e = true; + param.det.errormsg.push_back("- DET_DEBUG_PATH : Can't create Debug Path."); + } + } + } + } + } + + if(!cfg.Get("DET_TIME_AROUND", param.det.DET_TIME_AROUND)) { + e = true; + param.det.errormsg.push_back("- DET_TIME_AROUND : Fail to load value."); + } + + if(!cfg.Get("DET_TIME_MAX", param.det.DET_TIME_MAX)) { + e = true; + param.det.errormsg.push_back("- DET_TIME_MAX : Fail to load value."); + }else{ + + // If input device type is frames or video, increase DET_TIME_MAX because + // time can not be take account as the time interval between can be increased. 
+ if(inputType == VIDEO || inputType == SINGLE_FITS_FRAME) { + param.det.DET_TIME_MAX = 10000; + }else{ + if(param.det.DET_TIME_MAX <= 0 || param.det.DET_TIME_MAX > 30) { + e = true; + param.det.errormsg.push_back("- DET_TIME_MAX : Available range is from 1 to 30 seconds."); + } + } + } + + string det_mthd; + if(!cfg.Get("DET_METHOD", det_mthd)) { + e = true; + param.det.errormsg.push_back("- DET_METHOD : Fail to load value."); + }else { + try { + EParser detMthd; + param.det.DET_METHOD = detMthd.parseEnum("DET_METHOD", det_mthd); + }catch (std::exception &ex) { + e = true; + param.st.errormsg.push_back("- DET_METHOD : " + string(ex.what())); + } + } + + if(!cfg.Get("DET_SAVE_FITS3D", param.det.DET_SAVE_FITS3D)) { + e = true; + param.det.errormsg.push_back("- DET_SAVE_FITS3D : Fail to load value."); + } + + if(!cfg.Get("DET_SAVE_FITS2D", param.det.DET_SAVE_FITS2D)) { + e = true; + param.det.errormsg.push_back("- DET_SAVE_FITS2D : Fail to load value."); + } + + if(!cfg.Get("DET_SAVE_SUM", param.det.DET_SAVE_SUM)) { + e = true; + param.det.errormsg.push_back("- DET_SAVE_SUM : Fail to load value."); + } + + if(!cfg.Get("DET_SUM_REDUCTION", param.det.DET_SUM_REDUCTION)) { + e = true; + param.det.errormsg.push_back("- DET_SUM_REDUCTION : Fail to load value."); + } + + string det_sum_mthd; + if(!cfg.Get("DET_SUM_MTHD", det_sum_mthd)) { + e = true; + param.det.errormsg.push_back("- DET_SUM_MTHD : Fail to load value."); + }else { + try { + EParser detSumMthd; + param.det.DET_SUM_MTHD = detSumMthd.parseEnum("DET_SUM_MTHD", det_sum_mthd); + }catch (std::exception &ex) { + e = true; + param.det.errormsg.push_back("- DET_SUM_MTHD : " + string(ex.what())); + } + } + + if(!cfg.Get("DET_SAVE_SUM_WITH_HIST_EQUALIZATION", param.det.DET_SAVE_SUM_WITH_HIST_EQUALIZATION)) { + e = true; + param.det.errormsg.push_back("- DET_SAVE_SUM_WITH_HIST_EQUALIZATION : Fail to load value."); + } + + if(!cfg.Get("DET_SAVE_AVI", param.det.DET_SAVE_AVI)) { + e = true; + 
param.det.errormsg.push_back("- DET_SAVE_AVI : Fail to load value."); + } + + if(!cfg.Get("DET_UPDATE_MASK", param.det.DET_UPDATE_MASK)) { + e = true; + param.det.errormsg.push_back("- DET_UPDATE_MASK : Fail to load value."); + } + + if(!cfg.Get("DET_UPDATE_MASK_FREQUENCY", param.det.DET_UPDATE_MASK_FREQUENCY)) { + e = true; + param.det.errormsg.push_back("- DET_UPDATE_MASK_FREQUENCY : Fail to load value."); + } + + if(!cfg.Get("DET_DEBUG_UPDATE_MASK", param.det.DET_DEBUG_UPDATE_MASK)) { + e = true; + param.det.errormsg.push_back("- DET_DEBUG_UPDATE_MASK : Fail to load value."); + }else{ + + if(param.det.DET_DEBUG_UPDATE_MASK){ + + if(!cfg.Get("DET_DEBUG_PATH", param.det.DET_DEBUG_PATH)) { + e = true; + param.det.errormsg.push_back("- DET_DEBUG_PATH : Fail to load value."); + }else{ + + namespace fs = boost::filesystem; + path p(param.det.DET_DEBUG_PATH); + + if(!fs::exists(p)){ + if(!fs::create_directory(p)){ + e = true; + param.det.errormsg.push_back("- DET_DEBUG_PATH : Can't create Debug Path. 
Debug Path must exist as DET_DEBUG_UPDATE_MASK is enabled."); + } + } + } + } + } + + // -------------------------------------------------------------------------------------- + + if(!cfg.Get("DET_DOWNSAMPLE_ENABLED", param.det.DET_DOWNSAMPLE_ENABLED)) { + e = true; + param.det.errormsg.push_back("- DET_DOWNSAMPLE_ENABLED : Fail to load value."); + } + + if(!cfg.Get("DET_SAVE_GEMAP", param.det.temporal.DET_SAVE_GEMAP)) { + e = true; + param.det.errormsg.push_back("- DET_SAVE_GEMAP : Fail to load value."); + } + + if(!cfg.Get("DET_SAVE_DIRMAP", param.det.temporal.DET_SAVE_DIRMAP)) { + e = true; + param.det.errormsg.push_back("- DET_SAVE_DIRMAP : Fail to load value."); + } + + if(!cfg.Get("DET_SAVE_POS", param.det.temporal.DET_SAVE_POS)) { + e = true; + param.det.errormsg.push_back("- DET_SAVE_POS : Fail to load value."); + } + + if(!cfg.Get("DET_LE_MAX", param.det.temporal.DET_LE_MAX)) { + e = true; + param.det.errormsg.push_back("- DET_LE_MAX : Fail to load value."); + }else{ + + if(param.det.temporal.DET_LE_MAX < 1 || param.det.temporal.DET_LE_MAX > 10) { + + e = true; + param.det.errormsg.push_back("- DET_LE_MAX : Available range is from 1 to 10."); + + } + + } + + if(!cfg.Get("DET_GE_MAX", param.det.temporal.DET_GE_MAX)) { + e = true; + param.det.errormsg.push_back("- DET_GE_MAX : Fail to load value."); + }else{ + + if(param.det.temporal.DET_GE_MAX < 1 || param.det.temporal.DET_GE_MAX > 10) { + + e = true; + param.det.errormsg.push_back("- DET_GE_MAX : Available range is from 1 to 10."); + + } + + } + + /*if(!cfg.Get("DET_SAVE_GE_INFOS", param.det.temporal.DET_SAVE_GE_INFOS)) { + e = true; + param.det.errormsg.push_back("- DET_SAVE_GE_INFOS : Fail to load value."); + }*/ + + if(!e) param.det.status = true; + +} + +void CfgParam::loadStackParam() { + + bool e = false; + + if(!cfg.Get("STACK_ENABLED", param.st.STACK_ENABLED)) { + e = true; + param.st.errormsg.push_back("- STACK_ENABLED : Fail to load value."); + } + + string stack_mode; + if(!cfg.Get("STACK_MODE", 
stack_mode)) { + e = true; + param.st.errormsg.push_back("- STACK_MODE : Fail to load value."); + }else { + try { + EParser stackMode; + param.st.STACK_MODE = stackMode.parseEnum("STACK_MODE", stack_mode); + }catch (std::exception &ex) { + e = true; + param.st.errormsg.push_back("- STACK_MODE : " + string(ex.what())); + } + } + + if(!cfg.Get("STACK_TIME", param.st.STACK_TIME)) { + e = true; + param.st.errormsg.push_back("- STACK_TIME : Fail to load value."); + } + + if(!cfg.Get("STACK_INTERVAL", param.st.STACK_INTERVAL)) { + e = true; + param.st.errormsg.push_back("- STACK_INTERVAL : Fail to load value."); + } + + string stack_mthd; + if(!cfg.Get("STACK_MTHD", stack_mthd)) { + e = true; + param.st.errormsg.push_back("- STACK_MTHD : Fail to load value."); + }else { + try { + EParser stackMthd; + param.st.STACK_MTHD = stackMthd.parseEnum("STACK_MTHD", stack_mthd); + }catch (std::exception &ex) { + e = true; + param.st.errormsg.push_back("- STACK_MTHD : " + string(ex.what())); + } + } + + if(!cfg.Get("STACK_REDUCTION", param.st.STACK_REDUCTION)) { + e = true; + param.st.errormsg.push_back("- STACK_REDUCTION : Fail to load value."); + } + + if(!e) param.st.status = true; + +} + +void CfgParam::loadStationParam() { + + bool e = false; + + if(!cfg.Get("STATION_NAME", param.station.STATION_NAME)) { + e = true; + param.station.errormsg.push_back("- STATION_NAME : Fail to load value."); + } + + if(!cfg.Get("TELESCOP", param.station.TELESCOP)) { + e = true; + param.station.errormsg.push_back("- TELESCOP : Fail to load value."); + } + + if(!cfg.Get("OBSERVER", param.station.OBSERVER)) { + e = true; + param.station.errormsg.push_back("- OBSERVER : Fail to load value."); + } + + if(!cfg.Get("INSTRUME", param.station.INSTRUME)) { + e = true; + param.station.errormsg.push_back("- INSTRUME : Fail to load value."); + } + + if(!cfg.Get("CAMERA", param.station.CAMERA)) { + e = true; + param.station.errormsg.push_back("- CAMERA : Fail to load value."); + } + + if(!cfg.Get("FOCAL", 
param.station.FOCAL)) { + e = true; + param.station.errormsg.push_back("- FOCAL : Fail to load value."); + } + + if(!cfg.Get("APERTURE", param.station.APERTURE)) { + e = true; + param.station.errormsg.push_back("- APERTURE : Fail to load value."); + } + + if(!cfg.Get("SITELONG", param.station.SITELONG)) { + e = true; + param.station.errormsg.push_back("- SITELONG : Fail to load value."); + } + + if(!cfg.Get("SITELAT", param.station.SITELAT)) { + e = true; + param.station.errormsg.push_back("- SITELAT : Fail to load value."); + } + + if(!cfg.Get("SITEELEV", param.station.SITEELEV)) { + e = true; + param.station.errormsg.push_back("- SITEELEV : Fail to load value."); + } + + if(!e) param.station.status = true; +} + +void CfgParam::loadFitskeysParam() { + + bool e = false; + + if(!cfg.Get("K1", param.fitskeys.K1)) { + e = true; + param.fitskeys.errormsg.push_back("- K1 : Fail to load value."); + } + + if(!cfg.Get("K2", param.fitskeys.K2)) { + e = true; + param.fitskeys.errormsg.push_back("- K2 : Fail to load value."); + } + + if(!cfg.Get("FILTER", param.fitskeys.FILTER)) { + e = true; + param.fitskeys.errormsg.push_back("- FILTER : Fail to load value."); + } + + if(!cfg.Get("CD1_1", param.fitskeys.CD1_1)) { + e = true; + param.fitskeys.errormsg.push_back("- CD1_1 : Fail to load value."); + } + + if(!cfg.Get("CD1_2", param.fitskeys.CD1_2)) { + e = true; + param.fitskeys.errormsg.push_back("- CD1_2 : Fail to load value."); + } + + if(!cfg.Get("CD2_1", param.fitskeys.CD2_1)) { + e = true; + param.fitskeys.errormsg.push_back("- CD2_1 : Fail to load value."); + } + + if(!cfg.Get("CD2_2", param.fitskeys.CD2_2)) { + e = true; + param.fitskeys.errormsg.push_back("- CD2_2 : Fail to load value."); + } + + if(!cfg.Get("XPIXEL", param.fitskeys.XPIXEL)) { + e = true; + param.fitskeys.errormsg.push_back("- XPIXEL : Fail to load value."); + } + + if(!cfg.Get("YPIXEL", param.fitskeys.YPIXEL)) { + e = true; + param.fitskeys.errormsg.push_back("- YPIXEL : Fail to load value."); + } + + 
if(!cfg.Get("COMMENT", param.fitskeys.COMMENT)) { + e = true; + param.fitskeys.errormsg.push_back("- COMMENT : Fail to load value."); + } + + if(!e) param.fitskeys.status = true; +} + +void CfgParam::loadMailParam() { + + bool e = false; + + if(!cfg.Get("MAIL_DETECTION_ENABLED", param.mail.MAIL_DETECTION_ENABLED)) { + e = true; + param.mail.errormsg.push_back("- MAIL_DETECTION_ENABLED : Fail to load value."); + }else{ + + if(param.mail.MAIL_DETECTION_ENABLED) { + + string mailRecipients; + if(!cfg.Get("MAIL_RECIPIENT", mailRecipients)) { + e = true; + param.mail.errormsg.push_back("- MAIL_RECIPIENT : Fail to load value."); + }else { + + typedef boost::tokenizer > tokenizer; + boost::char_separator sep(","); + tokenizer tokens(mailRecipients, sep); + + for (tokenizer::iterator tok_iter = tokens.begin();tok_iter != tokens.end(); ++tok_iter){ + param.mail.MAIL_RECIPIENTS.push_back(*tok_iter); + } + } + + if(!cfg.Get("MAIL_SMTP_SERVER", param.mail.MAIL_SMTP_SERVER)) { + e = true; + param.mail.errormsg.push_back("- MAIL_SMTP_SERVER : Fail to load value."); + } + + string smtp_connection_type; + if(!cfg.Get("MAIL_CONNECTION_TYPE", smtp_connection_type)) { + e = true; + param.mail.errormsg.push_back("- MAIL_CONNECTION_TYPE : Fail to load value."); + }else { + try{ + EParser smtp_security; + param.mail.MAIL_CONNECTION_TYPE = smtp_security.parseEnum("MAIL_CONNECTION_TYPE", smtp_connection_type); + + if(param.mail.MAIL_CONNECTION_TYPE != NO_SECURITY) { + + if(!cfg.Get("MAIL_SMTP_LOGIN", param.mail.MAIL_SMTP_LOGIN)) { + e = true; + param.mail.errormsg.push_back("- MAIL_SMTP_LOGIN : Fail to load value."); + } + + if(!cfg.Get("MAIL_SMTP_PASSWORD", param.mail.MAIL_SMTP_PASSWORD)) { + e = true; + param.mail.errormsg.push_back("- MAIL_SMTP_PASSWORD : Fail to load value."); + } + }else{ + param.mail.MAIL_SMTP_LOGIN = ""; + param.mail.MAIL_SMTP_PASSWORD = ""; + } + + }catch (std::exception &ex) { + e = true; + param.mail.errormsg.push_back("- MAIL_CONNECTION_TYPE : " + 
string(ex.what())); + } + } + } + } + + if(!e) param.mail.status = true; + +} + +int CfgParam::getDeviceID() { + return param.DEVICE_ID.first.first; +} + +dataParam CfgParam::getDataParam() { + return param.data; +} + +logParam CfgParam::getLogParam() { + return param.log; +} + +framesParam CfgParam::getFramesParam() { + return param.framesInput; +} + +videoParam CfgParam::getVidParam() { + return param.vidInput; +} + +cameraParam CfgParam::getCamParam() { + return param.camInput; +} + +detectionParam CfgParam::getDetParam() { + return param.det; +} + +stackParam CfgParam::getStackParam() { + return param.st; +} + +stationParam CfgParam::getStationParam() { + return param.station; +} + +fitskeysParam CfgParam::getFitskeysParam() { + return param.fitskeys; +} + +mailParam CfgParam::getMailParam() { + return param.mail; +} + +parameters CfgParam::getAllParam() { + return param; +} + +bool CfgParam::deviceIdIsCorrect() { + if(!param.DEVICE_ID.first.second) { + if(showErrors) { + cout << param.DEVICE_ID.second << endl; + } + return false; + } + return true; +} + +bool CfgParam::dataParamIsCorrect() { + if(!param.data.status) { + if(showErrors) { + for(int i = 0; i < param.data.errormsg.size(); i++) + cout << param.data.errormsg.at(i) << endl; + } + return false; + } + return true; +} + +bool CfgParam::logParamIsCorrect() { + if(!param.log.status) { + if(showErrors) { + for(int i = 0; i < param.log.errormsg.size(); i++) + cout << param.log.errormsg.at(i) << endl; + } + return false; + } + return true; +} + +bool CfgParam::framesParamIsCorrect() { + + if(!param.framesInput.status) { + if(showErrors) { + for(int i = 0; i < param.framesInput.errormsg.size(); i++) + cout << param.framesInput.errormsg.at(i) << endl; + } + return false; + } + return true; +} + +bool CfgParam::vidParamIsCorrect() { + if(!param.vidInput.status) { + if(showErrors) { + for(int i = 0; i < param.vidInput.errormsg.size(); i++) + cout << param.vidInput.errormsg.at(i) << endl; + } + return false; + } 
+ return true; +} + +bool CfgParam::camParamIsCorrect() { + if(!param.camInput.status) { + if(showErrors) { + for(int i = 0; i < param.camInput.errormsg.size(); i++) + cout << param.camInput.errormsg.at(i) << endl; + } + return false; + } + return true; +} + +bool CfgParam::detParamIsCorrect() { + if(!param.det.status) { + if(showErrors) { + for(int i = 0; i < param.det.errormsg.size(); i++) + cout << param.det.errormsg.at(i) << endl; + } + return false; + } + return true; +} + +bool CfgParam::stackParamIsCorrect() { + if(!param.st.status) { + if(showErrors) { + for(int i = 0; i < param.st.errormsg.size(); i++) + cout << param.st.errormsg.at(i) << endl; + } + return false; + } + return true; +} + +bool CfgParam::stationParamIsCorrect() { + if(!param.station.status) { + if(showErrors) { + for(int i = 0; i < param.station.errormsg.size(); i++) + cout << param.station.errormsg.at(i) << endl; + } + return false; + } + return true; +} + +bool CfgParam::fitskeysParamIsCorrect() { + if(!param.fitskeys.status) { + if(showErrors) { + for(int i = 0; i < param.fitskeys.errormsg.size(); i++) + cout << param.fitskeys.errormsg.at(i) << endl; + } + return false; + } + return true; +} + +bool CfgParam::mailParamIsCorrect() { + if(!param.mail.status) { + if(showErrors) { + for(int i = 0; i < param.mail.errormsg.size(); i++) + cout << param.mail.errormsg.at(i) << endl; + } + return false; + } + return true; +} + +bool CfgParam::inputIsCorrect() { + + switch(inputType) { + + case VIDEO : + return vidParamIsCorrect(); + break; + + case SINGLE_FITS_FRAME : + return framesParamIsCorrect(); + break; + + // camera + case CAMERA : + return camParamIsCorrect(); + break; + + } + + return false; +} + +bool CfgParam::allParamAreCorrect() { + + bool eFound = false; + + if(!deviceIdIsCorrect()){ + eFound = true; + cout << ">> Errors on device ID. " << endl; + } + + if(!dataParamIsCorrect()){ + eFound = true; + cout << ">> Errors on data parameters. 
" << endl; + } + + if(!logParamIsCorrect()){ + eFound = true; + cout << ">> Errors on log parameters. " << endl; + } + + if(!inputIsCorrect()){ + eFound = true; + cout << ">> Errors on input parameters. " << endl; + } + + if(!detParamIsCorrect()){ + eFound = true; + cout << ">> Errors on detection parameters. " << endl; + } + + if(!stackParamIsCorrect()){ + eFound = true; + cout << ">> Errors on stack parameters. " << endl; + } + + if(!stationParamIsCorrect()){ + eFound = true; + cout << ">> Errors on station parameters. " << endl; + } + + if(!fitskeysParamIsCorrect()){ + eFound = true; + cout << ">> Errors on fitskeys parameters. " << endl; + } + + if(!mailParamIsCorrect()){ + eFound = true; + cout << ">> Errors on mail parameters. " << endl; + } + + if(eFound) + return false; + + return true; +} + + + + diff --git a/CfgParam.h b/CfgParam.h new file mode 100644 index 0000000..0fb0600 --- /dev/null +++ b/CfgParam.h @@ -0,0 +1,139 @@ +/* + CfgParam.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . 
+* +* Last modified: 20/10/2014 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file CfgParam.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 03/06/2014 +* \brief FreeTure parameters +*/ + +#pragma once + +#include +#include +#include +#include +#include +#include "opencv2/highgui/highgui.hpp" +#include +#include +#include "ECamPixFmt.h" +#include "ETimeMode.h" +#include "EImgFormat.h" +#include "EDetMeth.h" +#include "ELogSeverityLevel.h" +#include "EStackMeth.h" +#include "ESmtpSecurity.h" +#include +#include "CfgLoader.h" +#include "Device.h" +#include "EInputDeviceType.h" +#include "SParam.h" +#include "ECamSdkType.h" + +using namespace boost::filesystem; +using namespace std; +using namespace cv; + +class CfgParam{ + + private : + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init { + + public : + + Init() { + + logger.add_attribute("ClassName", boost::log::attributes::constant("CfgParam")); + + } + + }initializer; + + CfgLoader cfg; + parameters param; + + InputDeviceType inputType; + + void loadDeviceID(); + void loadDataParam(); + void loadLogParam(); + void loadFramesParam(); + void loadVidParam(); + void loadCamParam(); + void loadDetParam(); + void loadStackParam(); + void loadStationParam(); + void loadFitskeysParam(); + void loadMailParam(); + + vector emsg; + + public : + + bool showErrors; + + /** + * Constructor. 
+ * + */ + CfgParam(string cfgFilePath); + + int getDeviceID(); + dataParam getDataParam(); + logParam getLogParam(); + framesParam getFramesParam(); + videoParam getVidParam(); + cameraParam getCamParam(); + detectionParam getDetParam(); + stackParam getStackParam(); + stationParam getStationParam(); + fitskeysParam getFitskeysParam(); + mailParam getMailParam(); + parameters getAllParam(); + + bool deviceIdIsCorrect(); + bool dataParamIsCorrect(); + bool logParamIsCorrect(); + bool framesParamIsCorrect(); + bool vidParamIsCorrect(); + bool camParamIsCorrect(); + bool detParamIsCorrect(); + bool stackParamIsCorrect(); + bool stationParamIsCorrect(); + bool fitskeysParamIsCorrect(); + bool mailParamIsCorrect(); + bool allParamAreCorrect(); + bool inputIsCorrect(); + +}; + diff --git a/Circle.h b/Circle.h new file mode 100644 index 0000000..98f1266 --- /dev/null +++ b/Circle.h @@ -0,0 +1,316 @@ +/* + Circle.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau -- FRIPON-GEOPS-UPSUD +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . 
*
* Last modified:    20/10/2014
*
*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/

/**
* \file    Circle.h
* \author  Yoan Audureau -- FRIPON-GEOPS-UPSUD
* \version 1.0
* \date    19/06/2014
* \brief
*/

#pragma once

// NOTE(review): bracketed include targets lost in extraction (likely <iostream>,
// <math.h> after _USE_MATH_DEFINES, ...) -- restore from repository.
#include "opencv2/highgui/highgui.hpp"
#include
#include
#define _USE_MATH_DEFINES
#include
#include "Conversion.h"
#include "SaveImg.h"

using namespace cv;
using namespace std;

// A disk in image coordinates (integer center + radius) with a helper that
// estimates the intersection surface between two disks, optionally drawing
// a debug image saved as BMP.
class Circle {

    private :

        Point mPos;     // Center position.
        int mRadius;

    public :

        Circle(Point center, int radius):mPos(center), mRadius(radius) {

        }

        Point getCenter() { return mPos;};
        int getRadius() { return mRadius;};

        /**
         * Compute the surfaces of this disk and of c2 and the surface of their
         * intersection (lens), in squared pixels.
         *
         * @param c2                 Other disk.
         * @param surfaceCircle1     Out: area of this disk (0 when no proper
         *                           two-point intersection is computed).
         * @param surfaceCircle2     Out: area of c2 (same remark).
         * @param intersectedSurface Out: intersection area (only set when the
         *                           circles cross in two points).
         * @param enableDebug        Draw the configuration on a 640x480 image.
         * @param debugPath          Where the debug BMP is written.
         * @return true when the disks overlap (including containment and
         *         coincidence), false when they are disjoint or when the
         *         quadratic solved below has no positive discriminant.
         */
        bool computeDiskSurfaceIntersection( Circle c2,
                                             float &surfaceCircle1,
                                             float &surfaceCircle2,
                                             float &intersectedSurface,
                                             bool enableDebug,
                                             string debugPath) {

            Mat map;
            bool res = false;
            bool displayIntersectedSurface = false;
            if(enableDebug) map = Mat(480, 640, CV_8UC3, Scalar(0,0,0));

            surfaceCircle1 = 0.0;
            surfaceCircle2 = 0.0;
            intersectedSurface = 0.0;

            // Debug drawing: this circle in green, c2 in red.
            if(enableDebug) circle(map, mPos, mRadius, Scalar(0,255,0));
            if(enableDebug) circle(map, c2.getCenter(), c2.getRadius(), Scalar(0,0,255));

            // Distance between two circles centers
            float distPcNc = sqrt(pow((mPos.x - c2.getCenter().x),2) + pow((mPos.y - c2.getCenter().y),2));

            // No intersections.
            if(distPcNc > c2.getRadius() + mRadius) {

                if(enableDebug) putText(map, "No intersections." , cvPoint(15,15),FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(0,0,255), 1, CV_AA);
                res = false;

            // Circles coincide.
            }else if(distPcNc == 0 && c2.getRadius() == mRadius) {

                if(enableDebug) putText(map, "Circles coincides." , cvPoint(15,15),FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(0,0,255), 1, CV_AA);
                res = true;

            // A circle is contained inside the other.
            }else if(distPcNc < abs(c2.getRadius() - mRadius)) {

                if(enableDebug) putText(map, "A circle is contained whithin the other." , cvPoint(15,15),FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(0,0,255), 1, CV_AA);
                res = true;

            }else {

                // Two-point intersection expected: solve for the radical line.
                surfaceCircle1 = M_PI * pow(mRadius,2);
                surfaceCircle2 = M_PI * pow(c2.getRadius(),2);

                float R0 = mRadius;
                float R1 = c2.getRadius();
                double x0 = mPos.x;
                double y0 = mPos.y;
                double x1 = c2.getCenter().x;
                double y1 = c2.getCenter().y;

                if(mPos.y != c2.getCenter().y) {

                    // Substitute the radical-line equation into circle 0 and
                    // solve the resulting quadratic A x^2 + B x + C = 0.
                    float N = (pow(R1,2) - pow(R0,2) - pow(x1,2) + pow(x0,2) - pow(y1,2) + pow(y0,2)) / (2 * (y0 - y1));
                    float A = pow((x0-x1)/(y0-y1),2) + 1;
                    float B = 2*y0*((x0-x1)/(y0-y1))-2*N*((x0-x1)/(y0-y1))-2*x0;
                    float C = pow(x0,2) + pow(y0,2) + pow(N,2) - pow(R0,2) - 2* y0*N;
                    // NOTE(review): delta is sqrt(discriminant); a negative
                    // discriminant yields NaN and the code relies on
                    // "NaN > 0" being false below -- fragile but functional.
                    double delta = std::sqrt(pow(B,2)-4*A*C);

                    //cout << delta << endl;

                    if(delta > 0) {

                        // Intersection points of the two circles.
                        float resX1 = (-B-delta) / (2*A);
                        float resX2 = (-B+delta) / (2*A);

                        float resY1 = N - resX1 * ((x0-x1)/(y0-y1));
                        float resY2 = N - resX2 * ((x0-x1)/(y0-y1));

                        if(enableDebug) line(map, Point(resX1,resY1 ), Point(resX2,resY2 ), Scalar(255,255,255), 1, CV_AA);

                        // Circle1 more inside the other
                        if(distPcNc > abs(c2.getRadius() - mRadius) && distPcNc < c2.getRadius() && c2.getRadius() > mRadius) {

                            //cout << "one circle more inside the other" << endl;

                            // Cord length.
                            double c = sqrt(pow((resX1 - resX2),2) + pow((resY1 - resY2),2));
                            double cc = c/(2.0*R0);
                            if(cc>1.0) cc = 1.0;    // clamp: asin domain
                            double thetaCircle1 = 2.0* asin(cc);
                            double areaCircle1 = (pow(R0,2)/2) * (thetaCircle1 - sin(thetaCircle1));

                            double ccc = c/(2.0*R1);
                            if(ccc>1.0) ccc=1.0;
                            double thetaCircle2 = 2* asin(ccc);
                            double areaCircle2 = (pow(R1,2)/2) * (thetaCircle2 - sin(thetaCircle2));

                            intersectedSurface = surfaceCircle1 - areaCircle1 + areaCircle2;

                            displayIntersectedSurface = true;

                        // Circle2 more inside the other
                        }else if(distPcNc > abs(c2.getRadius() - mRadius )&& distPcNc < mRadius && mRadius > c2.getRadius()) {

                            //cout << "one circle more inside the other" << endl;

                            // Cord length.
                            double c = sqrt(pow((resX1 - resX2),2) + pow((resY1 - resY2),2));

                            double cc = c/(2.0*R0);
                            if(cc>1.0) cc = 1.0;
                            double thetaPosCircle = 2.0* asin(cc);
                            double areaPosCircle = (pow(R0,2)/2) * (thetaPosCircle - sin(thetaPosCircle));

                            double ccc = c/(2.0*R1);
                            if(ccc>1.0) ccc=1.0;
                            double thetaNegCircle = 2* asin(ccc);
                            double areaNegCircle = (pow(R1,2)/2) * (thetaNegCircle - sin(thetaNegCircle));

                            intersectedSurface = surfaceCircle2 - areaNegCircle + areaPosCircle;

                            displayIntersectedSurface = true;

                        }else if(distPcNc == c2.getRadius() || distPcNc == mRadius ) {

                            //cout << "Outskirt" << endl;

                        }else {

                            // Symmetric lens: sum of the two circular segments.
                            double c = sqrt(pow((resX1 - resX2),2) + pow((resY1 - resY2),2));

                            double cc = c/(2.0*R0);
                            if(cc>1.0) cc = 1.0;
                            double thetaPosCircle = 2.0* asin(cc);
                            double areaPosCircle = (pow(R0,2)/2) * (thetaPosCircle - sin(thetaPosCircle));

                            double ccc = c/(2.0*R1);
                            if(ccc>1.0) ccc=1.0;
                            double thetaNegCircle = 2* asin(ccc);
                            double areaNegCircle = (pow(R1,2)/2) * (thetaNegCircle - sin(thetaNegCircle));

                            intersectedSurface = areaNegCircle + areaPosCircle;

                            displayIntersectedSurface = true;

                        }

                        res = true;

                    }

                }else {

                    // Degenerate case: centers share the same y, the radical
                    // line is vertical (x constant), solve for y instead.
                    float x = (pow(R1,2) - pow(R0,2) - pow(x1,2) + pow(x0,2))/(2*(x0-x1));
                    float A = 1.0;
                    float B = -2 * y1;
                    float C = pow(x1,2) + pow(x,2) - 2*x1*x + pow(y1,2) - pow(R1,2);

                    double delta = std::sqrt(pow(B,2)-4*A*C);

                    if(delta > 0) {

                        float resY1 = (-B-delta) / (2*A);
                        float resY2 = (-B+delta) / (2*A);

                        // NOTE(review): resX1 and resX2 use an identical
                        // formula (and include y terms absent from x above),
                        // which looks wrong -- both X should equal x here.
                        // Confirm against the original repository.
                        float resX1 = (pow(R1,2) - pow(R0,2) - pow(x1,2) + pow(x0,2) - pow(y1,2) + pow(y0,2))/(2*(x0-x1));
                        float resX2 = (pow(R1,2) - pow(R0,2) - pow(x1,2) + pow(x0,2) - pow(y1,2) + pow(y0,2))/(2*(x0-x1));

                        if(enableDebug) line(map, Point(resX1,resY1 ), Point(resX2,resY2 ), Scalar(255,255,255), 1, CV_AA);

                        // Circle neg more inside the other
                        if(distPcNc > abs(c2.getRadius() - mRadius) && distPcNc < c2.getRadius() && c2.getRadius() > mRadius) {

                            // Cord length.
                            double c = sqrt(pow((resX1 - resX2),2) + pow((resY1 - resY2),2));

                            double cc = c/(2.0*R0);
                            if(cc>1.0) cc = 1.0;
                            double thetaPosCircle = 2.0* asin(cc);

                            double areaPosCircle = (pow(R0,2)/2) * (thetaPosCircle - sin(thetaPosCircle));

                            double ccc = c/(2.0*R1);
                            if(ccc>1.0) ccc=1.0;
                            double thetaNegCircle = 2* asin(ccc);
                            double areaNegCircle = (pow(R1,2)/2) * (thetaNegCircle - sin(thetaNegCircle));

                            intersectedSurface = surfaceCircle1 - areaPosCircle + areaNegCircle;

                            displayIntersectedSurface = true;

                        // Circle pos more inside the other
                        }else if(distPcNc > abs(c2.getRadius() - mRadius )&& distPcNc < mRadius && mRadius > c2.getRadius()) {

                            // Cord length.
                            double c = sqrt(pow((resX1 - resX2),2) + pow((resY1 - resY2),2));

                            double cc = c/(2.0*R0);
                            if(cc>1.0) cc = 1.0;
                            double thetaPosCircle = 2.0* asin(cc);
                            double areaPosCircle = (pow(R0,2)/2) * (thetaPosCircle - sin(thetaPosCircle));

                            double ccc = c/(2.0*R1);
                            if(ccc>1.0) ccc=1.0;
                            double thetaNegCircle = 2* asin(ccc);
                            double areaNegCircle = (pow(R1,2)/2) * (thetaNegCircle - sin(thetaNegCircle));

                            intersectedSurface = surfaceCircle2 - areaNegCircle + areaPosCircle;

                            displayIntersectedSurface = true;

                        }else if(distPcNc == c2.getRadius() || distPcNc ==mRadius ) {

                            //cout << "Le centre d'un des cercles est sur la périphérie de l'autre" << endl;

                        }else {

                            double c = sqrt(pow((resX1 - resX2),2) + pow((resY1 - resY2),2));

                            double cc = c/(2.0*R0);
                            if(cc>1.0) cc = 1.0;
                            double thetaPosCircle = 2.0* asin(cc);
                            double areaPosCircle = (pow(R0,2)/2) * (thetaPosCircle - sin(thetaPosCircle));

                            double ccc = c/(2.0*R1);
                            if(ccc>1.0) ccc=1.0;
                            double thetaNegCircle = 2* asin(ccc);
                            double areaNegCircle = (pow(R1,2)/2) * (thetaNegCircle - sin(thetaNegCircle));

                            intersectedSurface = areaNegCircle + areaPosCircle;

                            displayIntersectedSurface = true;


                        }

                        res = true;

                    }

                }

            }

            if(enableDebug && displayIntersectedSurface) {

                putText(map, "Intersected surface : " , cvPoint(15,15),FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(0,0,255), 1, CV_AA);
                string msg1 = "- Green circle : " + Conversion::floatToString((intersectedSurface * 100) / surfaceCircle1) + "%" ;
                putText(map, msg1 , cvPoint(15,30),FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(0,0,255), 1, CV_AA);
                string msg2 = "- Red circle : " + Conversion::floatToString((intersectedSurface * 100) / surfaceCircle2)+ "%";
                putText(map, msg2 , cvPoint(15,45),FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(0,0,255), 1, CV_AA);

            }

            if(enableDebug) SaveImg::saveBMP(map, debugPath);

            return res;

        }

};
diff --git a/Conversion.cpp b/Conversion.cpp
new file mode 100644
index 0000000..5bc0b27
--- /dev/null
+++ b/Conversion.cpp
@@ -0,0 +1,219 @@
/*
                            Conversion.cpp

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
*
*   This file is part of:   freeture
*
*   Copyright:      (C) 2014-2015 Yoan Audureau
*                               FRIPON-GEOPS-UPSUD-CNRS
*
*   License:        GNU General Public License
*
*   FreeTure is free software: you can redistribute it and/or modify
*   it under the terms of the GNU General Public License as published by
*   the Free Software Foundation, either version 3 of the License, or
*   (at your option) any later version.
*   FreeTure is distributed in the hope that it will be useful,
*   but WITHOUT ANY WARRANTY; without even the implied warranty of
*   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
*   GNU General Public License for more details.
*   You should have received a copy of the GNU General Public License
*   along with FreeTure. If not, see .
*
*   Last modified:      20/07/2015
*
*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/

/**
* \file    Conversion.cpp
* \author  Yoan Audureau -- FRIPON-GEOPS-UPSUD
* \version 1.0
* \date    13/06/2014
* \brief   Various conversion tools.
+*/ + +#include "Conversion.h" + +string Conversion::matTypeToString(int type) { + + string r; + + uchar depth = type & CV_MAT_DEPTH_MASK; + uchar chans = 1 + (type >> CV_CN_SHIFT); + + switch ( depth ) { + + case CV_8U: r = "8U"; break; + case CV_8S: r = "8S"; break; + case CV_16U: r = "16U"; break; + case CV_16S: r = "16S"; break; + case CV_32S: r = "32S"; break; + case CV_32F: r = "32F"; break; + case CV_64F: r = "64F"; break; + default: r = "User"; break; + + } + + r += "C"; + r += (chans+'0'); + + return r; + +} + +string Conversion::intToString(int nb){ + + ostringstream oss; + oss << nb; + string result = oss.str(); + return result; + +} + +float Conversion::roundToNearest(float value, float precision) { + + float fractpart1 = 0.0, intpart1 = 0.0, fractpart2 = 0.0, intpart2 = 0.0; + fractpart1 = modf (value , &intpart1); + float d = fractpart1/precision; + fractpart2 = modf (d , &intpart2); + return intpart1 + intpart2*precision; +} + +string Conversion::floatToString(float nb){ + + std::ostringstream ss; + ss << nb; + std::string s(ss.str()); + return s; + +} + +string Conversion::doubleToString(double nb) { + + std::ostringstream strs; + strs << nb; + std::string str = strs.str(); + + return str; + +} + +void Conversion::stringTok(list &container, string const &in, const char * const delimiters = "_"){ + + const string::size_type len = in.length(); + string::size_type i = 0; + + while (i < len){ + + // Eat leading whitespace + i = in.find_first_not_of(delimiters, i); + + if (i == string::npos) + return; // Nothing left but white space + + // Find the end of the token + string::size_type j = in.find_first_of(delimiters, i); + + // Push token + if (j == string::npos){ + + container.push_back(in.substr(i)); + return; + + }else + + container.push_back(in.substr(i, j-i)); + + // Set up for next loop + i = j + 1; + + } +} + +Mat Conversion::convertTo8UC1(Mat &img) { + + Mat tmp; + img.copyTo(tmp); + double min, max; + minMaxLoc(tmp, &min, &max); + 
tmp.convertTo(tmp, CV_8UC1, 255.0/(max - min), -min * 255.0/(max - min)); + + return tmp; + +} + +int Conversion::countNumberDigit(int n){ + + int nbDigit = 0; + + while(n!=0){ + + n/=10; + ++nbDigit; + + } + + return nbDigit; +} + +int Conversion::roundToUpperRange(int n) { + + int nbDigit = 0; + int last = 0; + + while(n!=0){ + + last = n; + n/=10; + ++nbDigit; + + } + + int f = 1; + + for(int i = 1; i < nbDigit; i++) + f *=10; + + return (last+1) * f; +} + +string Conversion::numbering(int totalDigit, int n) { + + int cpt = 0; + + int nbZeroToAdd = 0; + + string ch = ""; + + if(n<10){ + + nbZeroToAdd = totalDigit - 1; + + for(int i = 0; i < nbZeroToAdd; i++){ + + ch += "0"; + + } + + }else{ + + while(n > 0){ + + n/=10; + cpt ++; + + } + + nbZeroToAdd = totalDigit - cpt; + + for(int i = 0; i < nbZeroToAdd; i++){ + + ch += "0"; + + } + + } + + return ch; +} diff --git a/Conversion.h b/Conversion.h new file mode 100644 index 0000000..a75a83c --- /dev/null +++ b/Conversion.h @@ -0,0 +1,122 @@ +/* + Conversion.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . 
*
* Last modified:    20/10/2014
*
*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/

/**
* \file    Conversion.h
* \author  Yoan Audureau -- FRIPON-GEOPS-UPSUD
* \version 1.0
* \date    13/06/2014
* \brief   Various conversion tools.
*/

#pragma once

// NOTE(review): bracketed include targets lost in extraction (likely
// <string>, <sstream>, <list>, <iostream>...) -- restore from repository.
#include
#include
#include
#include "opencv2/highgui/highgui.hpp"
#include

using namespace std;
using namespace cv;

// Stateless helper class grouping static conversion utilities
// (numbers <-> strings, tokenizing, image depth conversion).
class Conversion {

    public :

        /**
         * Get type of opencv mat object.
         *
         * @param type mat.type()
         * @return Type in string.
         */
        static string matTypeToString(int type);

        /**
         * Convert an int value to string.
         *
         * @param nb Integer value.
         * @return String value.
         */
        static string intToString(int nb);

        /**
         * Convert a float value to string.
         *
         * @param nb Float value.
         * @return String value.
         */
        static string floatToString(float nb);

        // Convert a double value to string.
        static string doubleToString(double nb);

        /**
         * Extract data from a string according to a delimiter.
         *
         * NOTE(review): the list element type was lost in extraction --
         * presumably list<string>, confirm against the repository.
         *
         * @param container List of extracted data.
         * @param in String to analyse.
         * @param delimiters
         */
        static void stringTok(list &container, string const &in, const char * const delimiters);

        /**
         * Convert an opencv image to 8 bits.
         *
         * @param img Opencv image to convert.
         * @return 8 bits opencv mat.
         */
        static Mat convertTo8UC1(Mat &img);

        /**
         * Determine the number of "0" required.
         *
         * @param totalDigit Maximum number of digits.
         * @param n Integer value.
         * @return Number of 0 to add to reach maximum of available digits.
         */
        static string numbering(int totalDigit, int n);

        /**
         * Count number of digit in a value.
         *
         * @param n Integer value.
         * @return Number of digits.
         */
        static int countNumberDigit(int n);

        /**
         * Round an integer to upper range.
         *
         * @param n Integer value. Example : 103, 1025.
         * @return Rounded value. Example : 200, 2000.
         */
        static int roundToUpperRange(int n);

        // Round the fractional part of value to a multiple of precision.
        static float roundToNearest(float value, float precision);

};

diff --git a/DetThread.cpp b/DetThread.cpp
new file mode 100644
index 0000000..9c90f9c
--- /dev/null
+++ b/DetThread.cpp
@@ -0,0 +1,806 @@
/*
                            DetThread.cpp

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
*
*   This file is part of:   freeture
*
*   Copyright:      (C) 2014-2015 Yoan Audureau -- FRIPON-GEOPS-UPSUD
*
*   License:        GNU General Public License
*
*   FreeTure is free software: you can redistribute it and/or modify
*   it under the terms of the GNU General Public License as published by
*   the Free Software Foundation, either version 3 of the License, or
*   (at your option) any later version.
*   FreeTure is distributed in the hope that it will be useful,
*   but WITHOUT ANY WARRANTY; without even the implied warranty of
*   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
*   GNU General Public License for more details.
*   You should have received a copy of the GNU General Public License
*   along with FreeTure. If not, see .
*
*   Last modified:      20/10/2014
*
*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/

/**
* \file    DetThread.cpp
* \author  Yoan Audureau -- FRIPON-GEOPS-UPSUD
* \version 1.0
* \date    03/06/2014
* \brief   Detection thread.
+*/ + +#include "DetThread.h" + +boost::log::sources::severity_logger< LogSeverityLevel > DetThread::logger; + +DetThread::Init DetThread::initializer; + +DetThread::DetThread( boost::circular_buffer *fb, + boost::mutex *fb_m, + boost::condition_variable *fb_c, + bool *dSignal, + boost::mutex *dSignal_m, + boost::condition_variable *dSignal_c, + detectionParam dtp, + dataParam dp, + mailParam mp, + stationParam sp, + fitskeysParam fkp, + CamPixFmt pfmt): + + pDetMthd(NULL), mForceToReset(false), mMustStop(false), + mEventPath(""), mIsRunning(false), mNbDetection(0), mWaitFramesToCompleteEvent(false), mCurrentDataSetLocation(""), + mNbWaitFrames(0), mInterruptionStatus(false) { + + frameBuffer = fb; + frameBuffer_mutex = fb_m; + frameBuffer_condition = fb_c; + detSignal = dSignal; + detSignal_mutex = dSignal_m; + detSignal_condition = dSignal_c; + pThread = NULL; + mFormat = pfmt; + mStationName = sp.STATION_NAME; + mdp = dp; + mdtp = dtp; + mmp = mp; + mfkp = fkp; + mstp = sp; + mNbFramesAround = 0; + + mFitsHeader.loadKeys(fkp, sp); + + switch(dtp.DET_METHOD){ + + case TEMPORAL_MTHD : + + { + + pDetMthd = new DetectionTemporal(dtp, pfmt); + + } + + break; + + case TEMPLATE_MTHD: + + { + + pDetMthd = new DetectionTemplate(dtp, pfmt); + + } + + break; + + } + +} + +DetThread::~DetThread(void){ + + if(pDetMthd != NULL){ + + BOOST_LOG_SEV(logger, notification) << "Remove pDetMthd instance."; + delete pDetMthd; + + } + + if (pThread!=NULL){ + + BOOST_LOG_SEV(logger, notification) << "Remove detThread instance."; + delete pThread; + + } +} + +bool DetThread::startThread(){ + + BOOST_LOG_SEV(logger, notification) << "Creating detThread..."; + pThread = new boost::thread(boost::ref(*this)); + + return true; +} + +void DetThread::stopThread(){ + + BOOST_LOG_SEV(logger, notification) << "Stopping detThread..."; + + // Signal the thread to stop (thread-safe) + mMustStopMutex.lock(); + mMustStop=true; + mMustStopMutex.unlock(); + + // Wait for the thread to finish. 
+ while(pThread->timed_join(boost::posix_time::seconds(2)) == false){ + + BOOST_LOG_SEV(logger, notification) << "Interrupting detThread..."; + pThread->interrupt(); + + } +} + +Detection* DetThread::getDetMethod(){ + + return pDetMthd; + +} + +void DetThread::interruptThread(){ + + mInterruptionStatusMutex.lock(); + mInterruptionStatus = true; + mInterruptionStatusMutex.unlock(); + +} + +void DetThread::operator ()(){ + + bool stopThread = false; + mIsRunning = true; + // Flag to indicate that an event must be complete with more frames. + bool eventToComplete = false; + // Reference date to count time to complete an event. + string refDate; + + BOOST_LOG_SCOPED_THREAD_TAG("LogName", "DET_THREAD"); + BOOST_LOG_SEV(logger,notification) << "\n"; + BOOST_LOG_SEV(logger,notification) << "=============================================="; + BOOST_LOG_SEV(logger,notification) << "=========== Start detection thread ==========="; + BOOST_LOG_SEV(logger,notification) << "=============================================="; + + /// Thread loop. + try{ + + do{ + + try { + + /// Wait new frame from AcqThread. + boost::mutex::scoped_lock lock(*detSignal_mutex); + while (!(*detSignal)) detSignal_condition->wait(lock); + *detSignal = false; + lock.unlock(); + + // Check interruption signal from AcqThread. + mForceToReset = false; + mInterruptionStatusMutex.lock(); + if(mInterruptionStatus) { + BOOST_LOG_SEV(logger, notification) << "Interruption status : " << mInterruptionStatus; + BOOST_LOG_SEV(logger, notification) << "-> reset forced on detection method."; + mForceToReset = true; + } + mInterruptionStatusMutex.unlock(); + + if(!mForceToReset){ + + // Fetch the last grabbed frame. + Frame lastFrame; + boost::mutex::scoped_lock lock2(*frameBuffer_mutex); + if(frameBuffer->size() > 2) lastFrame = frameBuffer->back(); + lock2.unlock(); + + double t = (double)getTickCount(); + + if(lastFrame.mImg.data) { + mFormat = lastFrame.mFormat; + + // Run detection process. 
+ if(pDetMthd->runDetection(lastFrame) && !eventToComplete){ + + // Event detected. + BOOST_LOG_SEV(logger, notification) << "Event detected ! Waiting frames to complete the event..." << endl; + eventToComplete = true; + + // Get a reference date. + string currDate = to_simple_string(boost::posix_time::microsec_clock::universal_time()); + refDate = currDate.substr(0, currDate.find(".")); + + mNbDetection++; + + } + + // Wait frames to complete the detection. + if(eventToComplete){ + + string currDate = to_simple_string(boost::posix_time::microsec_clock::universal_time()); + string nowDate = currDate.substr(0, currDate.find(".")); + boost::posix_time::ptime t1(boost::posix_time::time_from_string(refDate)); + boost::posix_time::ptime t2(boost::posix_time::time_from_string(nowDate)); + boost::posix_time::time_duration td = t2 - t1; + + if(td.total_seconds() > mdtp.DET_TIME_AROUND) { + + BOOST_LOG_SEV(logger, notification) << "Event completed." << endl; + + // Build event directory. + mEventDate = pDetMthd->getEventDate(); + BOOST_LOG_SEV(logger, notification) << "Building event directory..." << endl; + + if(buildEventDataDirectory()) + BOOST_LOG_SEV(logger, notification) << "Success to build event directory." << endl; + else + BOOST_LOG_SEV(logger, fail) << "Fail to build event directory." << endl; + + // Save event. + BOOST_LOG_SEV(logger, notification) << "Saving event..." << endl; + pDetMthd->saveDetectionInfos(mEventPath, mNbFramesAround); + boost::mutex::scoped_lock lock(*frameBuffer_mutex); + if(!saveEventData(pDetMthd->getEventFirstFrameNb(), pDetMthd->getEventLastFrameNb())) + BOOST_LOG_SEV(logger,critical) << "Error saving event data."; + else + BOOST_LOG_SEV(logger, notification) << "Success to save event !" << endl; + + lock.unlock(); + + // Reset detection. + BOOST_LOG_SEV(logger, notification) << "Reset detection process." 
<< endl; + pDetMthd->resetDetection(false); + eventToComplete = false; + mNbFramesAround = 0; + + } + + mNbFramesAround++; + } + } + + t = (((double)getTickCount() - t)/getTickFrequency())*1000; + cout << " [ TIME DET ] : " << std::setprecision(3) << std::fixed << t << " ms " << endl; + BOOST_LOG_SEV(logger,normal) << " [ TIME DET ] : " << std::setprecision(3) << std::fixed << t << " ms "; + + }else{ + + // reset method + if(pDetMthd != NULL) + pDetMthd->resetDetection(false); + + eventToComplete = false; + mNbWaitFrames = 0; + + mInterruptionStatusMutex.lock(); + mInterruptionStatus = false; + mInterruptionStatusMutex.unlock(); + + } + + }catch(const boost::thread_interrupted&){ + + BOOST_LOG_SEV(logger,notification) << "Detection Thread INTERRUPTED"; + + } + + mMustStopMutex.lock(); + stopThread = mMustStop; + mMustStopMutex.unlock(); + + }while(!stopThread); + + if(mDetectionResults.size() == 0) { + + cout << "-----------------------------------------------" << endl; + cout << "------------->> DETECTED EVENTS : " << mNbDetection << endl; + cout << "-----------------------------------------------" << endl; + + }else { + + // Create Report for videos and frames in input. 
+ ofstream report; + string reportPath = mdp.DATA_PATH + "detections_report.txt"; + report.open(reportPath.c_str()); + + cout << "--------------- DETECTION REPORT --------------" << endl; + + for(int i = 0; i < mDetectionResults.size(); i++) { + report << mDetectionResults.at(i).first << "------> " << mDetectionResults.at(i).second << "\n"; + cout << "- DATASET " << i << " : "; + + if(mDetectionResults.at(i).second > 1) + cout << mDetectionResults.at(i).second << " events" << endl; + else + cout << mDetectionResults.at(i).second << " event" << endl; + } + + cout << "-----------------------------------------------" << endl; + + report.close(); + + } + + }catch(const char * msg){ + + cout << msg << endl; + BOOST_LOG_SEV(logger,critical) << msg; + + }catch(exception& e){ + + cout << "An error occured. See log for details." << endl; + cout << e.what() << endl; + BOOST_LOG_SEV(logger, critical) << e.what(); + + } + + mIsRunning = false; + + BOOST_LOG_SEV(logger,notification) << "DetThread ended."; + +} + +bool DetThread::getRunStatus(){ + + return mIsRunning; + +} + +bool DetThread::buildEventDataDirectory(){ + + namespace fs = boost::filesystem; + + // eventDate is the date of the first frame attached to the event. + string YYYYMMDD = TimeDate::getYYYYMMDD(mEventDate); + + // Data location. + path p(mdp.DATA_PATH); + + // Create data directory for the current day. + string fp = mdp.DATA_PATH + mStationName + "_" + YYYYMMDD +"/"; + path p0(fp); + + // Events directory. + string fp1 = "events/"; + path p1(fp + fp1); + + // Current event directory with the format : STATION_AAAAMMDDThhmmss_UT + string fp2 = mStationName + "_" + TimeDate::getYYYYMMDDThhmmss(mEventDate) + "_UT/"; + path p2(fp + fp1 + fp2); + + // Final path used by an other function to save event data. + mEventPath = fp + fp1 + fp2; + + // Check if data path specified in the configuration file exists. 
+ if(fs::exists(p)){ + + // Check DataLocation/STATION_AAMMDD/ + if(fs::exists(p0)){ + + // Check DataLocation/STATION_AAMMDD/events/ + if(fs::exists(p1)){ + + // Check DataLocation/STATION_AAMMDD/events/STATION_AAAAMMDDThhmmss_UT/ + if(!fs::exists(p2)){ + + // Create DataLocation/STATION_AAMMDD/events/STATION_AAAAMMDDThhmmss_UT/ + if(!fs::create_directory(p2)){ + + BOOST_LOG_SEV(logger,fail) << "Fail to create : " << p2; + return false; + + }else{ + + BOOST_LOG_SEV(logger,notification) << "Success to create : " << p2; + return true; + } + + } + + }else{ + + // Create DataLocation/STATION_AAMMDD/events/ + if(!fs::create_directory(p1)){ + + BOOST_LOG_SEV(logger,fail) << "Fail to create : " << p1; + return false; + + }else{ + + // Create DataLocation/STATION_AAMMDD/events/STATION_AAAAMMDDThhmmss_UT/ + if(!fs::create_directory(p2)){ + + BOOST_LOG_SEV(logger,fail) << "Fail to create : " << p2; + return false; + + }else{ + + BOOST_LOG_SEV(logger,notification) << "Success to create : " << p2; + return true; + + } + } + } + + }else{ + + // Create DataLocation/STATION_AAMMDD/ + if(!fs::create_directory(p0)){ + + BOOST_LOG_SEV(logger,fail) << "Fail to create : " << p0; + return false; + + }else{ + + // Create DataLocation/STATION_AAMMDD/events/ + if(!fs::create_directory(p1)){ + + BOOST_LOG_SEV(logger,fail) << "Fail to create : " << p1; + return false; + + }else{ + + // Create DataLocation/STATION_AAMMDD/events/STATION_AAAAMMDDThhmmss_UT/ + if(!fs::create_directory(p2)){ + + BOOST_LOG_SEV(logger,fail) << "Fail to create : " << p2; + return false; + + }else{ + + BOOST_LOG_SEV(logger,notification) << "Success to create : " << p2; + return true; + + } + } + } + } + + }else{ + + // Create DataLocation/ + if(!fs::create_directory(p)){ + + BOOST_LOG_SEV(logger,fail) << "Fail to create : " << p; + return false; + + }else{ + + // Create DataLocation/STATION_AAMMDD/ + if(!fs::create_directory(p0)){ + + BOOST_LOG_SEV(logger,fail) << "Fail to create : " << p0; + return false; + + 
}else{ + + //Create DataLocation/STATION_AAMMDD/events/ + if(!fs::create_directory(p1)){ + + BOOST_LOG_SEV(logger,fail) << "Fail to create : " << p1; + return false; + + }else{ + + // Create DataLocation/STATION_AAMMDD/events/STATION_AAAAMMDDThhmmss_UT/ + if(!fs::create_directory(p2)){ + + BOOST_LOG_SEV(logger,fail) << "Fail to create : " << p2; + return false; + + }else{ + + BOOST_LOG_SEV(logger,notification) << "Success to create : " << p1; + return true; + + } + } + } + } + } + + return true; +} + +bool DetThread::saveEventData(int firstEvPosInFB, int lastEvPosInFB){ + + namespace fs = boost::filesystem; + + // List of data path to attach to the mail notification. + vector mailAttachments; + + // Number of the first frame to save. It depends of how many frames we want to keep before the event. + int numFirstFrameToSave = firstEvPosInFB - mNbFramesAround; + + // Number of the last frame to save. It depends of how many frames we want to keep after the event. + int numLastFrameToSave = lastEvPosInFB + mNbFramesAround; + + // If the number of the first frame to save for the event is not in the framebuffer. + // The first frame to save become the first frame available in the framebuffer. + if(frameBuffer->front().mFrameNumber > numFirstFrameToSave) + numFirstFrameToSave = frameBuffer->front().mFrameNumber; + + // Check the number of the last frame to save. + if(frameBuffer->back().mFrameNumber < numLastFrameToSave) + numLastFrameToSave = frameBuffer->back().mFrameNumber; + + // Total frames to save. + int nbTotalFramesToSave = numLastFrameToSave - numFirstFrameToSave; + + // Count number of digit on nbTotalFramesToSave. 
+ int n = nbTotalFramesToSave; + int nbDigitOnNbTotalFramesToSave = 0; + + while(n!=0){ + n/=10; + ++nbDigitOnNbTotalFramesToSave; + } + + BOOST_LOG_SEV(logger,notification) << "> First frame to save : " << numFirstFrameToSave; + BOOST_LOG_SEV(logger,notification) << "> Lst frame to save : " << numLastFrameToSave; + BOOST_LOG_SEV(logger,notification) << "> First event frame : " << firstEvPosInFB; + BOOST_LOG_SEV(logger,notification) << "> Last event frame : " << lastEvPosInFB; + BOOST_LOG_SEV(logger,notification) << "> Frames before : " << mNbFramesAround; + BOOST_LOG_SEV(logger,notification) << "> Frames after : " << mNbFramesAround; + BOOST_LOG_SEV(logger,notification) << "> Total frames to save : " << nbTotalFramesToSave; + BOOST_LOG_SEV(logger,notification) << "> Total digit : " << nbDigitOnNbTotalFramesToSave; + + TimeDate::Date dateFirstFrame; + + int c = 0; + + // Init video avi + VideoWriter *video = NULL; + + if(mdtp.DET_SAVE_AVI) { + video = new VideoWriter(mEventPath + "video.avi", CV_FOURCC('M', 'J', 'P', 'G'), 5, Size(static_cast(frameBuffer->front().mImg.cols), static_cast(frameBuffer->front().mImg.rows)), false); + } + + // Init fits 3D. + Fits3D fits3d; + + if(mdtp.DET_SAVE_FITS3D) { + + fits3d = Fits3D(mFormat, frameBuffer->front().mImg.rows, frameBuffer->front().mImg.cols, (numLastFrameToSave - numFirstFrameToSave +1), mEventPath + "fits3D"); + boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time(); + fits3d.kDATE = to_iso_extended_string(time); + + // Name of the fits file. + fits3d.kFILENAME = mEventPath + "fitscube.fit"; + + } + + // Init sum. + Stack stack = Stack(mdp.FITS_COMPRESSION_METHOD, mfkp, mstp); + + // Exposure time sum. + double sumExpTime = 0.0; + double firstExpTime = 0.0; + bool varExpTime = false; + + // Loop framebuffer. + boost::circular_buffer::iterator it; + for(it = frameBuffer->begin(); it != frameBuffer->end(); ++it){ + + // Get infos about the first frame of the event for fits 3D. 
+ if((*it).mFrameNumber == numFirstFrameToSave && mdtp.DET_SAVE_FITS3D){ + + fits3d.kDATEOBS = TimeDate::getIsoExtendedFormatDate((*it).mDate); + + // Gain. + fits3d.kGAINDB = (*it).mGain; + // Saturation. + fits3d.kSATURATE = (*it).mSaturatedValue; + // FPS. + fits3d.kCD3_3 = (*it).mFps; + // CRVAL1 : sideral time. + double julianDate = TimeDate::gregorianToJulian((*it).mDate); + double julianCentury = TimeDate::julianCentury(julianDate); + double sideralT = TimeDate::localSideralTime_2(julianCentury, (*it).mDate.hours, (*it).mDate.minutes, (int)(*it).mDate.seconds, mFitsHeader.kSITELONG); + fits3d.kCRVAL1 = sideralT; + // Projection and reference system + fits3d.kCTYPE1 = "RA---ARC"; + fits3d.kCTYPE2 = "DEC--ARC"; + // Equinox + fits3d.kEQUINOX = 2000.0; + firstExpTime = (*it).mExposure; + dateFirstFrame = (*it).mDate; + + } + + // Get infos about the last frame of the event record for fits 3D. + if((*it).mFrameNumber == numLastFrameToSave && mdtp.DET_SAVE_FITS3D){ + cout << "DATE first : " << dateFirstFrame.hours << " H " << dateFirstFrame.minutes << " M " << dateFirstFrame.seconds << " S" << endl; + cout << "DATE last : " << (*it).mDate.hours << " H " << (*it).mDate.minutes << " M " << (*it).mDate.seconds << " S" << endl; + fits3d.kELAPTIME = ((*it).mDate.hours*3600 + (*it).mDate.minutes*60 + (*it).mDate.seconds) - (dateFirstFrame.hours*3600 + dateFirstFrame.minutes*60 + dateFirstFrame.seconds); + + } + + // If the current frame read from the framebuffer has to be saved. + if((*it).mFrameNumber >= numFirstFrameToSave && (*it).mFrameNumber < numLastFrameToSave) { + + // Save fits2D. + if(mdtp.DET_SAVE_FITS2D) { + + string fits2DPath = mEventPath + "fits2D/"; + string fits2DName = "frame_" + Conversion::numbering(nbDigitOnNbTotalFramesToSave, c) + Conversion::intToString(c); + BOOST_LOG_SEV(logger,notification) << ">> Saving fits2D : " << fits2DName; + + Fits2D newFits(fits2DPath); + newFits.loadKeys(mfkp, mstp); + // Frame's acquisition date. 
+                newFits.kDATEOBS = TimeDate::getIsoExtendedFormatDate((*it).mDate);
+                // Fits file creation date.
+                boost::posix_time::ptime time = boost::posix_time::second_clock::universal_time();
+                // YYYYMMDDTHHMMSS,fffffffff where T is the date-time separator
+                newFits.kDATE = to_iso_string(time);
+                // Name of the fits file.
+                newFits.kFILENAME = fits2DName;
+                // Exposure time (seconds; mExposure is in microseconds).
+                newFits.kONTIME = (*it).mExposure/1000000.0;
+                // Gain.
+                newFits.kGAINDB = (*it).mGain;
+                // Saturation.
+                newFits.kSATURATE = (*it).mSaturatedValue;
+                // FPS.
+                newFits.kCD3_3 = (*it).mFps;
+                // CRVAL1 : sideral time.
+                double julianDate = TimeDate::gregorianToJulian((*it).mDate);
+                double julianCentury = TimeDate::julianCentury(julianDate);
+                // NOTE(review): the Fits3D path passes (int)(*it).mDate.seconds here
+                // while this call passes the raw seconds — confirm which is intended.
+                double sideralT = TimeDate::localSideralTime_2(julianCentury, (*it).mDate.hours, (*it).mDate.minutes, (*it).mDate.seconds, mFitsHeader.kSITELONG);
+                newFits.kCRVAL1 = sideralT;
+                // kEXPOSURE duplicates the same value as kONTIME above.
+                newFits.kEXPOSURE = (*it).mExposure/1000000.0;
+                // Projection and reference system
+                newFits.kCTYPE1 = "RA---ARC";
+                newFits.kCTYPE2 = "DEC--ARC";
+                // Equinox
+                newFits.kEQUINOX = 2000.0;
+
+                // Lazily create the fits2D output directory on first use.
+                if(!fs::exists(path(fits2DPath))) {
+                    if(fs::create_directory(path(fits2DPath)))
+                        BOOST_LOG_SEV(logger,notification) << "Success to create directory : " << fits2DPath;
+                }
+
+                // 12-bit frames are written as signed 16-bit, everything else as 8-bit.
+                switch(mFormat) {
+
+                    case MONO12 :
+                        {
+                            newFits.writeFits((*it).mImg, S16, fits2DName, mdp.FITS_COMPRESSION_METHOD);
+                        }
+                        break;
+
+                    default :
+
+                        {
+                            newFits.writeFits((*it).mImg, UC8, fits2DName, mdp.FITS_COMPRESSION_METHOD);
+                        }
+
+                }
+
+            }
+
+            if(mdtp.DET_SAVE_AVI) {
+                // AVI frames are always written as 8-bit grayscale.
+                Mat iv = Conversion::convertTo8UC1((*it).mImg);
+                if(video->isOpened()) {
+                    video->write(iv);
+                }
+            }
+
+            // Add a frame to fits cube.
+            if(mdtp.DET_SAVE_FITS3D) {
+
+                // Track whether exposure varies across frames (kEXPOSURE becomes a sentinel later).
+                if(firstExpTime != (*it).mExposure)
+                    varExpTime = true;
+
+                sumExpTime += (*it).mExposure;
+                fits3d.addImageToFits3D((*it).mImg);
+
+            }
+
+            // Add frame to the event's stack.
+ if(mdtp.DET_SAVE_SUM && (*it).mFrameNumber >= firstEvPosInFB && (*it).mFrameNumber <= lastEvPosInFB){ + + stack.addFrame((*it)); + + } + + c++; + + } + } + + if(mdtp.DET_SAVE_AVI) { + if(video != NULL) + delete video; + } + + // ********************************* SAVE EVENT IN FITS CUBE *********************************** + + if(mdtp.DET_SAVE_FITS3D) { + + // Exposure time of a single frame. + if(varExpTime) + fits3d.kEXPOSURE = 999999; + else { + it = frameBuffer->begin(); + fits3d.kEXPOSURE = (*it).mExposure/1000000.0; + } + + // Exposure time sum of frames in the fits cube. + fits3d.kONTIME = sumExpTime/1000000.0; + + fits3d.writeFits3D(); + + } + + // ********************************* SAVE EVENT STACK IN FITS ********************************** + + if(mdtp.DET_SAVE_SUM) { + + stack.saveStack(mEventPath, mdtp.DET_SUM_MTHD, mdtp.DET_SUM_REDUCTION); + + } + + // ************************** EVENT STACK WITH HISTOGRAM EQUALIZATION *************************** + + if(mdtp.DET_SAVE_SUM_WITH_HIST_EQUALIZATION) { + + Mat s,s1, eqHist; + float bzero = 0.0; + float bscale = 1.0; + s = stack.reductionByFactorDivision(bzero,bscale); + cout << "mFormat : " << mFormat << endl; + if(mFormat != MONO8) + Conversion::convertTo8UC1(s).copyTo(s); + + equalizeHist(s, eqHist); + SaveImg::saveJPEG(eqHist,mEventPath+mStationName+"_"+TimeDate::getYYYYMMDDThhmmss(mEventDate)+"_UT"); + + } + + // *********************************** SEND MAIL NOTIFICATION *********************************** + BOOST_LOG_SEV(logger,notification) << "Prepare mail..." 
<< mmp.MAIL_DETECTION_ENABLED; + if(mmp.MAIL_DETECTION_ENABLED) { + + BOOST_LOG_SEV(logger,notification) << "Sending mail..."; + + for(int i = 0; i < pDetMthd->getDebugFiles().size(); i++) { + + if(boost::filesystem::exists( mEventPath + pDetMthd->getDebugFiles().at(i))) { + + BOOST_LOG_SEV(logger,notification) << "Send : " << mEventPath << pDetMthd->getDebugFiles().at(i); + mailAttachments.push_back(mEventPath + pDetMthd->getDebugFiles().at(i)); + + } + } + + if(mdtp.DET_SAVE_SUM_WITH_HIST_EQUALIZATION && boost::filesystem::exists(mEventPath + mStationName + "_" + TimeDate::getYYYYMMDDThhmmss(mEventDate) + "_UT.jpg")) { + + BOOST_LOG_SEV(logger,notification) << "Send : " << mEventPath << mStationName << "_" << TimeDate::getYYYYMMDDThhmmss(mEventDate) << "_UT.jpg"; + mailAttachments.push_back(mEventPath + mStationName + "_" + TimeDate::getYYYYMMDDThhmmss(mEventDate) + "_UT.jpg"); + + } + + SMTPClient::sendMail( mmp.MAIL_SMTP_SERVER, + mmp.MAIL_SMTP_LOGIN, + mmp.MAIL_SMTP_PASSWORD, + "freeture@" + mStationName +".fr", + mmp.MAIL_RECIPIENTS, + mStationName + "-" + TimeDate::getYYYYMMDDThhmmss(mEventDate), + mStationName + "\n" + mEventPath, + mailAttachments, + mmp.MAIL_CONNECTION_TYPE); + + } + + return true; + +} diff --git a/DetThread.h b/DetThread.h new file mode 100644 index 0000000..5629d7d --- /dev/null +++ b/DetThread.h @@ -0,0 +1,189 @@ +/* + DetThread.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. 
+* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 20/10/2014 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file DetThread.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 03/06/2014 +* \brief Detection thread. +*/ + +#pragma once + +#include "config.h" + +#ifdef LINUX + #define BOOST_LOG_DYN_LINK 1 +#endif + +#include "SMTPClient.h" +#include +#include "Fits.h" +#include "Fits2D.h" +#include "TimeDate.h" +#include "Fits3D.h" +#include "Stack.h" +#include "Detection.h" +#include "DetectionTemporal.h" +#include "DetectionTemplate.h" +#include "EStackMeth.h" +#include "EDetMeth.h" +#include +#include +#include "ESmtpSecurity.h" +#include "SParam.h" + +using namespace boost::filesystem; +using namespace cv; +using namespace std; +using namespace boost::posix_time; + +class DetThread { + + private : + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init{ + + public : + + Init() { + + logger.add_attribute("ClassName", boost::log::attributes::constant("DetThread")); + + } + + }initializer; + + boost::thread *pThread; // Pointer on detection thread. + Detection *pDetMthd; // Pointer on detection method. + bool mMustStop; + boost::mutex mMustStopMutex; + string mStationName; // Name of the station (parameter from configuration file). + CamPixFmt mFormat; // Acquisition bit depth (parameter from configuration file). + Fits mFitsHeader; + bool mIsRunning; // Detection thread running status. + bool mWaitFramesToCompleteEvent; + int mNbWaitFrames; + string mCfgPath; + string mEventPath; // Path of the last detected event. 
+ TimeDate::Date mEventDate; // Date of the last detected event. + int mNbDetection; // Number of detection. + bool mInterruptionStatus; + boost::mutex mInterruptionStatusMutex; + boost::circular_buffer *frameBuffer; + boost::mutex *frameBuffer_mutex; + boost::condition_variable *frameBuffer_condition; + bool *detSignal; + boost::mutex *detSignal_mutex; + boost::condition_variable *detSignal_condition; + string mCurrentDataSetLocation; + vector> mDetectionResults; + bool mForceToReset; + detectionParam mdtp; + dataParam mdp; + mailParam mmp; + fitskeysParam mfkp; + stationParam mstp; + int mNbFramesAround; // Number of frames to keep around an event. + + + public : + + DetThread( boost::circular_buffer *fb, + boost::mutex *fb_m, + boost::condition_variable *fb_c, + bool *dSignal, + boost::mutex *dSignal_m, + boost::condition_variable *dSignal_c, + detectionParam dtp, + dataParam dp, + mailParam mp, + stationParam sp, + fitskeysParam fkp, + CamPixFmt pfmt); + + ~DetThread(); + + void operator()(); + + bool startThread(); + + void stopThread(); + + bool buildEventDataDirectory(); + + /** + * Save an event in the directory "events". + * + * @param firstEvPosInFB First frame's number of the event. + * @param lastEvPosInFB Last frame's number of the event. + * @return Success to save an event. + */ + bool saveEventData(int firstEvPosInFB, int lastEvPosInFB); + + /** + * Run status of detection thread. + * + * @return Is running or not. + */ + bool getRunStatus(); + + /** + * Get detection method used by detection thread. + * + * @return Detection method. + */ + Detection* getDetMethod(); + + /** + * Interrupt detection thread. 
+ * + */ + void interruptThread(); + + void updateDetectionReport() { + + if(mCurrentDataSetLocation != "") { + + mDetectionResults.push_back(pair(mCurrentDataSetLocation, mNbDetection)); + mNbDetection = 0; + + } + }; + + void setCurrentDataSet(string location) { + + mCurrentDataSetLocation = location; + + }; + +}; + + diff --git a/Detection.h b/Detection.h new file mode 100644 index 0000000..78c6c0a --- /dev/null +++ b/Detection.h @@ -0,0 +1,163 @@ +/* + Detection.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 20/10/2014 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file Detection.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 03/06/2014 +* \brief Detection interface. 
+*/ + +#pragma once + +#include "config.h" + +#ifdef LINUX + #define BOOST_LOG_DYN_LINK 1 +#endif + +#include "opencv2/highgui/highgui.hpp" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "ELogSeverityLevel.h" +#include +#include +#include +#include +#include "TimeDate.h" +#include "Fits2D.h" +#include "Fits.h" +#include "Frame.h" +#include "EStackMeth.h" +#include "ECamPixFmt.h" +#include +#include "SParam.h" + +using namespace boost::filesystem; +using namespace std; +using namespace cv; + +class Detection { + + private : + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init { + + public: + + Init() { + + logger.add_attribute("ClassName", boost::log::attributes::constant("Detection")); + + } + + }initializer; + + public: + + virtual ~Detection() {}; + + /** + * Run meteor detection method. + * + * @param c Current frame. + * @return Success to perform analysis. + */ + virtual bool runDetection(Frame &c) { + + return false; + + }; + + /** + * Get the number of the first detected event's frame. + * + * @return Frame number. + */ + virtual int getEventFirstFrameNb() { + + return 0; + + }; + + /** + * Get the number of the last detected event's frame. + * + * @return Frame number. + */ + virtual int getEventLastFrameNb() { + + return 0; + + }; + + /** + * Get date of the detected event. + * + * @return Date of the event : YYYY-MM-DDTHH:MM:SS,fffffffff + */ + virtual TimeDate::Date getEventDate() { + + TimeDate::Date date; + return date; + + }; + + /** + * Reset detection method. + * + */ + virtual void resetDetection(bool loadNewDataSet){}; + + /** + * Reset mask. + * + */ + virtual void resetMask(){}; + + /** + * Save infos on the detected event. 
+ * + */ + virtual void saveDetectionInfos(string p, int nbFramesAround){}; + + virtual vector getDebugFiles() {vector files; return files;}; + +}; + diff --git a/DetectionTemplate.cpp b/DetectionTemplate.cpp new file mode 100644 index 0000000..20c46fe --- /dev/null +++ b/DetectionTemplate.cpp @@ -0,0 +1,174 @@ +/* + DetectionTemplate.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . 
+*
+* Last modified: 20/07/2015
+*
+*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/
+
+/**
+* \file DetectionTemplate.cpp
+* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD
+* \version 1.0
+* \date 03/03/2015
+*/
+
+#include "DetectionTemplate.h"
+
+boost::log::sources::severity_logger< LogSeverityLevel > DetectionTemplate::logger;
+
+DetectionTemplate::Init DetectionTemplate::initializer;
+
+// Constructor : store the detection parameters and build the mask manager.
+DetectionTemplate::DetectionTemplate(detectionParam dtp, CamPixFmt fmt):mImgNum(0), mDataSetCounter(0) {
+
+    mdtp = dtp;
+
+    mMaskControl = new Mask(10, dtp.ACQ_MASK_ENABLED, dtp.ACQ_MASK_PATH, dtp.DET_DOWNSAMPLE_ENABLED, fmt, true);
+
+}
+
+DetectionTemplate::~DetectionTemplate() {
+    if(mMaskControl != nullptr)
+        delete mMaskControl;
+}
+
+void DetectionTemplate::createDebugDirectories(bool cleanDebugDirectory) {
+
+}
+
+// Template detection entry point : computes a thresholded absolute difference
+// with the previous frame. Never reports a detection (always returns false).
+bool DetectionTemplate::runDetection(Frame &c) {
+
+    Mat currImg;
+
+    if(mdtp.DET_DOWNSAMPLE_ENABLED)
+        pyrDown(c.mImg, currImg, Size(c.mImg.cols / 2, c.mImg.rows / 2));
+    else
+        c.mImg.copyTo(currImg);
+
+    // --------------------------------
+    //          OPERATIONS
+    // --------------------------------
+
+    // Mask::applyMask() returns true when the mask has just been updated and
+    // applied, in which case the detection process must NOT continue — the
+    // contract documented in DetectionTemporal::runDetection.
+    // FIX: the condition was inverted : detection ran only on mask-update
+    // frames and mPrevFrame was released on every normal frame.
+    if(!mMaskControl->applyMask(currImg)) {
+
+        // --------------------------------
+        //      Check previous frame.
+        // --------------------------------
+
+        if(!mPrevFrame.data) {
+
+            cout << "PrevFrame has no data ! " << endl;
+            currImg.copyTo(mPrevFrame);
+            return false;
+
+        }
+
+        Mat absdiffImg;
+        cv::absdiff(currImg, mPrevFrame, absdiffImg);
+        //SaveImg::saveJPEG(Conversion::convertTo8UC1(absdiffImg), "/home/fripon/debug/absdiff/frame_" + Conversion::intToString(c.mFrameNumber));
+
+        // ---------------------------------
+        // Dilatation absolute difference.
+ // --------------------------------- + + int dilation_size = 2; + Mat element = getStructuringElement(MORPH_RECT, Size(2*dilation_size + 1, 2*dilation_size+1), Point(dilation_size, dilation_size)); + cv::dilate(absdiffImg, absdiffImg, element); + //SaveImg::saveJPEG(Conversion::convertTo8UC1(absdiffImg), "/home/fripon/debug/dilate/frame_" + Conversion::intToString(c.mFrameNumber)); + + // ---------------------------------- + // Threshold absolute difference. + // ---------------------------------- + + Mat absDiffBinaryMap = Mat(currImg.rows,currImg.cols, CV_8UC1,Scalar(0)); + Scalar meanAbsDiff, stddevAbsDiff; + cv::meanStdDev(absdiffImg, meanAbsDiff, stddevAbsDiff, mMaskControl->mCurrentMask); + int absDiffThreshold = meanAbsDiff[0] * 3; + + if(absdiffImg.type() == CV_16UC1) { + + unsigned short * ptrAbsDiff; + unsigned char * ptrMap; + + for(int i = 0; i < absdiffImg.rows; i++) { + + ptrAbsDiff = absdiffImg.ptr(i); + ptrMap = absDiffBinaryMap.ptr(i); + + for(int j = 0; j < absdiffImg.cols; j++){ + + if(ptrAbsDiff[j] > absDiffThreshold) { + ptrMap[j] = 255; + } + } + } + + //SaveImg::saveJPEG(absDiffBinaryMap, "/home/fripon/debug/thresh/frame_" + Conversion::intToString(c.mFrameNumber)); + + } + + currImg.copyTo(mPrevFrame); + + }else{ + + mPrevFrame.release(); + + } + + // No detection : return false + return false; + +} + +void DetectionTemplate::saveDetectionInfos(string p, int nbFramesAround) { + + +} + +void DetectionTemplate::resetDetection(bool loadNewDataSet) { + + +} + +void DetectionTemplate::resetMask() { + + +} + +int DetectionTemplate::getEventFirstFrameNb() { + + return 0; + +} + +TimeDate::Date DetectionTemplate::getEventDate() { + + TimeDate::Date d; + return d; + +} + +int DetectionTemplate::getEventLastFrameNb() { + + return 0; + +} diff --git a/DetectionTemplate.h b/DetectionTemplate.h new file mode 100644 index 0000000..3a0365d --- /dev/null +++ b/DetectionTemplate.h @@ -0,0 +1,142 @@ +/* + DetectionTemplate.h + 
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 03/03/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file DetectionTemplate.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 03/03/2015 +*/ + +#pragma once + +#include "config.h" + +#ifdef LINUX + #define BOOST_LOG_DYN_LINK 1 +#endif + +#include +#include "opencv2/highgui/highgui.hpp" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "ELogSeverityLevel.h" +#include "TimeDate.h" +#include "Fits2D.h" +#include "Fits.h" +#include "Frame.h" +#include "EStackMeth.h" +#include "ECamPixFmt.h" +#include "GlobalEvent.h" +#include "LocalEvent.h" +#include "Detection.h" +#include "EParser.h" +#include "SaveImg.h" +#include +#include +#include +#include +#include +#include "ImgProcessing.h" +#include "Mask.h" + +using namespace boost::filesystem; +namespace logging = boost::log; +namespace sinks = boost::log::sinks; +namespace attrs = boost::log::attributes; +namespace src = boost::log::sources; +namespace expr = boost::log::expressions; +namespace keywords = 
boost::log::keywords; +using namespace std; +using namespace cv; + +class DetectionTemplate : public Detection { + + private : + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init { + + public : + + Init() { + + logger.add_attribute("ClassName", boost::log::attributes::constant("DetectionTemplate")); + + } + + }initializer; + + int mImgNum; // Current frame number. + Mat mPrevFrame; // Previous frame. + Mat mMask; // Mask applied to frames. + int mDataSetCounter; + detectionParam mdtp; + Mask *mMaskControl; + + + public : + + DetectionTemplate(detectionParam dtp, CamPixFmt fmt); + + ~DetectionTemplate(); + + void initMethod(string cfgPath); + + bool runDetection(Frame &c); + + void saveDetectionInfos(string p, int nbFramesAround); + + void resetDetection(bool loadNewDataSet); + + void resetMask(); + + int getEventFirstFrameNb(); + + TimeDate::Date getEventDate(); + + int getEventLastFrameNb(); + + private : + + void createDebugDirectories(bool cleanDebugDirectory); + +}; + + diff --git a/DetectionTemporal.cpp b/DetectionTemporal.cpp new file mode 100644 index 0000000..a66224a --- /dev/null +++ b/DetectionTemporal.cpp @@ -0,0 +1,1351 @@ +/* + DetectionTemporal.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. 
+* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 20/07/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file DetectionTemporal.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 03/06/2014 +* \brief Detection method by temporal movement. +*/ + +#include "DetectionTemporal.h" + +boost::log::sources::severity_logger< LogSeverityLevel > DetectionTemporal::logger; + +DetectionTemporal::Init DetectionTemporal::initializer; + +DetectionTemporal::DetectionTemporal(detectionParam dtp, CamPixFmt fmt) { + + mListColors.push_back(Scalar(0,0,139)); // DarkRed + mListColors.push_back(Scalar(0,0,255)); // Red + mListColors.push_back(Scalar(0,100,100)); // IndianRed + mListColors.push_back(Scalar(92,92,205)); // Salmon + mListColors.push_back(Scalar(0,140,255)); // DarkOrange + mListColors.push_back(Scalar(30,105,210)); // Chocolate + mListColors.push_back(Scalar(0,255,255)); // Yellow + mListColors.push_back(Scalar(140,230,240)); // Khaki + mListColors.push_back(Scalar(224,255,255)); // LightYellow + mListColors.push_back(Scalar(211,0,148)); // DarkViolet + mListColors.push_back(Scalar(147,20,255)); // DeepPink + mListColors.push_back(Scalar(255,0,255)); // Magenta + mListColors.push_back(Scalar(0,100,0)); // DarkGreen + mListColors.push_back(Scalar(0,128,128)); // Olive + mListColors.push_back(Scalar(0,255,0)); // Lime + mListColors.push_back(Scalar(212,255,127)); // Aquamarine + mListColors.push_back(Scalar(208,224,64)); // Turquoise + mListColors.push_back(Scalar(205,0,0)); // Blue + mListColors.push_back(Scalar(255,191,0)); // DeepSkyBlue + mListColors.push_back(Scalar(255,255,0)); // Cyan + + mImgNum = 0; + mDebugUpdateMask = false; + mSubdivisionStatus = false; + mDataSetCounter = 0; + mRoiSize[0] = 10; + mRoiSize[1] = 10; + mdtp = dtp; + + mMaskManager = new Mask(dtp.DET_UPDATE_MASK_FREQUENCY, dtp.ACQ_MASK_ENABLED, 
dtp.ACQ_MASK_PATH, dtp.DET_DOWNSAMPLE_ENABLED, fmt, dtp.DET_UPDATE_MASK); + + // Create local mask to eliminate single white pixels. + Mat maskTemp(3,3,CV_8UC1,Scalar(255)); + maskTemp.at(1, 1) = 0; + maskTemp.copyTo(mLocalMask); + + mdtp.DET_DEBUG_PATH = mdtp.DET_DEBUG_PATH + "/"; + + mDebugCurrentPath = mdtp.DET_DEBUG_PATH; + + // Create directories for debugging method. + if(dtp.DET_DEBUG) + createDebugDirectories(true); + +} + +DetectionTemporal::~DetectionTemporal() { + + if(mMaskManager != NULL) + delete mMaskManager; + +} + +void DetectionTemporal::resetDetection(bool loadNewDataSet){ + + BOOST_LOG_SEV(logger, notification) << "Clear global events list."; + mListGlobalEvents.clear(); + // Clear list of files to send by mail. + debugFiles.clear(); + mSubdivisionStatus = false; + mPrevThresholdedMap.release(); + mPrevFrame.release(); + + if(mdtp.DET_DEBUG && loadNewDataSet) { + mDataSetCounter++; + createDebugDirectories(false); + } +} + +void DetectionTemporal::resetMask(){ + + mMaskManager->resetMask(); + +} + +void DetectionTemporal::createDebugDirectories(bool cleanDebugDirectory){ + + mDebugCurrentPath = mdtp.DET_DEBUG_PATH + "debug_" + Conversion::intToString(mDataSetCounter) + "/" ; + + if(cleanDebugDirectory) { + + const boost::filesystem::path p0 = path(mdtp.DET_DEBUG_PATH); + + if(boost::filesystem::exists(p0)) { + boost::filesystem::remove_all(p0); + }else { + boost::filesystem::create_directories(p0); + } + + } + + const boost::filesystem::path p1 = path(mDebugCurrentPath); + + if(!boost::filesystem::exists(p1)) + boost::filesystem::create_directories(p1); + + vector debugSubDir; + debugSubDir.push_back("original"); + debugSubDir.push_back("absolute_difference"); + debugSubDir.push_back("event_map_initial"); + debugSubDir.push_back("event_map_filtered"); + debugSubDir.push_back("absolute_difference_dilated"); + debugSubDir.push_back("neg_difference_thresholded"); + debugSubDir.push_back("pos_difference_thresholded"); + 
debugSubDir.push_back("neg_difference"); + debugSubDir.push_back("pos_difference"); + + for(int i = 0; i< debugSubDir.size(); i++){ + + const boost::filesystem::path path(mDebugCurrentPath + debugSubDir.at(i)); + + if(!boost::filesystem::exists(path)) { + boost::filesystem::create_directories(path); + } + + } + +} + +void DetectionTemporal::saveDetectionInfos(string p, int nbFramesAround){ + + // Save ge map. + if(mdtp.temporal.DET_SAVE_GEMAP) { + SaveImg::saveBMP((*mGeToSave).getMapEvent(), p + "GeMap"); + debugFiles.push_back("GeMap.bmp"); + } + + // Save dir map. + if(mdtp.temporal.DET_SAVE_DIRMAP) { + SaveImg::saveBMP((*mGeToSave).getDirMap(), p + "DirMap"); + } + + // Save infos. + /*if(mdtp.temporal.DET_SAVE_GE_INFOS) { + + ofstream infFile; + string infFilePath = p + "GeInfos.txt"; + infFile.open(infFilePath.c_str()); + + infFile << " * AGE : " << (*mGeToSave).getAge() << "\n"; + infFile << " * AGE LAST ELEM : " << (*mGeToSave).getAgeLastElem() << "\n"; + infFile << " * LINEAR STATE : " << (*mGeToSave).getLinearStatus() << "\n"; + infFile << " * BAD POS : " << (*mGeToSave).getBadPos() << "\n"; + infFile << " * GOOD POS : " << (*mGeToSave).getGoodPos() << "\n"; + infFile << " * NUM FIRST FRAME : " << (*mGeToSave).getNumFirstFrame() << "\n"; + infFile << " * NUM LAST FRAME : " << (*mGeToSave).getNumLastFrame() << "\n"; + + float d = sqrt(pow((*mGeToSave).mainPts.back().x - (*mGeToSave).mainPts.front().x,2.0) + pow((*mGeToSave).mainPts.back().y - (*mGeToSave).mainPts.front().y,2.0)); + infFile << "\n * Distance between first and last : " << d << "\n"; + + infFile << "\n * MainPoints position : \n"; + for(int i = 0; i < (*mGeToSave).mainPts.size(); i++) + infFile << " (" << (*mGeToSave).mainPts.at(i).x << ";"<< (*mGeToSave).mainPts.at(i).y << ")\n"; + + infFile << "\n * MainPoints details : \n"; + for(int i = 0; i < (*mGeToSave).listA.size(); i++){ + + infFile << " A(" << (*mGeToSave).listA.at(i).x << ";" << (*mGeToSave).listA.at(i).y << ") ----> "; + infFile << 
" B(" << (*mGeToSave).listB.at(i).x << ";" << (*mGeToSave).listB.at(i).y << ") ----> "; + infFile << " C(" << (*mGeToSave).listC.at(i).x << ";" << (*mGeToSave).listC.at(i).y << ")\n"; + infFile << " u(" << (*mGeToSave).listu.at(i).x << ";" << (*mGeToSave).listu.at(i).y << ") "; + infFile << " v(" << (*mGeToSave).listv.at(i).x << ";" << (*mGeToSave).listv.at(i).y << ")\n"; + infFile << " Angle rad between BA' / BC = " << (*mGeToSave).listRad.at(i) << "\n"; + infFile << " Angle between BA' / BC = " << (*mGeToSave).listAngle.at(i) << "\n"; + + if((*mGeToSave).mainPtsValidity.at(i)) infFile << " NEW POSITION ACCEPTED\n\n"; + else infFile << " NEW POSITION REFUSED\n\n"; + + } + + infFile.close(); + }*/ + + // Save positions. + if(mdtp.temporal.DET_SAVE_POS) { + + ofstream posFile; + string posFilePath = p + "positions.txt"; + posFile.open(posFilePath.c_str()); + + // Number of the first frame associated to the event. + int numFirstFrame = -1; + + vector::iterator itLe; + for(itLe = (*mGeToSave).LEList.begin(); itLe!=(*mGeToSave).LEList.end(); ++itLe) { + + if(numFirstFrame == -1) + numFirstFrame = (*itLe).getNumFrame(); + + Point pos = (*itLe).getMassCenter(); + + int positionY = 0; + if(mdtp.DET_DOWNSAMPLE_ENABLED) { + pos*=2; + positionY = mPrevFrame.rows*2 - pos.y; + }else { + positionY = mPrevFrame.rows - pos.y; + } + + // NUM_FRAME POSITIONX POSITIONY (inversed) + string line = Conversion::intToString((*itLe).getNumFrame() - numFirstFrame + nbFramesAround) + " (" + Conversion::intToString(pos.x) + ";" + Conversion::intToString(positionY) + ") " + TimeDate::getIsoExtendedFormatDate((*itLe).mFrameAcqDate)+ "\n"; + posFile << line; + + } + + posFile.close(); + + } +} + +vector DetectionTemporal::getDebugFiles() { + + return debugFiles; + +} + +bool DetectionTemporal::runDetection(Frame &c) { + + + /*chrono::duration dur_elapsed; + auto start = chrono::high_resolution_clock::now();*/ + + if(!mSubdivisionStatus) { + + mSubdivisionPos.clear(); + + int h = c.mImg.rows; + 
int w = c.mImg.cols; + + if(mdtp.DET_DOWNSAMPLE_ENABLED) { + h /= 2; + w /= 2; + } + + ImgProcessing::subdivideFrame(mSubdivisionPos, 8, h, w); + mSubdivisionStatus = true; + + if(mdtp.DET_DEBUG) { + + Mat s = Mat(h, w,CV_8UC1,Scalar(0)); + + for(int i = 0; i < 8; i++) { + line(s, Point(0, i * (h/8)), Point(w - 1, i * (h/8)), Scalar(255), 1); + line(s, Point(i * (w/8), 0), Point(i * (w/8), h-1), Scalar(255), 1); + } + + SaveImg::saveBMP(s, mDebugCurrentPath + "subdivisions_map"); + + } + + }else { + + double tDownsample = 0; + double tAbsDiff = 0; + double tPosDiff = 0; + double tNegDiff = 0; + double tDilate = 0; + double tThreshold = 0; + double tStep1 = 0; + double tStep2 = 0; + double tStep3 = 0; + double tStep4 = 0; + double tTotal = (double)getTickCount(); + + /// %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + /// %%%%%%%%%%%%%%%%%%%%%%%%%%% STEP 1 : FILETRING / THRESHOLDING %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + /// %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + double tstep1 = (double)getTickCount(); + Mat currImg; + + // ------------------------------ + // Downsample current frame. + // ------------------------------- + + if(mdtp.DET_DOWNSAMPLE_ENABLED) { + + tDownsample = (double)getTickCount(); + pyrDown(c.mImg, currImg, Size(c.mImg.cols / 2, c.mImg.rows / 2)); + tDownsample = ((double)getTickCount() - tDownsample); + + }else { + + c.mImg.copyTo(currImg); + + } + + // Apply mask on currImg. + // If true is returned, it means that the mask has been updated and applied on currImg. Detection process can't continue. + // If false is returned, it means that the mask has not been updated. Detection process can continue. + if(!mMaskManager->applyMask(currImg)) { + + // -------------------------------- + // Check previous frame. 
+ // -------------------------------- + + if(!mPrevFrame.data) { + + currImg.copyTo(mPrevFrame); + return false; + + } + + // -------------------------------- + // Differences. + // -------------------------------- + + Mat absdiffImg, posDiffImg, negDiffImg; + + // Absolute difference. + tAbsDiff = (double)getTickCount(); + cv::absdiff(currImg, mPrevFrame, absdiffImg); + tAbsDiff = (double)getTickCount() - tAbsDiff; + + // Positive difference. + tPosDiff = (double)getTickCount(); + cv::subtract(currImg,mPrevFrame,posDiffImg,mMaskManager->mCurrentMask); + tPosDiff = (double)getTickCount() - tPosDiff; + + // Negative difference. + tNegDiff = (double)getTickCount(); + cv::subtract(mPrevFrame,currImg,negDiffImg,mMaskManager->mCurrentMask); + tNegDiff = (double)getTickCount() - tNegDiff; + + // --------------------------------- + // Dilatate absolute difference. + // --------------------------------- + + tDilate = (double)getTickCount(); + int dilation_size = 2; + Mat element = getStructuringElement(MORPH_RECT, Size(2*dilation_size + 1, 2*dilation_size+1), Point(dilation_size, dilation_size)); + cv::dilate(absdiffImg, absdiffImg, element); + tDilate = (double)getTickCount() - tDilate; + + // ------------------------------------------------------------------------------ + // Threshold absolute difference / positive difference / negative difference + // ------------------------------------------------------------------------------ + + tThreshold = (double)getTickCount(); + Mat absDiffBinaryMap = ImgProcessing::thresholding(absdiffImg, mMaskManager->mCurrentMask, 3, Thresh::MEAN); + tThreshold = (double)getTickCount() - tThreshold; + + Scalar meanPosDiff, stddevPosDiff, meanNegDiff, stddevNegDiff; + meanStdDev(posDiffImg, meanPosDiff, stddevPosDiff, mMaskManager->mCurrentMask); + meanStdDev(negDiffImg, meanNegDiff, stddevNegDiff, mMaskManager->mCurrentMask); + int posThreshold = stddevPosDiff[0] * 5 + 10; + int negThreshold = stddevNegDiff[0] * 5 + 10; + + 
if(mdtp.DET_DEBUG) { + + Mat posBinaryMap = ImgProcessing::thresholding(posDiffImg, mMaskManager->mCurrentMask, 5, Thresh::STDEV); + Mat negBinaryMap = ImgProcessing::thresholding(negDiffImg, mMaskManager->mCurrentMask, 5, Thresh::STDEV); + + SaveImg::saveBMP(Conversion::convertTo8UC1(currImg), mDebugCurrentPath + "/original/frame_" + Conversion::intToString(c.mFrameNumber)); + SaveImg::saveBMP(posBinaryMap, mDebugCurrentPath + "/pos_difference_thresholded/frame_" + Conversion::intToString(c.mFrameNumber)); + SaveImg::saveBMP(negBinaryMap, mDebugCurrentPath + "/neg_difference_thresholded/frame_" + Conversion::intToString(c.mFrameNumber)); + SaveImg::saveBMP(absDiffBinaryMap, mDebugCurrentPath + "/absolute_difference_thresholded/frame_" + Conversion::intToString(c.mFrameNumber)); + SaveImg::saveBMP(absdiffImg, mDebugCurrentPath + "/absolute_difference/frame_"+Conversion::intToString(c.mFrameNumber)); + SaveImg::saveBMP(Conversion::convertTo8UC1(posDiffImg), mDebugCurrentPath + "/pos_difference/frame_" + Conversion::intToString(c.mFrameNumber)); + SaveImg::saveBMP(Conversion::convertTo8UC1(negDiffImg), mDebugCurrentPath + "/neg_difference/frame_" + Conversion::intToString(c.mFrameNumber)); + + } + + // Current frame is stored as the previous frame. + currImg.copyTo(mPrevFrame); + + tStep1 = (double)getTickCount() - tStep1; + + /// %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + /// %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% STEP 2 : FIND LOCAL EVENT %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + /// %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + // SUMMARY : + // Loop binarized absolute difference image. + // For each white pixel, define a Region of interest (ROI) of 10x10 centered in this pixel. + // Create a new Local Event initialized with this first ROI or attach this ROI to an existing Local Event. 
+ // Loop the ROI in the binarized absolute difference image to store position of white pixels. + // Loop the ROI in the positive difference image to store positions of white pixels. + // Loop the ROI in the negative difference image to store positions of white pixels. + // Once the list of Local Event has been completed : + // Analyze each local event in order to check that pixels can be clearly split in two groups (negative, positive). + + vector listLocalEvents; + vector::iterator itLE; + tStep2 = (double)getTickCount(); + + // Event map for the current frame. + Mat eventMap = Mat(currImg.rows,currImg.cols, CV_8UC3,Scalar(0,0,0)); + + // ---------------------------------- + // Search local events. + // ---------------------------------- + + // Iterator on list of sub-regions. + vector::iterator itR; + + for(itR = mSubdivisionPos.begin(); itR != mSubdivisionPos.end(); ++itR) { + + // Extract subdivision from binary map. + Mat subdivision = absDiffBinaryMap(Rect((*itR).x, (*itR).y, absDiffBinaryMap.cols/8, absDiffBinaryMap.rows/8)); + + // Check if there is white pixels. + if(countNonZero(subdivision) > 0){ + + string debugMsg = ""; + analyseRegion( subdivision, + absDiffBinaryMap, + eventMap, + posDiffImg, + posThreshold, + negDiffImg, + negThreshold, + listLocalEvents, + (*itR), + mdtp.temporal.DET_LE_MAX, + c.mFrameNumber, + debugMsg, + c.mDate); + } + } + + for(int i = 0; i < listLocalEvents.size(); i++) + listLocalEvents.at(i).setLeIndex(i); + + if(mdtp.DET_DEBUG) SaveImg::saveBMP(eventMap, mDebugCurrentPath + "/event_map_initial/frame_" + Conversion::intToString(c.mFrameNumber)); + + // ---------------------------------- + // Link between LE. + // ---------------------------------- + + int leNumber = listLocalEvents.size(); + + // Liste d'iterators sur la liste des localEvent contenant soit un cluster positif ou negatif. + vector::iterator > itLePos, itLeNeg; + + // Association d'un local event cluster positif avec un local event cluster negatif. 
+ vector::iterator, vector::iterator> > itPair; + + itLE = listLocalEvents.begin(); + + // Search pos and neg alone. + while(itLE != listLocalEvents.end()) { + + // Le has pos cluster but no neg cluster. + if((*itLE).getPosClusterStatus() && !(*itLE).getNegClusterStatus()) { + itLePos.push_back(itLE); + }else if(!(*itLE).getPosClusterStatus() && (*itLE).getNegClusterStatus()){ + itLeNeg.push_back(itLE); + } + + ++itLE; + + } + + int maxRadius = 50; + + // Try to link a positive cluster to a negative one. + for(int i = 0; i < itLePos.size(); i++) { + + int nbPotentialNeg = 0; + + vector::iterator itChoose; + vector::iterator >::iterator c; + + for(vector::iterator >::iterator j = itLeNeg.begin(); j != itLeNeg.end();) { + + Point A = (*itLePos.at(i)).getMassCenter(); + Point B = (*(*j)).getMassCenter(); + float dist = sqrt(pow((A.x - B.x),2) + pow((A.y - B.y),2)); + + if(dist < 50) { + + nbPotentialNeg++; + itChoose = (*j); + c = j; + + } + + ++j; + } + + if(nbPotentialNeg == 1) { + + (*itLePos.at(i)).mergeWithAnOtherLE((*itChoose)); + (*itLePos.at(i)).setMergedStatus(true); + (*itChoose).setMergedStatus(true); + itLeNeg.erase(c); + + } + + } + + // Delete pos cluster not merged and negative cluster not merged. + itLE = listLocalEvents.begin(); + + // Search pos and neg alone. + while(itLE != listLocalEvents.end()) { + + // Le has pos cluster but no neg cluster. + if(// ((*itLE).getPosClusterStatus() && !(*itLE).getNegClusterStatus() && !(*itLE).getMergedStatus())|| + (!(*itLE).getPosClusterStatus() && (*itLE).getNegClusterStatus()&& (*itLE).getMergedStatus())) { + itLE = listLocalEvents.erase(itLE); + }else { + ++itLE; + } + } + + // ----------------------------------- + // Circle TEST. 
+ // ----------------------------------- + + leNumber = listLocalEvents.size(); + + itLE = listLocalEvents.begin(); + + while(itLE != listLocalEvents.end()) { + + if((*itLE).getPosClusterStatus() && (*itLE).getNegClusterStatus()) { + + if((*itLE).localEventIsValid()) { + ++itLE; + }else { + itLE = listLocalEvents.erase(itLE); + } + + }else { + ++itLE; + } + } + + if(mdtp.DET_DEBUG) { + + Mat eventMapFiltered = Mat(currImg.rows,currImg.cols, CV_8UC3,Scalar(0,0,0)); + + for(int i = 0; i < listLocalEvents.size(); i++) { + + Mat roiF(10, 10, CV_8UC3, listLocalEvents.at(i).getColor()); + + for(int j = 0; j < listLocalEvents.at(i).mLeRoiList.size();j++) { + if( listLocalEvents.at(i).mLeRoiList.at(j).x-5 > 0 && + listLocalEvents.at(i).mLeRoiList.at(j).x+5 < eventMapFiltered.cols && + listLocalEvents.at(i).mLeRoiList.at(j).y-5 > 0 && + listLocalEvents.at(i).mLeRoiList.at(j).y+5 < eventMapFiltered.rows) { + roiF.copyTo(eventMapFiltered(Rect(listLocalEvents.at(i).mLeRoiList.at(j).x - 5, listLocalEvents.at(i).mLeRoiList.at(j).y - 5, 10, 10))); + } + } + + } + + SaveImg::saveBMP(eventMapFiltered, mDebugCurrentPath + "/event_map_filtered/frame_" + Conversion::intToString(c.mFrameNumber)); + + } + + tStep2 = (double)getTickCount() - tStep2; + + /// %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + /// %%%%%%%%%%%%%%%%%%%%%%%%%% STEP 3 : ATTACH LE TO GE OR CREATE NEW ONE %%%%%%%%%%%%%%%%%%%%%%%%% + /// %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + // SUMMARY : + // Loop list of local events. + // Create a new global event initialized with the current Local event or attach it to an existing global event. + // If attached, check the positive-negative couple of the global event. + + // Iterator on list of global event. 
+ vector::iterator itGE; + + tStep3 = (double)getTickCount(); + + itLE = listLocalEvents.begin(); + + while(itLE != listLocalEvents.end()) { + + bool LELinked = false; + vector::iterator itGESelected; + bool GESelected = false; + + (*itLE).setNumFrame(c.mFrameNumber); + + for(itGE = mListGlobalEvents.begin(); itGE != mListGlobalEvents.end(); ++itGE){ + + Mat res = (*itLE).getMap() & (*itGE).getMapEvent(); + + if(countNonZero(res) > 0){ + + LELinked = true; + + // The current LE has found a possible global event. + if(GESelected){ + + //cout << "The current LE has found a possible global event."<< endl; + + // Choose the older global event. + if((*itGE).getAge() > (*itGESelected).getAge()){ + + //cout << "Choose the older global event."<< endl; + itGESelected = itGE; + + } + + }else{ + + //cout << "Keep same"<< endl; + itGESelected = itGE; + GESelected = true; + + } + + break; + + } + } + + // Add current LE to an existing GE + if(GESelected){ + + //cout << "Add current LE to an existing GE ... "<< endl; + // Add LE. + (*itGESelected).addLE((*itLE)); + //cout << "Flag to indicate that a local event has been added ... "<< endl; + // Flag to indicate that a local event has been added. + (*itGESelected).setNewLEStatus(true); + //cout << "reset age of the last local event received by the global event.... "<< endl; + // reset age of the last local event received by the global event. + (*itGESelected).setAgeLastElem(0); + + }else{ + + // The current LE has not been linked. It became a new GE. + if(mListGlobalEvents.size() < mdtp.temporal.DET_GE_MAX){ + + //cout << "Selecting last available color ... "<< endl; + Scalar geColor = Scalar(255,255,255);//availableGeColor.back(); + //cout << "Deleting last available color ... "<< endl; + //availableGeColor.pop_back(); + //cout << "Creating new GE ... "<< endl; + GlobalEvent newGE(c.mDate, c.mFrameNumber, currImg.rows, currImg.cols, geColor); + //cout << "Adding current LE ... 
"<< endl; + newGE.addLE((*itLE)); + //cout << "Pushing new LE to GE list ... "<< endl; + //Add the new globalEvent to the globalEvent's list + mListGlobalEvents.push_back(newGE); + + } + } + + itLE = listLocalEvents.erase(itLE); // Delete the current localEvent. + + } + + tStep3 = (double)getTickCount() - tStep3; + + /// %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + /// %%%%%%%%%%%%%%%%%%%%%%%%%%%%%% STEP 4 : MANAGE LIST GLOBAL EVENT %%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + /// %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + tStep4 = (double)getTickCount(); // Count process time of step 4. + itGE = mListGlobalEvents.begin(); // Iterator on global event list. + bool saveSignal = false; // Returned signal to indicate to save a GE or not. + + // Loop global event list to check their characteristics. + while(itGE != mListGlobalEvents.end()) { + + (*itGE).setAge((*itGE).getAge() + 1); // Increment age. + + // If the current global event has not received any new local event. + if(!(*itGE).getNewLEStatus()){ + + (*itGE).setAgeLastElem((*itGE).getAgeLastElem()+1); // Increment its "Without any new local event age" + + }else{ + + (*itGE).setNumLastFrame(c.mFrameNumber); + (*itGE).setNewLEStatus(false); + } + + string msgGe = ""; + + // CASE 1 : FINISHED EVENT. + if((*itGE).getAgeLastElem() > 5){ + + // Linear profil ? Minimum duration respected ? + if((*itGE).LEList.size() >= 5 + && (*itGE).continuousGoodPos(4, msgGe) + && (*itGE).ratioFramesDist(msgGe) + && (*itGE).negPosClusterFilter(msgGe)){ + + mGeToSave = itGE; + saveSignal = true; + break; + + }else{ + itGE = mListGlobalEvents.erase(itGE); // Delete the event. + } + + // CASE 2 : NOT FINISHED EVENT. + }else{ + + int nbsec = TimeDate::secBetweenTwoDates((*itGE).getDate(), c.mDate); + bool maxtime = false; + if(nbsec > mdtp.DET_TIME_MAX) + maxtime = true; + + // Check some characteristics : Too long event ? not linear ? 
+ if( maxtime + || (!(*itGE).getLinearStatus() + && !(*itGE).continuousGoodPos(5,msgGe)) + || (!(*itGE).getLinearStatus() + && (*itGE).continuousBadPos((int)(*itGE).getAge()/2))){ + + itGE = mListGlobalEvents.erase(itGE); // Delete the event. + + if(maxtime) { + + TimeDate::Date gedate = (*itGE).getDate(); + BOOST_LOG_SEV(logger, notification) << "# GE deleted because max time reached : "; + string m = "- (*itGE).getDate() : " + + Conversion::numbering(4, gedate.year) + Conversion::intToString(gedate.year) + + Conversion::numbering(2, gedate.month) + Conversion::intToString(gedate.month) + + Conversion::numbering(2, gedate.day) + Conversion::intToString(gedate.day) + "T" + + Conversion::numbering(2, gedate.hours) + Conversion::intToString(gedate.hours) + + Conversion::numbering(2, gedate.minutes) + Conversion::intToString(gedate.minutes) + + Conversion::numbering(2, gedate.seconds) + Conversion::intToString((int)gedate.seconds); + + BOOST_LOG_SEV(logger, notification) << m; + + BOOST_LOG_SEV(logger, notification) << "- c.mDate : " + << Conversion::numbering(4, c.mDate.year) << Conversion::intToString(c.mDate.year) + << Conversion::numbering(2, c.mDate.month) << Conversion::intToString(c.mDate.month) + << Conversion::numbering(2, c.mDate.day) << Conversion::intToString(c.mDate.day) << "T" + << Conversion::numbering(2, c.mDate.hours) << Conversion::intToString(c.mDate.hours) + << Conversion::numbering(2, c.mDate.minutes) << Conversion::intToString(c.mDate.minutes) + << Conversion::numbering(2, c.mDate.seconds) << Conversion::intToString((int)c.mDate.seconds); + + BOOST_LOG_SEV(logger, notification) << "- difftime in sec : " << nbsec; + BOOST_LOG_SEV(logger, notification) << "- maxtime in sec : " << mdtp.DET_TIME_MAX; + + } + + // Let the GE alive. 
+ }else if(c.mFrameRemaining < 10 && c.mFrameRemaining != 0){ + + if((*itGE).LEList.size() >= 5 && (*itGE).continuousGoodPos(4,msgGe) && (*itGE).ratioFramesDist(msgGe)&& (*itGE).negPosClusterFilter(msgGe)){ + + mGeToSave = itGE; + saveSignal = true; + break; + + }else{ + itGE = mListGlobalEvents.erase(itGE); // Delete the event. + } + + }else{ + ++itGE; // Do nothing to the current GE, check the following one. + } + } + } + + tStep4 = (double)getTickCount() - tStep4; + tTotal = (double)getTickCount() - tTotal; + + //dur_elapsed = chrono::high_resolution_clock::now() - start; + //cerr<<"Meteor "< DetectionTemporal::getColorInEventMap(Mat &eventMap, Point roiCenter) { + + // ROI in the eventMap. + Mat roi; + + // ROI extraction from the eventmap. + eventMap(Rect(roiCenter.x-mRoiSize[0]/2, roiCenter.y-mRoiSize[1]/2, mRoiSize[0], mRoiSize[1])).copyTo(roi); + + unsigned char *ptr = (unsigned char*)roi.data; + + int cn = roi.channels(); + + vector listColor; + + bool exist = false; + + for(int i = 0; i < roi.rows; i++){ + + for(int j = 0; j < roi.cols; j++){ + + Scalar bgrPixel; + bgrPixel.val[0] = ptr[i*roi.cols*cn + j*cn + 0]; // B + bgrPixel.val[1] = ptr[i*roi.cols*cn + j*cn + 1]; // G + bgrPixel.val[2] = ptr[i*roi.cols*cn + j*cn + 2]; // R + + if(bgrPixel.val[0] != 0 || bgrPixel.val[1] != 0 || bgrPixel.val[2] != 0){ + + for(int k = 0; k < listColor.size(); k++){ + + if(bgrPixel == listColor.at(k)){ + + exist = true; + break; + + } + } + + if(!exist) + listColor.push_back(bgrPixel); + + exist = false; + + } + } + } + + return listColor; + +} + +void DetectionTemporal::colorRoiInBlack(Point p, int h, int w, Mat ®ion){ + + int posX = p.x - w; + int posY = p.y - h; + + if(p.x - w < 0) { + + w = p.x + w/2; + posX = 0; + + }else if(p.x + w/2 > region.cols) { + + w = region.cols - p.x + w/2; + + } + + if(p.y - h < 0) { + + h = p.y + h/2; + posY = 0; + + + }else if(p.y + h/2 > region.rows) { + + h = region.rows - p.y + h/2; + + } + + // Color roi in black in the current 
region. + Mat roiBlackRegion(h, w, CV_8UC1, Scalar(0)); + roiBlackRegion.copyTo(region(Rect(posX, posY, w, h))); + +} + +void DetectionTemporal::analyseRegion( Mat &subdivision, + Mat &absDiffBinaryMap, + Mat &eventMap, + Mat &posDiff, + int posDiffThreshold, + Mat &negDiff, + int negDiffThreshold, + vector &listLE, + Point subdivisionPos, // Origin position of a region in frame (corner top left) + int maxNbLE, + int numFrame, + string &msg, + TimeDate::Date cFrameDate){ + +int situation = 0; +int nbCreatedLE = 0; +int nbRoiAttachedToLE = 0; +int nbNoCreatedLE = 0; +int nbROI = 0; +int nbRoiNotAnalysed = 0; +int roicounter = 0; + +unsigned char * ptr; + +// Loop pixel's subdivision. +for(int i = 0; i < subdivision.rows; i++) { + + ptr = subdivision.ptr(i); + + for(int j = 0; j < subdivision.cols; j++) { + + // Pixel is white. + if((int)ptr[j] > 0) { + + // Check if we are not out of frame range when a ROI is defined at the current pixel location. + if((subdivisionPos.y + i - mRoiSize[1]/2 > 0) && + (subdivisionPos.y + i + mRoiSize[1]/2 < absDiffBinaryMap.rows) && + (subdivisionPos.x + j - mRoiSize[0]/2 > 0) && + (subdivisionPos.x + j + mRoiSize[0]/2 < absDiffBinaryMap.cols)) { + + msg = msg + + "Analyse ROI (" + + Conversion::intToString(subdivisionPos.x + j) + ";" + Conversion::intToString(subdivisionPos.y + i) + ")\n"; + + nbROI++; + roicounter++; + // Get colors in eventMap at the current ROI location. 
+ vector listColorInRoi = getColorInEventMap(eventMap, Point(subdivisionPos.x + j, subdivisionPos.y + i)); + + if(listColorInRoi.size() == 0) situation = 0; // black color = create a new local event + if(listColorInRoi.size() == 1) situation = 1; // one color = add the current roi to an existing local event + if(listColorInRoi.size() > 1) situation = 2; // several colors = make a decision + + switch(situation) { + + case 0 : + + { + + if(listLE.size() < maxNbLE) { + + msg = msg + + "->CREATE New Local EVENT\n" + + " - Initial position : (" + + Conversion::intToString(subdivisionPos.x + j) + ";" + Conversion::intToString(subdivisionPos.y + i) + ")\n" + + " - Color : (" + Conversion::intToString(mListColors.at(listLE.size())[0]) + ";" + + Conversion::intToString(mListColors.at(listLE.size())[1]) + ";" + + Conversion::intToString(mListColors.at(listLE.size())[2]) + ")\n"; + + // Create new localEvent object. + LocalEvent newLocalEvent( mListColors.at(listLE.size()), + Point(subdivisionPos.x + j, subdivisionPos.y + i), + absDiffBinaryMap.rows, + absDiffBinaryMap.cols, + mRoiSize); + + // Extract white pixels in ROI. 
+ vector whitePixAbsDiff,whitePixPosDiff, whitePixNegDiff; + Mat roiAbsDiff, roiPosDiff, roiNegDiff; + + absDiffBinaryMap(Rect(subdivisionPos.x + j - mRoiSize[0]/2, subdivisionPos.y + i - mRoiSize[1]/2, mRoiSize[0], mRoiSize[1])).copyTo(roiAbsDiff); + posDiff(Rect(subdivisionPos.x + j - mRoiSize[0]/2, subdivisionPos.y + i - mRoiSize[1]/2, mRoiSize[0], mRoiSize[1])).copyTo(roiPosDiff); + negDiff(Rect(subdivisionPos.x + j - mRoiSize[0]/2, subdivisionPos.y + i - mRoiSize[1]/2, mRoiSize[0], mRoiSize[1])).copyTo(roiNegDiff); + + if(roiPosDiff.type() == CV_16UC1 && roiNegDiff.type() == CV_16UC1) { + + unsigned char * ptrRoiAbsDiff; + unsigned short * ptrRoiPosDiff; + unsigned short * ptrRoiNegDiff; + + for(int a = 0; a < roiAbsDiff.rows; a++) { + + ptrRoiAbsDiff = roiAbsDiff.ptr(a); + ptrRoiPosDiff = roiPosDiff.ptr(a); + ptrRoiNegDiff = roiNegDiff.ptr(a); + + for(int b = 0; b < roiAbsDiff.cols; b++){ + + if(ptrRoiAbsDiff[b] > 0) whitePixAbsDiff.push_back(Point(subdivisionPos.x + j - mRoiSize[0]/2 + b, subdivisionPos.y + i - mRoiSize[1]/2 + a)); + if(ptrRoiPosDiff[b] > posDiffThreshold) whitePixPosDiff.push_back(Point(subdivisionPos.x + j - mRoiSize[0]/2 + b, subdivisionPos.y + i - mRoiSize[1]/2 + a)); + if(ptrRoiNegDiff[b] > negDiffThreshold) whitePixNegDiff.push_back(Point(subdivisionPos.x + j - mRoiSize[0]/2 + b, subdivisionPos.y + i - mRoiSize[1]/2 + a)); + + } + } + + }else if(roiPosDiff.type() == CV_8UC1 && roiNegDiff.type() == CV_8UC1) { + + unsigned char * ptrRoiAbsDiff; + unsigned char * ptrRoiPosDiff; + unsigned char * ptrRoiNegDiff; + + for(int a = 0; a < roiAbsDiff.rows; a++) { + + ptrRoiAbsDiff = roiAbsDiff.ptr(a); + ptrRoiPosDiff = roiPosDiff.ptr(a); + ptrRoiNegDiff = roiNegDiff.ptr(a); + + for(int b = 0; b < roiAbsDiff.cols; b++){ + + if(ptrRoiAbsDiff[b] > 0) whitePixAbsDiff.push_back(Point(subdivisionPos.x + j - mRoiSize[0]/2 + b, subdivisionPos.y + i - mRoiSize[1]/2 + a)); + if(ptrRoiPosDiff[b] > posDiffThreshold) 
whitePixPosDiff.push_back(Point(subdivisionPos.x + j - mRoiSize[0]/2 + b, subdivisionPos.y + i - mRoiSize[1]/2 + a)); + if(ptrRoiNegDiff[b] > negDiffThreshold) whitePixNegDiff.push_back(Point(subdivisionPos.x + j - mRoiSize[0]/2 + b, subdivisionPos.y + i - mRoiSize[1]/2 + a)); + + } + } + } + + msg = msg + "Number white pix in abs diff : " + Conversion::intToString(whitePixAbsDiff.size()) + "\n"; + msg = msg + "Number white pix in pos diff : " + Conversion::intToString(whitePixPosDiff.size()) + "\n"; + msg = msg + "Number white pix in neg diff : " + Conversion::intToString(whitePixNegDiff.size()) + "\n"; + + newLocalEvent.addAbs(whitePixAbsDiff); + newLocalEvent.addPos(whitePixPosDiff); + newLocalEvent.addNeg(whitePixNegDiff); + + // Update center of mass. + newLocalEvent.computeMassCenter(); + msg = msg + + " - Center of mass abs pixels : (" + + Conversion::intToString(newLocalEvent.getMassCenter().x) + ";" + Conversion::intToString(newLocalEvent.getMassCenter().y) + ")\n"; + + // Save the frame number where the local event has been created. + newLocalEvent.setNumFrame(numFrame); + // Save acquisition date of the frame. + newLocalEvent.mFrameAcqDate = cFrameDate; + // Add LE in the list of localEvent. + listLE.push_back(newLocalEvent); + // Update eventMap with the color of the new localEvent. + Mat roi(mRoiSize[1], mRoiSize[0], CV_8UC3, mListColors.at(listLE.size()-1)); + roi.copyTo(eventMap(Rect(subdivisionPos.x + j-mRoiSize[0]/2, subdivisionPos.y + i-mRoiSize[1]/2, mRoiSize[0], mRoiSize[1]))); + // Color roi in black in the current region. 
+ colorRoiInBlack(Point(j,i), mRoiSize[1], mRoiSize[0], subdivision); + + colorRoiInBlack(Point(subdivisionPos.x +j,subdivisionPos.y +i), mRoiSize[1], mRoiSize[0], absDiffBinaryMap); + colorRoiInBlack(Point(subdivisionPos.x +j,subdivisionPos.y +i), mRoiSize[1], mRoiSize[0], posDiff); + colorRoiInBlack(Point(subdivisionPos.x +j,subdivisionPos.y +i), mRoiSize[1], mRoiSize[0], negDiff); + + nbCreatedLE++; + + }else { + + nbNoCreatedLE++; + + } + } + + break; + + case 1 : + + { + + vector::iterator it; + int index = 0; + for(it=listLE.begin(); it!=listLE.end(); ++it){ + + // Try to find a local event which has the same color. + if((*it).getColor() == listColorInRoi.at(0)){ + + msg = msg + + "->Attach ROI (" + + Conversion::intToString(subdivisionPos.x + j) + ";" + Conversion::intToString(subdivisionPos.y + i) + ") with LE " + Conversion::intToString(index) + "\n"; + + // Extract white pixels in ROI. + vector whitePixAbsDiff,whitePixPosDiff, whitePixNegDiff; + Mat roiAbsDiff, roiPosDiff, roiNegDiff; + + absDiffBinaryMap(Rect(subdivisionPos.x + j - mRoiSize[0]/2, subdivisionPos.y + i - mRoiSize[1]/2, mRoiSize[0], mRoiSize[1])).copyTo(roiAbsDiff); + posDiff(Rect(subdivisionPos.x + j - mRoiSize[0]/2, subdivisionPos.y + i - mRoiSize[1]/2, mRoiSize[0], mRoiSize[1])).copyTo(roiPosDiff); + negDiff(Rect(subdivisionPos.x + j - mRoiSize[0]/2, subdivisionPos.y + i - mRoiSize[1]/2, mRoiSize[0], mRoiSize[1])).copyTo(roiNegDiff); + + if(roiPosDiff.type() == CV_16UC1 && roiNegDiff.type() == CV_16UC1) { + + unsigned char * ptrRoiAbsDiff; + unsigned short * ptrRoiPosDiff; + unsigned short * ptrRoiNegDiff; + + for(int a = 0; a < roiAbsDiff.rows; a++) { + + ptrRoiAbsDiff = roiAbsDiff.ptr(a); + ptrRoiPosDiff = roiPosDiff.ptr(a); + ptrRoiNegDiff = roiNegDiff.ptr(a); + + for(int b = 0; b < roiAbsDiff.cols; b++){ + + if(ptrRoiAbsDiff[b] > 0) whitePixAbsDiff.push_back(Point(subdivisionPos.x + j - mRoiSize[0]/2 + b, subdivisionPos.y + i - mRoiSize[1]/2 + a)); + if(ptrRoiPosDiff[b] > 
posDiffThreshold) whitePixPosDiff.push_back(Point(subdivisionPos.x + j - mRoiSize[0]/2 + b, subdivisionPos.y + i - mRoiSize[1]/2 + a)); + if(ptrRoiNegDiff[b] > negDiffThreshold) whitePixNegDiff.push_back(Point(subdivisionPos.x + j - mRoiSize[0]/2 + b, subdivisionPos.y + i - mRoiSize[1]/2 + a)); + + } + } + + }else if(roiPosDiff.type() == CV_8UC1 && roiNegDiff.type() == CV_8UC1) { + + unsigned char * ptrRoiAbsDiff; + unsigned char * ptrRoiPosDiff; + unsigned char * ptrRoiNegDiff; + + for(int a = 0; a < roiAbsDiff.rows; a++) { + + ptrRoiAbsDiff = roiAbsDiff.ptr(a); + ptrRoiPosDiff = roiPosDiff.ptr(a); + ptrRoiNegDiff = roiNegDiff.ptr(a); + + for(int b = 0; b < roiAbsDiff.cols; b++){ + + if(ptrRoiAbsDiff[b] > 0) whitePixAbsDiff.push_back(Point(subdivisionPos.x + j - mRoiSize[0]/2 + b, subdivisionPos.y + i - mRoiSize[1]/2 + a)); + if(ptrRoiPosDiff[b] > posDiffThreshold) whitePixPosDiff.push_back(Point(subdivisionPos.x + j - mRoiSize[0]/2 + b, subdivisionPos.y + i - mRoiSize[1]/2 + a)); + if(ptrRoiNegDiff[b] > negDiffThreshold) whitePixNegDiff.push_back(Point(subdivisionPos.x + j - mRoiSize[0]/2 + b, subdivisionPos.y + i - mRoiSize[1]/2 + a)); + + } + } + } + + msg = msg + "Number white pix in abs diff : " + Conversion::intToString(whitePixAbsDiff.size()) + "\n"; + msg = msg + "Number white pix in pos diff : " + Conversion::intToString(whitePixPosDiff.size()) + "\n"; + msg = msg + "Number white pix in neg diff : " + Conversion::intToString(whitePixNegDiff.size()) + "\n"; + + (*it).addAbs(whitePixAbsDiff); + (*it).addPos(whitePixPosDiff); + (*it).addNeg(whitePixNegDiff); + + // Add the current roi. 
+ (*it).mLeRoiList.push_back(Point(subdivisionPos.x + j, subdivisionPos.y + i)); + // Set local event 's map + (*it).setMap(Point(subdivisionPos.x + j - mRoiSize[0]/2, subdivisionPos.y + i - mRoiSize[1]/2), mRoiSize[1], mRoiSize[0]); + // Update center of mass + (*it).computeMassCenter(); + msg = msg + + " - Update Center of mass abs pixels of LE " + Conversion::intToString(index) + " : (" + + Conversion::intToString((*it).getMassCenter().x) + ";" + Conversion::intToString((*it).getMassCenter().y) + ")\n"; + + // Update eventMap with the color of the new localEvent + Mat roi(mRoiSize[1], mRoiSize[0], CV_8UC3, listColorInRoi.at(0)); + roi.copyTo(eventMap(Rect(subdivisionPos.x + j-mRoiSize[0]/2, subdivisionPos.y + i-mRoiSize[1]/2,mRoiSize[0],mRoiSize[1]))); + // Color roi in black in thresholded frame. + Mat roiBlack(mRoiSize[1],mRoiSize[0],CV_8UC1,Scalar(0)); + roiBlack.copyTo(absDiffBinaryMap(Rect(subdivisionPos.x + j-mRoiSize[0]/2, subdivisionPos.y + i-mRoiSize[1]/2,mRoiSize[0],mRoiSize[1]))); + // Color roi in black in the current region. + colorRoiInBlack(Point(j,i), mRoiSize[1], mRoiSize[0], subdivision); + + colorRoiInBlack(Point(subdivisionPos.x +j,subdivisionPos.y +i), mRoiSize[1], mRoiSize[0], absDiffBinaryMap); + colorRoiInBlack(Point(subdivisionPos.x +j,subdivisionPos.y +i), mRoiSize[1], mRoiSize[0], posDiff); + colorRoiInBlack(Point(subdivisionPos.x +j,subdivisionPos.y +i), mRoiSize[1], mRoiSize[0], negDiff); + + nbRoiAttachedToLE++; + + break; + + } + + index++; + } + } + + break; + + case 2 : + + { + nbRoiNotAnalysed++; + + /* vector::iterator it; + vector::iterator itLEbase; + it = listLE.begin(); + + vector::iterator it2; + it2 = listColorInRoi.begin(); + + bool LE = false; + bool colorFound = false; + + while (it != listLE.end()){ + + // Check if the current LE have a color. 
+ while (it2 != listColorInRoi.end()){ + + if((*it).getColor() == (*it2)){ + + colorFound = true; + it2 = listColorInRoi.erase(it2); + break; + + } + + ++it2; + } + + if(colorFound){ + + if(!LE){ + + itLEbase = it; + LE = true; + + (*it).LE_Roi.push_back(Point(areaPosition.x + j, areaPosition.y + i)); + + Mat tempMat = (*it).getMap(); + Mat roiTemp(roiSize[1],roiSize[0],CV_8UC1,Scalar(255)); + roiTemp.copyTo(tempMat(Rect(areaPosition.x + j - roiSize[0]/2, areaPosition.y + i - roiSize[1]/2, roiSize[0], roiSize[1]))); + (*it).setMap(tempMat); + + // Update center of mass + (*it).computeMassCenterWithRoi(); + + // Update eventMap with the color of the new localEvent group + Mat roi(roiSize[1], roiSize[0], CV_8UC3, listColorInRoi.at(0)); + roi.copyTo(eventMap(Rect(areaPosition.x + j-roiSize[0]/2, areaPosition.y + i-roiSize[1]/2,roiSize[0],roiSize[1]))); + + colorInBlack(j, i, areaPosX, areaPosY, areaPosition, area, frame); + + }else{ + + // Merge LE data + + Mat temp = (*it).getMap(); + Mat temp2 = (*itLEbase).getMap(); + Mat temp3 = temp + temp2; + (*itLEbase).setMap(temp3); + + (*itLEbase).LE_Roi.insert((*itLEbase).LE_Roi.end(), (*it).LE_Roi.begin(), (*it).LE_Roi.end()); + + it = listLE.erase(it); + + } + + colorFound = false; + + }else{ + + ++it; + + } + }*/ + + } + + break; + } + } + } + } +} + +msg = msg + + "--> RESUME REGION ANALYSE : \n" + + "Number of analysed ROI : " + + Conversion::intToString(nbROI) + "\n" + + "Number of not analysed ROI : " + + Conversion::intToString(nbRoiNotAnalysed) + "\n" + + "Number of new LE : " + + Conversion::intToString(nbCreatedLE) + "\n" + + "Number of updated LE :" + + Conversion::intToString(nbRoiAttachedToLE) + "\n"; + +} + + +/* + +// Create debug video. 
+if(dtp.DET_DEBUG_VIDEO) + mVideoDebug = VideoWriter(mDebugCurrentPath + "debug-video.avi", CV_FOURCC('M', 'J', 'P', 'G'), 5, Size(static_cast(1280), static_cast(960)), true); + +if(mdtp.DET_DEBUG_VIDEO){ + + // Create GE memory image + Mat GEMAP = Mat(currImg.rows, currImg.cols, CV_8UC3, Scalar(0,0,0)); + for(itGE = mListGlobalEvents.begin(); itGE!= mListGlobalEvents.end(); ++itGE){ + + GEMAP = GEMAP + (*itGE).getGeMapColor(); + + } + + if(mdtp.DET_DEBUG) SaveImg::saveBMP(GEMAP, mDebugCurrentPath + "/GEMAP/GEMAP_"+Conversion::intToString(c.mFrameNumber)); + + Mat VIDEO = Mat(960,1280, CV_8UC3,Scalar(255,255,255)); + Mat VIDEO_frame = Mat(470,630, CV_8UC3,Scalar(0,0,0)); + Mat VIDEO_diffFrame = Mat(470,630, CV_8UC3,Scalar(0,0,0)); + Mat VIDEO_threshFrame = Mat(470,630, CV_8UC3,Scalar(0,0,0)); + Mat VIDEO_eventFrame = Mat(470,630, CV_8UC3,Scalar(0,0,0)); + Mat VIDEO_geFrame = Mat(470,630, CV_8UC3,Scalar(0,0,0)); + + cvtColor(currImg, currImg, CV_GRAY2BGR); + resize(currImg, VIDEO_frame, Size(630,470), 0, 0, INTER_LINEAR ); + cvtColor(absDiffBinaryMap, absDiffBinaryMap, CV_GRAY2BGR); + resize(absDiffBinaryMap, VIDEO_threshFrame, Size(630,470), 0, 0, INTER_LINEAR ); + resize(eventMap, VIDEO_eventFrame, Size(630,470), 0, 0, INTER_LINEAR ); + resize(GEMAP, VIDEO_geFrame, Size(630,470), 0, 0, INTER_LINEAR ); + + copyMakeBorder(VIDEO_frame, VIDEO_frame, 5, 5, 5, 5, BORDER_CONSTANT, Scalar(255,255,255) ); + copyMakeBorder(VIDEO_threshFrame, VIDEO_threshFrame, 5, 5, 5, 5, BORDER_CONSTANT, Scalar(255,255,255) ); + copyMakeBorder(VIDEO_eventFrame, VIDEO_eventFrame, 5, 5, 5, 5, BORDER_CONSTANT, Scalar(255,255,255) ); + copyMakeBorder(VIDEO_geFrame, VIDEO_geFrame, 5, 5, 5, 5, BORDER_CONSTANT, Scalar(255,255,255) ); + + putText(VIDEO_frame, "Original", cvPoint(300,450),FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(0,0,255), 1, CV_AA); + putText(VIDEO_threshFrame, "Filtering", cvPoint(300,450),FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(0,0,255), 1, CV_AA); + putText(VIDEO_eventFrame, 
"Local Event Map", cvPoint(300,450),FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(0,0,255), 1, CV_AA); + putText(VIDEO_geFrame, "Global Event Map", cvPoint(300,450),FONT_HERSHEY_COMPLEX_SMALL, 0.8, cvScalar(0,0,255), 1, CV_AA); + + VIDEO_frame.copyTo(VIDEO(Rect(0, 0, 640, 480))); + VIDEO_threshFrame.copyTo(VIDEO(Rect(640, 0, 640, 480))); + VIDEO_eventFrame.copyTo(VIDEO(Rect(0, 480, 640, 480))); + VIDEO_geFrame.copyTo(VIDEO(Rect(640, 480, 640, 480))); + + string fn = Conversion::intToString(c.mFrameNumber); + const char * fn_c; + fn_c = fn.c_str(); + + putText(VIDEO, fn_c, cvPoint(30,50),FONT_HERSHEY_COMPLEX_SMALL, 2, cvScalar(0,255,0), 2, CV_AA); + + if(mVideoDebug.isOpened()){ + + mVideoDebug << VIDEO; + + } + +} + +*/ diff --git a/DetectionTemporal.h b/DetectionTemporal.h new file mode 100644 index 0000000..74be68d --- /dev/null +++ b/DetectionTemporal.h @@ -0,0 +1,193 @@ +/* + DetectionTemporal.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . 
+* +* Last modified: 20/10/2014 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file DetectionTemporal.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 03/06/2014 +* \brief Detection method by temporal analysis. +*/ + +#pragma once + +#include "config.h" + +#ifdef LINUX + #define BOOST_LOG_DYN_LINK 1 +#endif + +#include +#include "opencv2/highgui/highgui.hpp" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "ELogSeverityLevel.h" +#include "TimeDate.h" +#include "Fits2D.h" +#include "Fits.h" +#include "Frame.h" +#include "ImgProcessing.h" +#include "EStackMeth.h" +#include "ECamPixFmt.h" +#include "GlobalEvent.h" +#include "LocalEvent.h" +#include "Detection.h" +#include "EParser.h" +#include "SaveImg.h" +#include +#include +#include +#include +#include +#include +#include "Mask.h" + +//My modification +#include "opencv2/features2d/features2d.hpp" +#include "opencv2/core/types_c.h" + + +using namespace boost::filesystem; +namespace logging = boost::log; +namespace sinks = boost::log::sinks; +namespace attrs = boost::log::attributes; +namespace src = boost::log::sources; +namespace expr = boost::log::expressions; +namespace keywords = boost::log::keywords; +using namespace std; +using namespace cv; + +/*struct Yuno{ + Yuno(const uint _i, const uint _cpt) : i(_i), cpt(_cpt) {} + uint i; + uint cpt; +};*/ + +class DetectionTemporal : public Detection { + + private : + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init { + + public : + + Init() { + + logger.add_attribute("ClassName", boost::log::attributes::constant("DetectionTemporal")); + + } + + }initializer; + + vector mListGlobalEvents; // List of global events (Events spread on several frames). + vector mSubdivisionPos; // Position (origin in top left) of 64 subdivisions. 
+ vector mListColors; // One color per local event. + Mat mLocalMask; // Mask used to remove isolated white pixels. + bool mSubdivisionStatus; // If subdivisions positions have been computed. + Mat mPrevThresholdedMap; + vector::iterator mGeToSave; // Global event to save. + int mRoiSize[2]; + int mImgNum; // Current frame number. + Mat mPrevFrame; // Previous frame. + Mat mStaticMask; + string mDebugCurrentPath; + int mDataSetCounter; + bool mDebugUpdateMask; + Mask *mMaskManager; + vector debugFiles; + detectionParam mdtp; + VideoWriter mVideoDebugAutoMask; + + public : + + DetectionTemporal(detectionParam dp, CamPixFmt fmt); + + ~DetectionTemporal(); + + void initMethod(string cfgPath); + + bool runDetection(Frame &c); + + void saveDetectionInfos(string p, int nbFramesAround); + + void resetDetection(bool loadNewDataSet); + + void resetMask(); + + int getEventFirstFrameNb() {return (*mGeToSave).getNumFirstFrame();}; + + TimeDate::Date getEventDate() {return (*mGeToSave).getDate();}; + + int getEventLastFrameNb() {return (*mGeToSave).getNumLastFrame();}; + + vector getDebugFiles(); + + bool madDetection(std::vector &coord_x, std::vector &coord_y); + + private : + + void createDebugDirectories(bool cleanDebugDirectory); + + int selectThreshold(Mat i); + + vector getColorInEventMap(Mat &eventMap, Point roiCenter); + + void colorRoiInBlack(Point p, int h, int w, Mat ®ion); + + void analyseRegion( Mat &subdivision, + Mat &absDiffBinaryMap, + Mat &eventMap, + Mat &posDiff, + int posDiffThreshold, + Mat &negDiff, + int negDiffThreshold, + vector &listLE, + Point subdivisionPos, + int maxNbLE, + int numFrame, + string &msg, + TimeDate::Date cFrameDate); + + + + + +}; + diff --git a/Device.cpp b/Device.cpp new file mode 100644 index 0000000..e811e35 --- /dev/null +++ b/Device.cpp @@ -0,0 +1,681 @@ +/* + Device.cpp + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan 
Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . +* +* Last modified: 20/07/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file Device.cpp +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 02/09/2014 +* \brief +*/ + +#include "Device.h" + +boost::log::sources::severity_logger< LogSeverityLevel > Device::logger; + +Device::Init Device::initializer; + +Device::Device(cameraParam cp, framesParam fp, videoParam vp, int cid) { + + mCam = NULL; + mCamID = 0; + mVerbose = true; + mNbDev = -1; + mVideoFramesInput = false; + mGenCamID = cid; + mFPS = cp.ACQ_FPS; + mNightExposure = cp.ACQ_NIGHT_EXPOSURE; + mNightGain = cp.ACQ_NIGHT_GAIN; + mDayExposure = cp.ACQ_DAY_EXPOSURE; + mDayGain = cp.ACQ_DAY_GAIN; + mMinExposureTime = -1; + mMaxExposureTime = -1; + mMinGain = -1; + mMaxGain = -1; + mShiftBits = cp.SHIFT_BITS; + mFormat = cp.ACQ_FORMAT; + mCustomSize = cp.ACQ_RES_CUSTOM_SIZE; + mSizeWidth = cp.ACQ_WIDTH; + mSizeHeight = cp.ACQ_HEIGHT; + mDeviceType = UNDEFINED_INPUT_TYPE; + mvp = vp; + mfp = fp; + //mNbFrame = 0; +} + +Device::Device() { + + mFormat = MONO8; + mNightExposure = 0; + mNightGain = 0; + mDayExposure = 0; + mDayGain = 0; + mMinExposureTime = -1; + mMaxExposureTime = -1; + mMinGain = -1; + mMaxGain = -1; + mFPS = 30; + mCamID = 0; + mGenCamID = 0; + mCustomSize = false; 
+ mSizeWidth = 640; + mSizeHeight = 480; + mCam = NULL; + mVideoFramesInput = false; + mShiftBits = false; + mNbDev = -1; + mVerbose = true; + mDeviceType = UNDEFINED_INPUT_TYPE; + vector finput, vinput; + mvp.INPUT_VIDEO_PATH = vinput; + mfp.INPUT_FRAMES_DIRECTORY_PATH = finput; + //mNbFrame = 0; + +} + +Device::~Device(){ + + if(mCam != NULL) + delete mCam; + +} + +bool Device::createCamera(int id, bool create) { + + if(id >=0 && id < mDevices.size()) { + + // Create Camera object with the correct sdk. + if(!createDevicesWith(mDevices.at(id).second.second)){ + cout << "Fail to select correct sdk : "<< mDevices.at(id).second.second << endl; + return false; + } + + mCamID = mDevices.at(id).first; + + if(mCam != NULL) { + if(create) { + if(!mCam->createDevice(mCamID)){ + BOOST_LOG_SEV(logger, fail) << "Fail to create device with ID : " << id; + mCam->grabCleanse(); + return false; + }else{ + //BOOST_LOG_SEV(logger, fail) << "Success to create device with ID : " << id; + } + } + return true; + } + } + + BOOST_LOG_SEV(logger, fail) << "No device with ID " << id; + + return false; + +} + +bool Device::createCamera() { + + if(mGenCamID >=0 && mGenCamID < mDevices.size()) { + + // Create Camera object with the correct sdk. 
+ if(!createDevicesWith(mDevices.at(mGenCamID).second.second)) + return false; + + mCamID = mDevices.at(mGenCamID).first; + + if(mCam != NULL) { + if(!mCam->createDevice(mCamID)){ + BOOST_LOG_SEV(logger, fail) << "Fail to create device with ID : " << mGenCamID; + mCam->grabCleanse(); + return false; + } + return true; + } + + } + + BOOST_LOG_SEV(logger, fail) << "No device with ID " << mGenCamID; + + return false; + +} + +bool Device::recreateCamera() { + + if(mGenCamID >=0 && mGenCamID < mDevices.size()) { + + mCamID = mDevices.at(mGenCamID).first; + + if(mCam != NULL) { + if(!mCam->createDevice(mCamID)){ + BOOST_LOG_SEV(logger, fail) << "Fail to create device with ID : " << mGenCamID; + mCam->grabCleanse(); + return false; + } + return true; + } + + } + + BOOST_LOG_SEV(logger, fail) << "No device with ID " << mGenCamID; + + return false; + +} + +void Device::setVerbose(bool status) { + + mVerbose = status; + +} + +CamSdkType Device::getDeviceSdk(int id){ + + if(id >=0 && id < mDevices.size()) { + return mDevices.at(id).second.second; + } + + return UNKNOWN; + +} + +bool Device::createDevicesWith(CamSdkType sdk) { + + switch(sdk) { + + case VIDEOFILE : + + { + mVideoFramesInput = true; + mCam = new CameraVideo(mvp.INPUT_VIDEO_PATH, mVerbose); + mCam->grabInitialization(); + } + + break; + + case FRAMESDIR : + + { + + mVideoFramesInput = true; + // Create camera using pre-recorded fits2D in input. 
+ mCam = new CameraFrames(mfp.INPUT_FRAMES_DIRECTORY_PATH, 1, mVerbose); + if(!mCam->grabInitialization()) + throw "Fail to prepare acquisition on the first frames directory."; + + } + + break; + + case V4L2 : + + { + #ifdef LINUX + mCam = new CameraV4l2(); + #endif + } + + break; + + case VIDEOINPUT : + + { + #ifdef WINDOWS + mCam = new CameraWindows(); + #endif + } + + break; + + case ARAVIS : + + { + #ifdef LINUX + mCam = new CameraGigeAravis(mShiftBits); + #endif + } + + break; + + case PYLONGIGE : + + { + #ifdef WINDOWS + mCam = new CameraGigePylon(); + #endif + } + + break; + + case TIS : + + { + #ifdef WINDOWS + mCam = new CameraGigeTis(); + #endif + } + + break; + + default : + + cout << "Unknown sdk." << endl; + + } + + return true; + +} + +InputDeviceType Device::getDeviceType(CamSdkType t) { + + switch(t){ + + case VIDEOFILE : + return VIDEO; + break; + + case FRAMESDIR : + return SINGLE_FITS_FRAME; + break; + + case V4L2 : + case VIDEOINPUT : + case ARAVIS : + case PYLONGIGE : + case TIS : + return CAMERA; + break; + + case UNKNOWN : + return UNDEFINED_INPUT_TYPE; + break; + } + + return UNDEFINED_INPUT_TYPE; +} + +void Device::listDevices(bool printInfos) { + + int nbCam = 0; + mNbDev = 0; + pair elem; // general index to specify camera to use + pair> subElem; // index in a specific sdk + vector> listCams; + + #ifdef WINDOWS + + // PYLONGIGE + + mCam = new CameraGigePylon(); + listCams = mCam->getCamerasList(); + for(int i = 0; i < listCams.size(); i++) { + elem.first = mNbDev; elem.second = PYLONGIGE; + subElem.first = listCams.at(i).first; subElem.second = elem; + mDevices.push_back(subElem); + if(printInfos) cout << "[" << mNbDev << "] " << listCams.at(i).second << endl; + mNbDev++; + } + delete mCam; + + // TIS + + mCam = new CameraGigeTis(); + listCams = mCam->getCamerasList(); + for(int i = 0; i < listCams.size(); i++) { + elem.first = mNbDev; elem.second = TIS; + subElem.first = listCams.at(i).first; subElem.second = elem; + 
mDevices.push_back(subElem); + if(printInfos) cout << "[" << mNbDev << "] " << listCams.at(i).second << endl; + mNbDev++; + } + delete mCam; + + // WINDOWS + + mCam = new CameraWindows(); + listCams = mCam->getCamerasList(); + for(int i = 0; i < listCams.size(); i++) { + + // Avoid to list basler + std::string::size_type pos1 = listCams.at(i).second.find("Basler"); + std::string::size_type pos2 = listCams.at(i).second.find("BASLER"); + if((pos1 != std::string::npos) || (pos2 != std::string::npos)) { + //std::cout << "found \"words\" at position " << pos1 << std::endl; + } else { + elem.first = mNbDev; elem.second = VIDEOINPUT; + subElem.first = listCams.at(i).first; subElem.second = elem; + mDevices.push_back(subElem); + if(printInfos) cout << "[" << mNbDev << "] " << listCams.at(i).second << endl; + mNbDev++; + } + } + + delete mCam; + + #else + + // ARAVIS + + createDevicesWith(ARAVIS); + listCams = mCam->getCamerasList(); + for(int i = 0; i < listCams.size(); i++) { + elem.first = mNbDev; elem.second = ARAVIS; + subElem.first = listCams.at(i).first; subElem.second = elem; + mDevices.push_back(subElem); + if(printInfos) cout << "[" << mNbDev << "] " << listCams.at(i).second << endl; + mNbDev++; + } + delete mCam; + + // V4L2 + + createDevicesWith(V4L2); + listCams = mCam->getCamerasList(); + for(int i = 0; i < listCams.size(); i++) { + elem.first = mNbDev; elem.second = V4L2; + subElem.first = listCams.at(i).first; subElem.second = elem; + mDevices.push_back(subElem); + if(printInfos) cout << "[" << mNbDev << "] " << listCams.at(i).second << endl; + mNbDev++; + } + delete mCam; + + #endif + + // VIDEO + + elem.first = mNbDev; elem.second = VIDEOFILE; + subElem.first = 0; subElem.second = elem; + mDevices.push_back(subElem); + if(printInfos) cout << "[" << mNbDev << "] VIDEO FILES" << endl; + mNbDev++; + + // FRAMES + + elem.first = mNbDev; elem.second = FRAMESDIR; + subElem.first = 0; subElem.second = elem; + mDevices.push_back(subElem); + if(printInfos) cout << 
"[" << mNbDev << "] FRAMES DIRECTORY" << endl; + mNbDev++; + + mCam = NULL; + +} + +bool Device::getDeviceName() { + + return mCam->getCameraName(); + +} + +bool Device::setCameraPixelFormat() { + + if(!mCam->setPixelFormat(mFormat)){ + mCam->grabCleanse(); + BOOST_LOG_SEV(logger,fail) << "Fail to set camera format."; + return false; + } + + return true; +} + +bool Device::getSupportedPixelFormats() { + + + mCam->getAvailablePixelFormats(); + return true; + +} + +InputDeviceType Device::getDeviceType() { + + return mCam->getDeviceType(); + +} + +bool Device::getCameraExposureBounds(double &min, double &max) { + + mCam->getExposureBounds(min, max); + return true; +} + +void Device::getCameraExposureBounds() { + + mCam->getExposureBounds(mMinExposureTime, mMaxExposureTime); + +} + +bool Device::getCameraGainBounds(int &min, int &max) { + + mCam->getGainBounds(min, max); + return true; +} + +void Device::getCameraGainBounds() { + + mCam->getGainBounds(mMinGain, mMaxGain); + +} + +bool Device::setCameraNightExposureTime() { + + if(!mCam->setExposureTime(mNightExposure)) { + BOOST_LOG_SEV(logger, fail) << "Fail to set night exposure time to " << mNightExposure; + mCam->grabCleanse(); + return false; + } + + return true; + +} + +bool Device::setCameraDayExposureTime() { + + if(!mCam->setExposureTime(mDayExposure)) { + BOOST_LOG_SEV(logger, fail) << "Fail to set day exposure time to " << mDayExposure; + mCam->grabCleanse(); + return false; + } + + return true; + +} + +bool Device::setCameraNightGain() { + + if(!mCam->setGain(mNightGain)) { + BOOST_LOG_SEV(logger, fail) << "Fail to set night gain to " << mNightGain; + mCam->grabCleanse(); + return false; + } + + return true; + +} + +bool Device::setCameraDayGain() { + + if(!mCam->setGain(mDayGain)) { + BOOST_LOG_SEV(logger, fail) << "Fail to set day gain to " << mDayGain; + mCam->grabCleanse(); + return false; + } + + return true; + +} + +bool Device::setCameraExposureTime(double value) { + + 
if(!mCam->setExposureTime(value)) { + BOOST_LOG_SEV(logger, fail) << "Fail to set exposure time to " << value; + mCam->grabCleanse(); + return false; + } + + return true; + +} + +bool Device::setCameraGain(int value) { + + if(!mCam->setGain(value)) { + BOOST_LOG_SEV(logger, fail) << "Fail to set gain to " << value; + mCam->grabCleanse(); + return false; + } + + return true; + +} + +bool Device::setCameraFPS() { + + if(!mCam->setFPS(mFPS)) { + BOOST_LOG_SEV(logger, fail) << "Fail to set FPS to " << mFPS; + mCam->grabCleanse(); + return false; + } + + return true; + +} + +bool Device::initializeCamera() { + + if(!mCam->grabInitialization()){ + BOOST_LOG_SEV(logger, fail) << "Fail to initialize camera."; + mCam->grabCleanse(); + return false; + } + + return true; + +} + +bool Device::startCamera() { + + BOOST_LOG_SEV(logger, notification) << "Starting camera..."; + if(!mCam->acqStart()) + return false; + + return true; + +} + +bool Device::stopCamera() { + + BOOST_LOG_SEV(logger, notification) << "Stopping camera..."; + mCam->acqStop(); + mCam->grabCleanse(); + return true; + +} + +bool Device::runContinuousCapture(Frame &img) { + + if(mCam->grabImage(img)) { + //img.mFrameNumber = mNbFrame; + //mNbFrame++; + return true; + } + + return false; + +} + +bool Device::runSingleCapture(Frame &img) { + + if(mCam->grabSingleImage(img, mCamID)) + return true; + + return false; + +} + +bool Device::setCameraSize() { + + if(!mCam->setSize(mSizeWidth, mSizeHeight, mCustomSize)) { + BOOST_LOG_SEV(logger, fail) << "Fail to set camera size."; + return false; + } + + return true; + +} + +bool Device::setCameraSize(int w, int h) { + + if(!mCam->setSize(w, h, true)) { + BOOST_LOG_SEV(logger, fail) << "Fail to set camera size."; + return false; + } + + return true; + +} + +bool Device::getCameraFPS(double &fps) { + + if(!mCam->getFPS(fps)) { + //BOOST_LOG_SEV(logger, fail) << "Fail to get fps value from camera."; + return false; + } + + return true; + +} + +bool 
Device::getCameraStatus() { + + return mCam->getStopStatus(); + +} + +bool Device::getCameraDataSetStatus() { + + return mCam->getDataSetStatus(); + +} + +bool Device::loadNextCameraDataSet(string &location) { + + return mCam->loadNextDataSet(location); + +} + +bool Device::getExposureStatus() { + + return mCam->mExposureAvailable; + +} + +bool Device::getGainStatus() { + + return mCam->mGainAvailable; + +} diff --git a/Device.h b/Device.h new file mode 100644 index 0000000..8968faa --- /dev/null +++ b/Device.h @@ -0,0 +1,224 @@ +/* + Device.h + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +* +* This file is part of: freeture +* +* Copyright: (C) 2014-2015 Yoan Audureau +* FRIPON-GEOPS-UPSUD-CNRS +* +* License: GNU General Public License +* +* FreeTure is free software: you can redistribute it and/or modify +* it under the terms of the GNU General Public License as published by +* the Free Software Foundation, either version 3 of the License, or +* (at your option) any later version. +* FreeTure is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* You should have received a copy of the GNU General Public License +* along with FreeTure. If not, see . 
+* +* Last modified: 21/01/2015 +* +*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ + +/** +* \file Device.h +* \author Yoan Audureau -- FRIPON-GEOPS-UPSUD +* \version 1.0 +* \date 13/06/2014 +* \brief +*/ + +#pragma once + +#include "config.h" + +#ifdef LINUX +#define BOOST_LOG_DYN_LINK 1 +#endif + +#include "opencv2/highgui/highgui.hpp" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "ELogSeverityLevel.h" +#include "EImgBitDepth.h" +#include "ECamPixFmt.h" +#include "EParser.h" +#include "Conversion.h" +#include "Camera.h" +#include "CameraGigeAravis.h" +#include "CameraGigePylon.h" +#include "CameraGigeTis.h" +#include "CameraVideo.h" +#include "CameraV4l2.h" +#include "CameraFrames.h" +#include "CameraWindows.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include "EInputDeviceType.h" +#include "ECamSdkType.h" +#include "SParam.h" + +using namespace boost::filesystem; +using namespace cv; +using namespace std; + +class Device { + + public : + + bool mVideoFramesInput; // TRUE if input is a video file or frames directories. + + private : + + static boost::log::sources::severity_logger< LogSeverityLevel > logger; + + static class Init { + + public: + + Init() { + + logger.add_attribute("ClassName", boost::log::attributes::constant("Device")); + + } + + } initializer; + + vector>> mDevices; + + bool mCustomSize; + int mSizeWidth; + int mSizeHeight; + int mNightExposure; + int mNightGain; + int mDayExposure; + int mDayGain; + int mFPS; + int mCamID; // ID in a specific sdk. + int mGenCamID; // General ID. 
+ Camera *mCam; + bool mShiftBits; + bool mVerbose; + framesParam mfp; + videoParam mvp; + + public : + + int mNbDev; + CamPixFmt mFormat; + string mCfgPath; + InputDeviceType mDeviceType; + double mMinExposureTime; + double mMaxExposureTime; + int mMinGain; + int mMaxGain; + //int mNbFrame; + + public : + + Device(cameraParam cp, framesParam fp, videoParam vp, int cid); + + Device(); + + ~Device(); + + InputDeviceType getDeviceType(CamSdkType t); + + CamSdkType getDeviceSdk(int id); + + void listDevices(bool printInfos); + + bool createCamera(int id, bool create); + + bool createCamera(); + + bool initializeCamera(); + + bool runContinuousCapture(Frame &img); + + bool runSingleCapture(Frame &img); + + bool startCamera(); + + bool stopCamera(); + + bool setCameraPixelFormat(); + + bool getCameraGainBounds(int &min, int &max); + + void getCameraGainBounds(); + + bool getCameraExposureBounds(double &min, double &max); + + void getCameraExposureBounds(); + + bool getDeviceName(); + + bool recreateCamera(); + + InputDeviceType getDeviceType(); + + bool setCameraNightExposureTime(); + + bool setCameraDayExposureTime(); + + bool setCameraNightGain(); + + bool setCameraDayGain(); + + bool setCameraExposureTime(double value); + + bool setCameraGain(int value); + + bool setCameraFPS(); + + bool setCameraSize(); + + bool getCameraFPS(double &fps); + + bool getCameraStatus(); + + bool getCameraDataSetStatus(); + + bool getSupportedPixelFormats(); + + bool loadNextCameraDataSet(string &location); + + bool getExposureStatus(); + + bool getGainStatus(); + + bool setCameraSize(int w, int h); + + int getNightExposureTime() {return mNightExposure;}; + int getNightGain() {return mNightGain;}; + int getDayExposureTime() {return mDayExposure;}; + int getDayGain() {return mDayGain;}; + + void setVerbose(bool status); + + private : + + bool createDevicesWith(CamSdkType sdk); + +}; diff --git a/Doxyfile b/Doxyfile new file mode 100644 index 0000000..8801584 --- /dev/null +++ b/Doxyfile 
@@ -0,0 +1,2329 @@ +# Doxyfile 1.8.7 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all text +# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv +# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv +# for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = "FriponCapture" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = 1.0 + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. 
+ +PROJECT_BRIEF = + +# With the PROJECT_LOGO tag one can specify a logo or icon that is included in +# the documentation. The maximum height of the logo should not exceed 55 pixels +# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo +# to the output directory. + +PROJECT_LOGO = logoFripon.jpg + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +OUTPUT_DIRECTORY = + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub- +# directories (in 2 levels) under the output directory of each output format and +# will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise cause +# performance problems for the file system. +# The default value is: NO. + +CREATE_SUBDIRS = NO + +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. 
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, +# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), +# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, +# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), +# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, +# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, +# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, +# Ukrainian and Vietnamese. +# The default value is: English. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. 
+ +ABBREVIATE_BRIEF = + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path +# before files name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. 
+
+STRIP_FROM_INC_PATH =
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
+# less readable) file names. This can be useful if your file system doesn't
+# support long names like on DOS, Mac, or CD-ROM.
+# The default value is: NO.
+
+SHORT_NAMES = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
+# first line (until the first dot) of a Javadoc-style comment as the brief
+# description. If set to NO, the Javadoc-style will behave just like regular Qt-
+# style comments (thus requiring an explicit @brief command for a brief
+# description.)
+# The default value is: NO.
+
+JAVADOC_AUTOBRIEF = NO
+
+# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
+# line (until the first dot) of a Qt-style comment as the brief description. If
+# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
+# requiring an explicit \brief command for a brief description.)
+# The default value is: NO.
+
+QT_AUTOBRIEF = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
+# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
+# a brief description. This used to be the default behavior. The new default is
+# to treat a multi-line C++ comment block as a detailed description. Set this
+# tag to YES if you prefer the old behavior instead.
+#
+# Note that setting this tag to YES also means that Rational Rose comments are
+# not recognized any more.
+# The default value is: NO.
+
+MULTILINE_CPP_IS_BRIEF = NO
+
+# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
+# documentation from any documented member that it re-implements.
+# The default value is: YES.
+
+INHERIT_DOCS = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a
+# new page for each member. If set to NO, the documentation of a member will be
+# part of the file/class/namespace that contains it.
+# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 4 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:\n" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". You can put \n's in the value part of an alias to insert +# newlines. + +ALIASES = + +# This tag can be used to specify a number of word-keyword mappings (TCL only). +# A mapping has the form "name=value". For example adding "class=itcl::class" +# will allow you to use the command class in the itcl::class meaning. + +TCL_SUBST = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. +# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. 
Doxygen will then generate output that is tailored for VHDL.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_VHDL = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension, and
+# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
+# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran:
+# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran:
+# Fortran. In the latter case the parser tries to guess whether the code is fixed
+# or free formatted code, this is the default for Fortran type files), VHDL. For
+# instance to make doxygen treat .inc files as Fortran files (default is PHP),
+# and .f files as C (default is Fortran), use: inc=Fortran f=C.
+#
+# Note: For files without extension you can use no_extension as a placeholder.
+#
+# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
+# the files are not read by doxygen.
+
+EXTENSION_MAPPING =
+
+# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
+# according to the Markdown format, which allows for more readable
+# documentation. See http://daringfireball.net/projects/markdown/ for details.
+# The output of markdown processing is further processed by doxygen, so you can
+# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
+# case of backward compatibility issues.
+# The default value is: YES.
+
+MARKDOWN_SUPPORT = YES
+
+# When enabled doxygen tries to link words that correspond to documented
+# classes, or namespaces to their corresponding documentation. Such a link can
+# be prevented in individual cases by putting a % sign in front of the word
+# or globally by setting AUTOLINK_SUPPORT to NO.
+
+# The default value is: YES.
+
+AUTOLINK_SUPPORT = YES
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should set this
+# tag to YES in order to let doxygen match function declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string);
+# versus func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+# The default value is: NO.
+
+BUILTIN_STL_SUPPORT = NO
+
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
+# enable parsing support.
+# The default value is: NO.
+
+CPP_CLI_SUPPORT = NO
+
+# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
+# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
+# will parse them like normal C++ but will assume all classes use public instead
+# of private inheritance when no explicit protection keyword is present.
+# The default value is: NO.
+
+SIP_SUPPORT = NO
+
+# For Microsoft's IDL there are propget and propput attributes to indicate
+# getter and setter methods for a property. Setting this option to YES will make
+# doxygen replace the get and set methods by a property in the documentation.
+# This will only work if the methods are indeed getting or setting a simple
+# type. If this is not the case, or you want to show the methods anyway, you
+# should set this option to NO.
+# The default value is: YES.
+
+IDL_PROPERTY_SUPPORT = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES, then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+# The default value is: NO.
+ +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. + +INLINE_SIMPLE_STRUCTS = NO + +# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. 
+ +TYPEDEF_HIDES_STRUCT = NO + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. + +LOOKUP_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available. Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +EXTRACT_ALL = NO + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will +# be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file will be +# included in the documentation. +# The default value is: NO. 
+ +EXTRACT_STATIC = NO + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. When set to YES local methods, +# which are defined in the implementation section but not in the interface are +# included in the documentation. If set to NO only methods in the interface are +# included. +# The default value is: NO. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base name of +# the file that contains the anonymous namespace. By default anonymous namespace +# are hidden. +# The default value is: NO. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all +# undocumented members inside documented classes or files. If set to NO these +# members will be included in the various overviews, but no documentation +# section is generated. This option has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO these classes will be included in the various overviews. This option has +# no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# (class|struct|union) declarations. If set to NO these declarations will be +# included in the documentation. +# The default value is: NO. 
+ +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. If set to NO these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = NO + +# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file +# names in lower-case letters. If set to YES upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. +# The default value is: system dependent. + +CASE_SENSE_NAMES = YES + +# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with +# their full class and namespace scopes in the documentation. If set to YES the +# scope will be hidden. +# The default value is: NO. + +HIDE_SCOPE_NAMES = NO + +# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of +# the files that are included by a file in the documentation of that file. +# The default value is: YES. + +SHOW_INCLUDE_FILES = YES + +# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each +# grouped member an include statement to the documentation, telling the reader +# which file to include in order to use the member. +# The default value is: NO. + +SHOW_GROUPED_MEMB_INC = NO + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include +# files with double quotes in the documentation rather than with sharp brackets. +# The default value is: NO. 
+ +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO the members will appear in declaration order. Note that +# this will also influence the order of the classes in the class list. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. +# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. 
+# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. + +SORT_BY_SCOPE_NAME = NO + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable ( YES) or disable ( NO) the +# todo list. This list is created by putting \todo commands in the +# documentation. +# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable ( YES) or disable ( NO) the +# test list. This list is created by putting \test commands in the +# documentation. +# The default value is: YES. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable ( YES) or disable ( NO) the bug +# list. This list is created by putting \bug commands in the documentation. +# The default value is: YES. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable ( YES) or disable ( NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if ... \endif and \cond +# ... \endcond blocks. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. 
If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. +# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES the list +# will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command command input-file, where command is the value of the +# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided +# by doxygen. Whatever the program writes to standard output is used as the file +# version. For an example see the documentation. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed +# by doxygen. The layout file controls the global structure of the generated +# output files in an output format independent way. 
To create the layout file +# that represents doxygen's defaults, run doxygen with the -l option. You can +# optionally specify a file name after the option, if omitted DoxygenLayout.xml +# will be used as the name of the layout file. +# +# Note that if you run doxygen from a directory containing a file called +# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE +# tag is left empty. + +LAYOUT_FILE = + +# The CITE_BIB_FILES tag can be used to specify one or more bib files containing +# the reference definitions. This must be a list of .bib files. The .bib +# extension is automatically appended if omitted. This requires the bibtex tool +# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info. +# For LaTeX the style of the bibliography can be controlled using +# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the +# search path. Do not use file names with spaces, bibtex cannot handle them. See +# also \cite for info how to create references. + +CITE_BIB_FILES = + +#--------------------------------------------------------------------------- +# Configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated to +# standard output by doxygen. If QUIET is set to YES this implies that the +# messages are off. +# The default value is: NO. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated to standard error ( stderr) by doxygen. If WARNINGS is set to YES +# this implies that the warnings are on. +# +# Tip: Turn warnings on while writing the documentation. +# The default value is: YES. + +WARNINGS = YES + +# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate +# warnings for undocumented members. 
If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: YES. + +WARN_IF_UNDOCUMENTED = YES + +# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some parameters +# in a documented function, or documenting parameters that don't exist or using +# markup commands wrongly. +# The default value is: YES. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that +# are documented, but have no documentation for their parameters or return +# value. If set to NO doxygen will only warn about wrong or incomplete parameter +# documentation, but not about the absence of documentation. +# The default value is: NO. + +WARN_NO_PARAMDOC = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). + +WARN_LOGFILE = + +#--------------------------------------------------------------------------- +# Configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag is used to specify the files and/or directories that contain +# documented source files. You may enter file names like myfile.cpp or +# directories like /usr/src/myproject. Separate the files or directories with +# spaces. 
+# Note: If this tag is empty the current directory is searched. + +INPUT = + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses +# libiconv (or the iconv built into libc) for the transcoding. See the libiconv +# documentation (see: http://www.gnu.org/software/libiconv) for the list of +# possible encodings. +# The default value is: UTF-8. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. If left blank the +# following patterns are tested:*.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii, +# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, +# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, +# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, +# *.qsf, *.as and *.js. + +FILE_PATTERNS = + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = NO + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. +# +# Note that relative paths are relative to the directory from which doxygen is +# run. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. 
+
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
+
+EXCLUDE_SYMBOLS =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS =
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output.
If FILTER_PATTERNS is specified, this tag +# will be ignored. +# +# Note that the filter must not add or remove lines; it is applied before the +# code is scanned, but not when the output code is generated. If lines are added +# or removed, the anchors will not be placed correctly. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. The filters are a list of the form: pattern=filter +# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how +# filters are used. If the FILTER_PATTERNS tag is empty or if none of the +# patterns match the file name, INPUT_FILTER is applied. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER ) will also be used to filter the input files that are used for +# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). +# The default value is: NO. + +FILTER_SOURCE_FILES = NO + +# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file +# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and +# it is also possible to disable source filtering for a specific pattern using +# *.ext= (so without naming a filter). +# This tag requires that the tag FILTER_SOURCE_FILES is set to YES. + +FILTER_SOURCE_PATTERNS = + +# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that +# is part of the input, its contents will be placed on the main page +# (index.html). This can be useful if you have a project on for instance GitHub +# and want to reuse the introduction page also for the doxygen output. 
+ +USE_MDFILE_AS_MAINPAGE = + +#--------------------------------------------------------------------------- +# Configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will be +# generated. Documented entities will be cross-referenced with these sources. +# +# Note: To get rid of all source code in the generated output, make sure that +# also VERBATIM_HEADERS is set to NO. +# The default value is: NO. + +SOURCE_BROWSER = NO + +# Setting the INLINE_SOURCES tag to YES will include the body of functions, +# classes and enums directly into the documentation. +# The default value is: NO. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any +# special comment blocks from generated source code fragments. Normal C, C++ and +# Fortran comments will always remain visible. +# The default value is: YES. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES then for each documented +# function all documented functions referencing it will be listed. +# The default value is: NO. + +REFERENCED_BY_RELATION = NO + +# If the REFERENCES_RELATION tag is set to YES then for each documented function +# all documented entities called/used by that function will be listed. +# The default value is: NO. + +REFERENCES_RELATION = NO + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES, then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will +# link to the documentation. +# The default value is: YES. 
+ +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. The htags tool is part of GNU's global source tagging system +# (see http://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. +# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the config file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES. + +VERBATIM_HEADERS = YES + +# If the CLANG_ASSISTED_PARSING tag is set to YES, then doxygen will use the +# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the +# cost of reduced performance.
This can be particularly helpful with template +# rich C++ code for which doxygen's built-in parser lacks the necessary type +# information. +# Note: The availability of this option depends on whether or not doxygen was +# compiled with the --with-libclang option. +# The default value is: NO. + +CLANG_ASSISTED_PARSING = NO + +# If clang assisted parsing is enabled you can provide the compiler with command +# line options that you would normally use when invoking the compiler. Note that +# the include paths will already be set by doxygen for the files and directories +# specified with INPUT and INCLUDE_PATH. +# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. + +CLANG_OPTIONS = + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = YES + +# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in +# which the alphabetical index list will be split. +# Minimum value: 1, maximum value: 20, default value: 5. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all classes will +# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag +# can be used to specify a prefix (or a list of prefixes) that should be ignored +# while generating the index headers. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. 
+ +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output +# The default value is: YES. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). +# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. +# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. +# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. +# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user- +# defined cascading style sheet that is included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. +# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefore more robust against future updates. +# Doxygen will copy the style sheet file to the output directory. For an example +# see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. Note +# that these files will be copied to the base HTML output directory.
Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the stylesheet and background images according to +# this color. Hue is specified as an angle on a colorwheel, see +# http://en.wikipedia.org/wiki/Hue for more information. For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. +# Minimum value: 0, maximum value: 359, default value: 220. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. For a value of 0 the output will use grayscales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. 
Setting this +# to NO can help when comparing the output of multiple runs. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_TIMESTAMP = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_SECTIONS = NO + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). So setting the number of +# entries 1 will produce a full collapsed tree by default. 0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_INDEX_NUM_ENTRIES = 100 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: http://developer.apple.com/tools/xcode/), introduced with +# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a +# Makefile in the HTML output directory. Running make will produce the docset in +# that directory and running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html +# for more information. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. +# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on +# Windows. +# +# The HTML Help Workshop contains a compiler that can convert all HTML output +# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML +# files are now used as the Windows 98 help format, and will replace the old +# Windows help format (.hlp) on all Windows platforms in the future. 
Compressed +# HTML files also contain an index, a table of contents, and you can search for +# words in the documentation. The HTML workshop also contains a viewer for +# compressed HTML files. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_HTMLHELP = NO + +# The CHM_FILE tag can be used to specify the file name of the resulting .chm +# file. You can add a path in front of the file if the result should not be +# written to the html output directory. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_FILE = + +# The HHC_LOCATION tag can be used to specify the location (absolute path +# including file name) of the HTML help compiler ( hhc.exe). If non-empty +# doxygen will try to run the HTML help compiler on the generated index.hhp. +# The file has to be specified with full path. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +HHC_LOCATION = + +# The GENERATE_CHI flag controls if a separate .chi index file is generated ( +# YES) or that it should be included in the master .chm file ( NO). +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +GENERATE_CHI = NO + +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index ( hhk), content ( hhc) +# and project file content. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_INDEX_ENCODING = + +# The BINARY_TOC flag controls whether a binary table of contents is generated ( +# YES) or a normal table of contents ( NO) in the .chm file. Furthermore it +# enables the Previous and Next buttons. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members to +# the table of contents of the HTML help documentation and to the tree view. +# The default value is: NO. 
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that +# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help +# (.qch) of the generated HTML documentation. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify +# the file name of the resulting .qch file. The path specified is relative to +# the HTML output folder. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help +# Project output. For more information please see Qt Help Project / Namespace +# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_NAMESPACE = org.doxygen.Project + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt +# Help Project output. For more information please see Qt Help Project / Virtual +# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- +# folders). +# The default value is: doc. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_VIRTUAL_FOLDER = doc + +# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom +# filter to add. For more information please see Qt Help Project / Custom +# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the +# custom filter to add. 
For more information please see Qt Help Project / Custom +# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's filter section matches. Qt Help Project / Filter Attributes (see: +# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_SECT_FILTER_ATTRS = + +# The QHG_LOCATION tag can be used to specify the location of Qt's +# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the +# generated .qhp file. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be +# generated, together with the HTML files, they form an Eclipse help plugin. To +# install this plugin and make it available under the help contents menu in +# Eclipse, the contents of the directory containing the HTML and XML files needs +# to be copied into the plugins directory of eclipse. The name of the directory +# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. +# After copying Eclipse needs to be restarted before the help appears. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the Eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have this +# name. Each documentation set should have its own identifier. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. 
+ +ECLIPSE_DOC_ID = org.doxygen.Project + +# If you want full control over the layout of the generated HTML pages it might +# be necessary to disable the index and replace it with your own. The +# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top +# of each HTML page. A value of NO enables the index and the value YES disables +# it. Since the tabs in the index contain the same information as the navigation +# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +DISABLE_INDEX = NO + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. If the tag +# value is set to YES, a side panel will be generated containing a tree-like +# index structure (just like the one that is generated for HTML Help). For this +# to work a browser that supports JavaScript, DHTML, CSS and frames is required +# (i.e. any modern browser). Windows users are probably better off using the +# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can +# further fine-tune the look of the index. As an example, the default style +# sheet generated by doxygen has an example that shows how to put an image at +# the root of the tree instead of the PROJECT_NAME. Since the tree basically has +# the same information as the tab index, you could consider setting +# DISABLE_INDEX to YES when enabling this option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_TREEVIEW = NO + +# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that +# doxygen will group on one line in the generated HTML documentation. +# +# Note that a value of 0 will completely suppress the enum values from appearing +# in the overview section. +# Minimum value: 0, maximum value: 20, default value: 4. 
+ +# This tag requires that the tag GENERATE_HTML is set to YES. + +ENUM_VALUES_PER_LINE = 4 + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used +# to set the initial width (in pixels) of the frame in which the tree is shown. +# Minimum value: 0, maximum value: 1500, default value: 250. +# This tag requires that the tag GENERATE_HTML is set to YES. + +TREEVIEW_WIDTH = 250 + +# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to +# external symbols imported via tag files in a separate window. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +EXT_LINKS_IN_WINDOW = NO + +# Use this tag to change the font size of LaTeX formulas included as images in +# the HTML documentation. When you change the font size after a successful +# doxygen run you need to manually remove any form_*.png images from the HTML +# output directory to force them to be regenerated. +# Minimum value: 8, maximum value: 50, default value: 10. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_FONTSIZE = 10 + +# Use the FORMULA_TRANSPARENT tag to determine whether or not the images +# generated for formulas are transparent PNGs. Transparent PNGs are not +# supported properly for IE 6.0, but are supported on all modern browsers. +# +# Note that when changing this option you need to delete any form_*.png files in +# the HTML output directory before the changes have effect. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_TRANSPARENT = YES + +# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see +# http://www.mathjax.org) which uses client side Javascript for the rendering +# instead of using prerendered bitmaps. Use this if you do not have LaTeX +# installed or if you want the formulas to look prettier in the HTML output.
When +# enabled you may also need to install MathJax separately and configure the path +# to it using the MATHJAX_RELPATH option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +USE_MATHJAX = NO + +# When MathJax is enabled you can set the default output format to be used for +# the MathJax output. See the MathJax site (see: +# http://docs.mathjax.org/en/latest/output.html) for more details. +# Possible values are: HTML-CSS (which is slower, but has the best +# compatibility), NativeMML (i.e. MathML) and SVG. +# The default value is: HTML-CSS. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_FORMAT = HTML-CSS + +# When MathJax is enabled you need to specify the location relative to the HTML +# output directory using the MATHJAX_RELPATH option. The destination directory +# should contain the MathJax.js script. For instance, if the mathjax directory +# is located at the same level as the HTML output directory, then +# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax +# Content Delivery Network so you can quickly see the result without installing +# MathJax. However, it is strongly recommended to install a local copy of +# MathJax from http://www.mathjax.org before deployment. +# The default value is: http://cdn.mathjax.org/mathjax/latest. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest + +# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax +# extension names that should be enabled during MathJax rendering. For example +# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_EXTENSIONS = + +# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces +# of code that will be used on startup of the MathJax code. 
See the MathJax site +# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an +# example see the documentation. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_CODEFILE = + +# When the SEARCHENGINE tag is enabled doxygen will generate a search box for +# the HTML output. The underlying search engine uses javascript and DHTML and +# should work on any modern browser. Note that when using HTML help +# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) +# there is already a search function so this one should typically be disabled. +# For large projects the javascript based search engine can be slow, then +# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to +# search using the keyboard; to jump to the search box use + S +# (what the is depends on the OS and browser, but it is typically +# , /