diff --git a/CMakeLists.txt b/CMakeLists.txt index 28bb6d42..2ab27cb1 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -72,6 +72,7 @@ include(${DEPS_DIRECTORY}/matplot/matplot.cmake) include(${DEPS_DIRECTORY}/protobuf/protobuf.cmake) include(${DEPS_DIRECTORY}/loguru/loguru.cmake) include(${DEPS_DIRECTORY}/imagemagick/imagemagick.cmake) +include(${DEPS_DIRECTORY}/boost/boost.cmake) # ============================= # ============================= diff --git a/README.md b/README.md index 0171a043..1927f4e1 100644 --- a/README.md +++ b/README.md @@ -123,6 +123,47 @@ The airdrop payloads are currently programmed to connect to a WIFI network with this information on boot. To make sure that it is broadcasting correctly, you can try and connect to it via your phone or other device. +## Jetson Setup (PiCamera) +> How to setup networking for a new Jetsoj + +### 1. Clean Up Old Connections +Remove any previous attempts to avoid conflicts. +```bash +sudo nmcli con delete "pi-link" 2>/dev/null +sudo nmcli con delete "Wired connection 1" 2>/dev/null +sudo nmcli con delete "Wired connection 2" 2>/dev/null +``` + +### 2. Create the "MAC-Locked" Connection +We configure the Jetson to listen for the specific MAC address we set on the Pi (`00:dc:c8:f7:75:14`). This ensures the connection works even if the Jetson names the interface `usb0`, `usb2`, or `eth1`. + +```bash +# Create connection binding to the specific MAC address +# IP: 192.168.77.1 +sudo nmcli con add type ethernet con-name "pi-static" ifname "*" mac "00:dc:c8:f7:75:14" ip4 192.168.77.1/24 + +# Set to manual (Static IP) +sudo nmcli con modify "pi-static" ipv4.method manual + +# Bring it up +sudo nmcli con up "pi-static" +``` + +### 3. Verify the Connection +```bash +ip addr show pi-static +``` + +### 4. Test the Connection +```bash +ping 192.168.77.2 +``` +> This should return packets if the connection is set up correctly. 
+ +### Pi-Connection Config: +**Jetson (Server/Receiver):** `192.168.77.1` +**Raspberry Pi (Client/Sender):** `192.168.77.2` + ## Setup Now that everything is installed, here is the process to build and run the @@ -289,6 +330,33 @@ When testing the pipeline, you will likely want to also use the `mock` camera. ... ``` which will use `not-stolen-israeli-code` as the mock camera. + +--- + +#### PiCamera + +Alternatively, you can use the `PiCamera` camera, which will use the PiCamera +on the Jetson to capture images. This must done alongside [camera-things](https://github.com/tritonuas/camera-things/). + +```json + "camera": { + ... + "type": "PiCamera", + ... +``` +Verify that the camera network is set up correctly by running: + +```bash +ip addr show pi-static +``` +> should list pi-static as active, if not run `sudo nmcli con up pi-static` + +Test the connection by running: +```bash +ping 192.168.77.2 +``` +> Make sure the raspy server is running on the pi! + ### SITL diff --git a/configs/dev.json b/configs/dev.json index e55ddd1f..ec756677 100644 --- a/configs/dev.json +++ b/configs/dev.json @@ -68,33 +68,9 @@ "runway": "right", "num_targets": 2, "connection_timeout": 10 - }, - "lucid": { - "sensor_shutter_mode": "Rolling", - - "acquisition_frame_rate_enable": true, - "target_brightness": 20, - "exposure_auto": "Continuous", - "exposure_time": 3000, - "exposure_auto_damping": 1, - "exposure_auto_algorithm": "Median", - "exposure_auto_upper_limit": 500, - "exposure_auto_lower_limit": 360, - - "stream_auto_negotiate_packet_size": true, - "stream_packet_resend_enable": true, - - "device_link_throughput_limit_mode": "On", - "device_link_throughput_limit": 125000000, - - "gamma_enable": true, - "gamma": 0.5, - "gain_auto": "Continuous", - "gain_auto_upper_limit": 10, - "gain_auto_lower_limit": 1 } }, "mavlink": { "log_params": false } -} +} \ No newline at end of file diff --git a/configs/jetson.json b/configs/jetson.json index 47c55326..17bd1675 100644 --- 
a/configs/jetson.json +++ b/configs/jetson.json @@ -14,7 +14,7 @@ }, "takeoff": { "altitude_m": 60.0, - "payload_size": 2 + "payload_size": 2 }, "pathing": { "laps": 10, @@ -26,10 +26,10 @@ "iterations_per_waypoint": 512, "rewire_radius": 256.0, "optimize": true, - "point_fetch_method": "nearest", + "point_fetch_method": "nearest", "allowed_to_skip_waypoints": false, "generate_deviations": false - }, + }, "coverage": { "altitude_m": 30.0, "camera_vision_m": 6.0, @@ -60,40 +60,16 @@ }, "camera": { "_comment": "See CameraConfig struct in datatypes.hpp for detailed explanations", - "type": "picamera", + "type": "PiCamera", "save_dir": "/obcpp/images/", "save_images_to_file": true, "mock": { "not_stolen_port": 6060, "runway": "right", "num_targets": 2 - }, - "lucid": { - "sensor_shutter_mode": "Rolling", - - "acquisition_frame_rate_enable": true, - "target_brightness": 20, - "exposure_auto": "Continuous", - "exposure_time": 3000, - "exposure_auto_damping": 1, - "exposure_auto_algorithm": "Median", - "exposure_auto_upper_limit": 500, - "exposure_auto_lower_limit": 360, - - "stream_auto_negotiate_packet_size": true, - "stream_packet_resend_enable": true, - - "device_link_throughput_limit_mode": "On", - "device_link_throughput_limit": 125000000, - - "gamma_enable": true, - "gamma": 0.5, - "gain_auto": "Continuous", - "gain_auto_upper_limit": 10, - "gain_auto_lower_limit": 1 } }, "mavlink": { "log_params": false } -} +} \ No newline at end of file diff --git a/deps/boost/boost.cmake b/deps/boost/boost.cmake new file mode 100644 index 00000000..ef2a4089 --- /dev/null +++ b/deps/boost/boost.cmake @@ -0,0 +1,16 @@ +function(target_add_boost target_name) + include(FetchContent) + + FetchContent_Declare(boost + URL https://github.com/boostorg/boost/releases/download/boost-1.87.0/boost-1.87.0-b2-nodocs.tar.gz + URL_HASH SHA256=d6c69e4459eb5d6ec208250291221e7ff4a2affde9af6e49c9303b89c687461f + DOWNLOAD_EXTRACT_TIMESTAMP true + ) + FetchContent_MakeAvailable(boost) + # 
target_link_libraries(${target_name} PRIVATE + # boost + # ) + target_include_directories(${target_name} PUBLIC ${boost_SOURCE_DIR}) + + +endfunction() diff --git a/include/camera/picamera.hpp b/include/camera/picamera.hpp deleted file mode 100644 index 5e31877d..00000000 --- a/include/camera/picamera.hpp +++ /dev/null @@ -1,77 +0,0 @@ -#ifndef INCLUDE_CAMERA_PICAMERA_HPP_ -#define INCLUDE_CAMERA_PICAMERA_HPP_ - -#include -#include -#include -#include -#include -#include - -#include -#include -#include "camera/interface.hpp" -#include "network/mavlink.hpp" - -class PiCamera : public CameraInterface { - public: - explicit PiCamera(CameraConfig config, int width = 1920, int height = 1080, int framerate = 30); - ~PiCamera(); - - void connect() override; - bool isConnected() override; - - /** - * Start taking photos at an interval in a background thread. - * Also requires a shared_ptr to a MavlinkClient to tag - * images with flight telemetry at capture time. - */ - void startTakingPictures(const std::chrono::milliseconds& interval, - std::shared_ptr mavlinkClient) override; - - /** - * Close background thread started by startTakingPictures - */ - void stopTakingPictures() override; - - /** - * Get the latest image that the camera took. This pops the latest - * image from a queue of images which means that the same image won't. - * be returned in two subsequent calls - */ - std::optional getLatestImage() override; - - /** - * getAllImages returns a queue of all the images that have been - * cached since the last call to getAllImages. Once this is called, - * it returns the cached images and clears the internal cache. - */ - std::deque getAllImages() override; - - /** - * Blocking call that takes an image. If it takes longer than the timeout - * to capture the image, no image is returned. 
- */ - std::optional takePicture(const std::chrono::milliseconds& timeout, - std::shared_ptr mavlinkClient) override; - void startStreaming() override; - - private: - /** - * Takes an image and sleeps for the specified interval before - * taking another image - */ - void captureEvery(const std::chrono::milliseconds& interval, - std::shared_ptr mavlinkClient); - - std::atomic_bool isTakingPictures; - - cv::VideoCapture cap; - - std::deque imageQueue; - std::shared_mutex imageQueueLock; - - std::thread captureThread; -}; - -#endif // INCLUDE_CAMERA_PICAMERA_HPP_ diff --git a/include/camera/rpi.hpp b/include/camera/rpi.hpp new file mode 100644 index 00000000..7e9b4852 --- /dev/null +++ b/include/camera/rpi.hpp @@ -0,0 +1,65 @@ +#ifndef INCLUDE_CAMERA_RPI_HPP_ +#define INCLUDE_CAMERA_RPI_HPP_ + +#include +#include +#include +#include +#include +#include +#include + +#include +#include "camera/interface.hpp" +#include "network/mavlink.hpp" +#include "network/udp_client.hpp" + +using json = nlohmann::json; +using namespace std::chrono_literals; // NOLINT + +namespace asio = boost::asio; + +enum class CameraRequest : std::uint8_t { + START = 's', + PICTURE = 'I', + END = 'e', + LOCK = 'l' +}; + +class RPICamera : public CameraInterface { + private: + UDPClient client; + asio::io_context io_context_; + std::atomic_bool connected; + + /** + * Converts the 3-plane raw data to BGR cv::Mat, handling stride/padding + */ + std::optional imgConvert(const std::vector>& planes); + + /** + * Reads the 3 planes (Y, U, V) from the camera + */ + std::vector> readImage(); + + public: + explicit RPICamera(CameraConfig config, asio::io_context* io_context_); + ~RPICamera(); + + void connect() override; + bool isConnected() override; + + std::optional getLatestImage() override {return std::nullopt;} + std::deque getAllImages() override {return std::deque();} + + std::optional takePicture(const std::chrono::milliseconds& timeout, + std::shared_ptr mavlinkClient) override; + + void 
startTakingPictures(const std::chrono::milliseconds& interval, + std::shared_ptr mavlinkClient) override; + void stopTakingPictures() override; + void startStreaming() override; + void ping(); +}; + +#endif // INCLUDE_CAMERA_RPI_HPP_ diff --git a/include/core/mission_state.hpp b/include/core/mission_state.hpp index f97de9b8..82172f2a 100644 --- a/include/core/mission_state.hpp +++ b/include/core/mission_state.hpp @@ -11,6 +11,7 @@ #include #include #include +#include #include "camera/interface.hpp" #include "core/mission_parameters.hpp" @@ -113,6 +114,7 @@ class MissionState { OBCConfig config; std::optional next_airdrop_to_drop; + boost::asio::io_context raspy_io; private: std::mutex converter_mut; @@ -139,6 +141,7 @@ class MissionState { std::mutex cv_mut; // Represents a single detected target used in pipeline std::vector cv_detected_targets; + // Gives an index into cv_detected_targets, and specifies that that bottle is matched // with the detected_target specified by the index std::array cv_matches; diff --git a/include/network/camera_data.hpp b/include/network/camera_data.hpp new file mode 100644 index 00000000..c9856303 --- /dev/null +++ b/include/network/camera_data.hpp @@ -0,0 +1,78 @@ +#ifndef INCLUDE_NETWORK_CAMERA_DATA_HPP_ +#define INCLUDE_NETWORK_CAMERA_DATA_HPP_ + +#include +#include +#include +#include +#include +#include + +struct ImageData_t { + int width; + int height; + size_t imageSizeBytes; + std::vector imgBuffer; + + template + void serialize(Archive & ar, const unsigned int version) { + ar & width; + ar & height; + ar & imageSizeBytes; + ar & imgBuffer; + } +}; + +// What does OBC needs from camera +enum class RequestType_t { + SENDIMAGE +}; + +// TODO: unsure if these need to be in a namespace or not +template +void serialize(Archive & ar, RequestType_t & request, const unsigned int version) { + ar & static_cast(request); // have to type cast enum class +} + +// What is the status of the request (debugging + error handling) +enum class 
ResponseType_t { + SUCC, + ERROR +}; + +// TODO: unsure if these need to be in a namespace or not +template +void serialize(Archive & ar, ResponseType_t & response, const unsigned int version) { + ar & static_cast(response); // have to type cast enum class +} + +// OBC requesting camera for something +struct CameraRequest_t { + // device id that requests (for debugging) + int pid; + + // type of request, we should figure out edge cases + RequestType_t requestType; + + template + void serialize(Archive & ar, const unsigned int version) { + ar & pid; + ar & requestType; + } +}; + +// Response back from camera to OBC +struct CameraResponse_t { + int pid; + ResponseType_t responseType; + ImageData_t imageData; + + template + void serialize(Archive & ar, const unsigned int version) { + ar & pid; + ar & responseType; + ar & imageData; + } +}; + +#endif // INCLUDE_NETWORK_CAMERA_DATA_HPP_ diff --git a/include/network/rpi_connection.hpp b/include/network/rpi_connection.hpp new file mode 100644 index 00000000..8093016e --- /dev/null +++ b/include/network/rpi_connection.hpp @@ -0,0 +1,32 @@ +#ifndef INCLUDE_NETWORK_RPI_CONNECTION_HPP_ +#define INCLUDE_NETWORK_RPI_CONNECTION_HPP_ + +#include +#include +#pragma once +#include + +// Image Config +inline uint32_t IMG_WIDTH = 1456; +inline uint32_t IMG_HEIGHT = 1088; +inline uint32_t IMG_BUFFER = IMG_WIDTH * IMG_HEIGHT * 3 / 2; + +// Libcamera Strides/Padding +const uint32_t STRIDE_Y = 1472; +const uint32_t STRIDE_UV = 736; + +// Network Config +const char SERVER_IP[] = "192.168.77.2"; +const int SERVER_PORT = 25565; + +const int headerSize = 12; +const uint32_t EXPECTED_MAGIC = 0x12345678; +const size_t CHUNK_SIZE = 1024; + +struct Header { + uint32_t magic; + uint32_t total_chunks; + uint32_t mem_size; +}; + +#endif diff --git a/include/network/serialize.hpp b/include/network/serialize.hpp new file mode 100644 index 00000000..91d1cb2d --- /dev/null +++ b/include/network/serialize.hpp @@ -0,0 +1,26 @@ +#ifndef 
INCLUDE_NETWORK_SERIALIZE_HPP_ +#define INCLUDE_NETWORK_SERIALIZE_HPP_ + +#include + +/* + Includes some reference on how to serialize and deserialize structs +*/ + +namespace serialh { + template + void serialize(T* response, boost::asio::streambuf* buf) { + std::ostream os(buf); + boost::archive::binary_oarchive oa(os); + oa << *response; + } + + template + void deserialize(T* response, boost::asio::streambuf* buf) { + std::istream is(buf); + boost::archive::binary_iarchive ia(is); + ia >> *response; + } +} // namespace serialh + +#endif // INCLUDE_NETWORK_SERIALIZE_HPP_ diff --git a/include/network/server.hpp b/include/network/server.hpp deleted file mode 100644 index 8a88bd50..00000000 --- a/include/network/server.hpp +++ /dev/null @@ -1,4 +0,0 @@ -#ifndef INCLUDE_NETWORK_SERVER_HPP_ -#define INCLUDE_NETWORK_SERVER_HPP_ - -#endif // INCLUDE_NETWORK_SERVER_HPP_ diff --git a/include/network/udp_client.hpp b/include/network/udp_client.hpp new file mode 100644 index 00000000..38a0ecf1 --- /dev/null +++ b/include/network/udp_client.hpp @@ -0,0 +1,41 @@ +#ifndef INCLUDE_NETWORK_UDP_CLIENT_HPP_ +#define INCLUDE_NETWORK_UDP_CLIENT_HPP_ + +#include +#include + +#include +#include +#include +#include +#include "rpi_connection.hpp" + +namespace asio = boost::asio; + +class UDPClient { + private: + asio::ip::udp::socket socket_; + std::string ip; + int port; + int current_timeout_ms_ = 2000; + + private: + bool waitForData(); + + public: + UDPClient(asio::io_context* io_context_, std::string ip, int port); + + // there isnt really a notion of connect with connectionless udp sockets + bool connect(); + + // Set Timeout for Socket + void setReceiveTimeout(int timeout_ms); + + bool send(std::uint8_t request); + + Header recvHeader(); + + std::vector recvBody(const int mem_size, const int total_chunks); +}; + +#endif // INCLUDE_NETWORK_UDP_CLIENT_HPP_ diff --git a/include/network/udp_server.hpp b/include/network/udp_server.hpp new file mode 100644 index 00000000..bdc3edc9 --- 
/dev/null +++ b/include/network/udp_server.hpp @@ -0,0 +1,38 @@ +#ifndef INCLUDE_NETWORK_UDP_SERVER_HPP_ +#define INCLUDE_NETWORK_UDP_SERVER_HPP_ + +#include +#include + +#include +#include + +#include "network/rpi_connection.hpp" + +namespace asio = boost::asio; + +class UDPServer { + private: + std::string ip; + int port; + asio::ip::udp::socket socket_; + + cv::Mat createBGR(); + + cv::Mat createYUV(); + + public: + UDPServer(asio::io_context* io_context_, std::string ip, int port); + + bool start(); + + void send(asio::ip::udp::endpoint & endpoint); + + void recv(); + + void handleRequest(char request, asio::ip::udp::endpoint & endpoint); + + void shutdown(); +}; + +#endif // INCLUDE_NETWORK_UDP_SERVER_HPP_ diff --git a/include/utilities/obc_config.hpp b/include/utilities/obc_config.hpp index 186555a1..c58169ed 100644 --- a/include/utilities/obc_config.hpp +++ b/include/utilities/obc_config.hpp @@ -113,7 +113,7 @@ struct PathingConfig { }; struct CameraConfig { - // "mock", "picamera", "picamera-1080p", "picamera-4k", "lucid" + // "mock", "PiCamera" std::string type; // directory to save images to std::string save_dir; diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index b1e78b8d..2f83e7ee 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -43,7 +43,9 @@ target_link_libraries(obcpp_lib_mock INTERFACE add_executable(${PROJECT_NAME} main.cpp) target_include_directories(${PROJECT_NAME} PRIVATE ${INCLUDE_DIRECTORY} ${GEN_PROTOS_DIRECTORY}) -target_link_libraries(${PROJECT_NAME} PRIVATE obcpp_lib) +target_link_libraries(${PROJECT_NAME} PRIVATE + obcpp_lib +) target_add_protobuf(${PROJECT_NAME}) target_add_torch(${PROJECT_NAME}) target_add_torchvision(${PROJECT_NAME}) @@ -53,6 +55,7 @@ target_add_mavsdk(${PROJECT_NAME}) target_add_opencv(${PROJECT_NAME}) target_add_httplib(${PROJECT_NAME}) target_add_loguru(${PROJECT_NAME}) +target_add_boost(${PROJECT_NAME}) target_add_onnxruntime(${PROJECT_NAME}) # for some reason calling target_add_imagemagick here 
conflicts with, so we are including/linking without the function call # target_add_imagemagick(${PROJECT_NAME}) diff --git a/src/camera/CMakeLists.txt b/src/camera/CMakeLists.txt index 789c6821..7b41ea61 100644 --- a/src/camera/CMakeLists.txt +++ b/src/camera/CMakeLists.txt @@ -3,12 +3,12 @@ set(LIB_NAME obcpp_camera) set(FILES interface.cpp mock.cpp - picamera.cpp + rpi.cpp ) SET(LIB_DEPS obcpp_protos - # obcpp_network # circular dependency + obcpp_network # circular dependency - resolved at executable level ) add_library(${LIB_NAME} STATIC @@ -30,6 +30,7 @@ target_add_httplib(${LIB_NAME}) target_add_mavsdk(${LIB_NAME}) target_add_matplot(${LIB_NAME}) target_add_loguru(${LIB_NAME}) +target_add_boost(${LIB_NAME}) target_include_directories(${LIB_NAME} PRIVATE ${ImageMagick_INCLUDE_DIRS}) -target_link_libraries(${LIB_NAME} PRIVATE -Wl,--copy-dt-needed-entries ${ImageMagick_LIBRARIES}) \ No newline at end of file +target_link_libraries(${LIB_NAME} PRIVATE -Wl,--copy-dt-needed-entries ${ImageMagick_LIBRARIES}) diff --git a/src/camera/interface.cpp b/src/camera/interface.cpp index 279f1de7..8b3939bd 100644 --- a/src/camera/interface.cpp +++ b/src/camera/interface.cpp @@ -1,11 +1,11 @@ #include "camera/interface.hpp" -#include #include +#include #include -#include "nlohmann/json.hpp" #include +#include "nlohmann/json.hpp" #include "network/mavlink.hpp" // imported here but not in the header due to circular dependency #include "utilities/base64.hpp" @@ -17,65 +17,59 @@ CameraInterface::CameraInterface(const CameraConfig& config) : config(config) {} std::string cvMatToBase64(cv::Mat image) { std::vector buf; cv::imencode(".jpg", image, buf); - auto *enc_msg = reinterpret_cast(buf.data()); + auto* enc_msg = reinterpret_cast(buf.data()); return base64_encode(enc_msg, buf.size()); } void saveImageToFile(cv::Mat image, const std::filesystem::path& filepath) { - cv::imwrite(filepath, image); + cv::imwrite(filepath, image); } void saveImageTelemetryToFile(const ImageTelemetry& 
telemetry, const std::filesystem::path& filepath) { - json telemetry_json = { - {"latitude_deg", telemetry.latitude_deg }, - {"longitude_deg", telemetry.longitude_deg }, - {"altitude_agl_m", telemetry.altitude_agl_m }, - {"airspeed_m_s", telemetry.airspeed_m_s }, - {"heading_deg", telemetry.heading_deg }, - {"yaw_deg", telemetry.yaw_deg }, - {"pitch_deg", telemetry.pitch_deg }, - {"roll_deg", telemetry.roll_deg } - }; - std::ofstream telemetry_file(filepath); - if (!telemetry_file.is_open()) { - LOG_F(ERROR, "Failed to save telemetry json to %s", filepath.string().c_str()); - return; - } - telemetry_file << to_string(telemetry_json); + json telemetry_json = { + {"latitude_deg", telemetry.latitude_deg}, {"longitude_deg", telemetry.longitude_deg}, + {"altitude_agl_m", telemetry.altitude_agl_m}, {"airspeed_m_s", telemetry.airspeed_m_s}, + {"heading_deg", telemetry.heading_deg}, {"yaw_deg", telemetry.yaw_deg}, + {"pitch_deg", telemetry.pitch_deg}, {"roll_deg", telemetry.roll_deg}}; + std::ofstream telemetry_file(filepath); + if (!telemetry_file.is_open()) { + LOG_F(ERROR, "Failed to save telemetry json to %s", filepath.string().c_str()); + return; + } + telemetry_file << to_string(telemetry_json); } std::optional queryMavlinkImageTelemetry( - std::shared_ptr mavlinkClient) { - if (mavlinkClient == nullptr) { - return {}; - } - - auto [lat_deg, lon_deg] = mavlinkClient->latlng_deg(); - double altitude_agl_m = mavlinkClient->altitude_agl_m(); - double airspeed_m_s = mavlinkClient->airspeed_m_s(); - double heading_deg = mavlinkClient->heading_deg(); - double yaw_deg = mavlinkClient->yaw_deg(); - double pitch_deg = mavlinkClient->pitch_deg(); - double roll_deg = mavlinkClient->roll_deg(); - - std::cout << "lat_deg" << lat_deg << std::endl; - std::cout << "long deg" << lon_deg << std::endl; - std::cout << "altitude" << altitude_agl_m << std::endl; - - return ImageTelemetry { - .latitude_deg = lat_deg, - .longitude_deg = lon_deg, - .altitude_agl_m = altitude_agl_m, - 
.airspeed_m_s = airspeed_m_s, - .heading_deg = heading_deg, - .yaw_deg = yaw_deg, - .pitch_deg = pitch_deg, - .roll_deg = roll_deg - }; + std::shared_ptr mavlinkClient) { + if (mavlinkClient == nullptr) { + return {}; + } + + auto [lat_deg, lon_deg] = mavlinkClient->latlng_deg(); + double altitude_agl_m = mavlinkClient->altitude_agl_m(); + double airspeed_m_s = mavlinkClient->airspeed_m_s(); + double heading_deg = mavlinkClient->heading_deg(); + double yaw_deg = mavlinkClient->yaw_deg(); + double pitch_deg = mavlinkClient->pitch_deg(); + double roll_deg = mavlinkClient->roll_deg(); + + return ImageTelemetry{.latitude_deg = lat_deg, + .longitude_deg = lon_deg, + .altitude_agl_m = altitude_agl_m, + .airspeed_m_s = airspeed_m_s, + .heading_deg = heading_deg, + .yaw_deg = yaw_deg, + .pitch_deg = pitch_deg, + .roll_deg = roll_deg}; } bool ImageData::saveToFile(std::string directory) const { + if (this->DATA.empty() || this->TIMESTAMP == 0) { + LOG_F(ERROR, "Tried to save empty image"); + return false; + } + try { std::filesystem::path save_dir = directory; std::filesystem::path img_filepath = @@ -83,15 +77,16 @@ bool ImageData::saveToFile(std::string directory) const { std::filesystem::path json_filepath = save_dir / (std::to_string(this->TIMESTAMP) + std::string(".json")); - LOG_F(INFO, "Saving %s to %s", - img_filepath.string().c_str(), - json_filepath.string().c_str()); + LOG_F(INFO, "Saving image to %s", img_filepath.string().c_str()); + LOG_F(INFO, "Saving telemetry to %s", json_filepath.string().c_str()); + saveImageToFile(this->DATA, img_filepath); if (this->TELEMETRY.has_value()) { saveImageTelemetryToFile(this->TELEMETRY.value(), json_filepath); } } catch (std::exception& e) { LOG_F(ERROR, "Failed to save image and telemetry to file"); + // std::cout << "Failed to save image and telemetry to file" << '\n'; return false; } diff --git a/src/camera/picamera.cpp b/src/camera/picamera.cpp deleted file mode 100644 index ac8e62b5..00000000 --- 
a/src/camera/picamera.cpp +++ /dev/null @@ -1,128 +0,0 @@ -#include "camera/picamera.hpp" - -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include - -#include "camera/interface.hpp" -#include "network/mavlink.hpp" -#include "utilities/common.hpp" -#include "utilities/datatypes.hpp" -#include "utilities/locks.hpp" - -using json = nlohmann::json; - -// struct CameraConfig { -// // either "mock" or "lucid" -// std::string type; -// // directory to save images to -// std::string save_dir; -// // whether or not to save to save_dir -// bool save_images_to_file; -// struct { -// // directory to randomly pick images from -// // for the mock camera -// std::string images_dir; -// } mock; -// }; - -PiCamera::PiCamera(CameraConfig config, int width, int height, int framerate) - : CameraInterface(config) { - if (!this->cap.isOpened()) { - LOG_F(FATAL, "FATAL: Could not open picamera"); - } else { - LOG_F(INFO, "PiCamera opened successfully."); - } -} - -void PiCamera::connect() {} - -PiCamera::~PiCamera() {} - -bool PiCamera::isConnected() { return true; } - -void PiCamera::startTakingPictures(const std::chrono::milliseconds& interval, - std::shared_ptr mavlinkClient) { - this->isTakingPictures = true; - try { - this->captureThread = std::thread(&PiCamera::captureEvery, this, interval, mavlinkClient); - } catch (const std::exception& e) { - std::cerr << e.what() << std::endl; - } -} - -void PiCamera::stopTakingPictures() { - if (!this->isTakingPictures) { - return; - } - - this->isTakingPictures = false; - - this->captureThread.join(); -} - -std::optional PiCamera::getLatestImage() { - // Use an exclusive (write) lock because we are modifying the queue. 
- WriteLock lock(this->imageQueueLock); - if (this->imageQueue.empty()) { - return std::nullopt; - } - ImageData lastImage = this->imageQueue.front(); - this->imageQueue.pop_front(); - return lastImage; -} - -std::deque PiCamera::getAllImages() { - // Use an exclusive (write) lock because we are clearing the queue. - WriteLock lock(this->imageQueueLock); - std::deque outputQueue; - this->imageQueue.swap(outputQueue); // swap is an efficient way to move and clear - return outputQueue; -} - -std::optional PiCamera::takePicture(const std::chrono::milliseconds& timeout, - std::shared_ptr mavlinkClient) { - cv::Mat frame; - - // uint64_t timestamp = getUnixTime_s().count(); - - // ImageData imageData{ - // .DATA = frame, - // .TIMESTAMP = timestamp, - // .TELEMETRY = queryMavlinkImageTelemetry(mavlinkClient), - // }; - - return std::nullopt; -} - -void PiCamera::startStreaming() {} - -void PiCamera::captureEvery(const std::chrono::milliseconds& interval, - std::shared_ptr mavlinkClient) { - loguru::set_thread_name("picamera"); - - while (this->isTakingPictures) { - LOG_F(INFO, "Taking picture with picam."); - auto imageData = this->takePicture(interval, mavlinkClient); - - if (!imageData.has_value()) { - LOG_F(WARNING, "Failed to take picture with picam"); - continue; - } - - WriteLock lock(this->imageQueueLock); - this->imageQueue.push_back(imageData.value()); - lock.unlock(); - - std::this_thread::sleep_for(interval); - } -} diff --git a/src/camera/rpi.cpp b/src/camera/rpi.cpp new file mode 100644 index 00000000..8822525d --- /dev/null +++ b/src/camera/rpi.cpp @@ -0,0 +1,179 @@ +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include "camera/rpi.hpp" +#include "network/rpi_connection.hpp" + +// Setup Logging +// ... 
+ +RPICamera::RPICamera(CameraConfig config, asio::io_context* io_context_) + : CameraInterface(config), client(io_context_, SERVER_IP, SERVER_PORT) { + // this->connected = false; + LOG_F(INFO, "RPICamera exists"); +} + +void RPICamera::connect() { + if (this->connected) return; + + // Keep trying to connect/bind logic + // For UDP, "connect" just means opening the socket which is fast + if (client.connect()) { + this->connected = true; + // Optionally send CameraRequest::START if needed, + // but 'I' usually works standalone (i think) + // client.send(static_cast(CameraRequest::START)); + } +} + +RPICamera::~RPICamera() {} + +std::optional RPICamera::takePicture( + const std::chrono::milliseconds& timeout, std::shared_ptr mavlinkClient) { + std::optional telemetryOpt = queryMavlinkImageTelemetry(mavlinkClient); + + if (!telemetryOpt.has_value()) { + LOG_F(WARNING, "Could not grab telemetry data from mavlink"); + } + + // Set timeout dynamically + client.setReceiveTimeout(timeout.count()); + + auto start_time = std::chrono::steady_clock::now(); + + // 1. Send Request + if (!client.send(static_cast(CameraRequest::PICTURE))) { + LOG_F(ERROR, "Failed to send picture request"); + return {}; + } + + // 2. Read 3 Planes + std::vector> planes = readImage(); + + auto end_time = std::chrono::steady_clock::now(); + auto elapsed = std::chrono::duration_cast(end_time - start_time); + + if (planes.size() != 3) { + if (elapsed >= timeout - std::chrono::milliseconds(50)) { + LOG_F(ERROR, "Camera request timed out after %ld ms", timeout.count()); + } else { + LOG_F(ERROR, "Failed to read all 3 planes"); + } + return {}; + } + + // 3. 
Convert to cv::Mat + std::optional mat = imgConvert(planes); + + if (!mat.has_value()) { + return {}; + } + + uint64_t timestamp = std::chrono::duration_cast( + std::chrono::system_clock::now().time_since_epoch()).count(); + + return ImageData { + .DATA = mat.value(), + .TIMESTAMP = timestamp, + .TELEMETRY = telemetryOpt + }; +} + +std::vector> RPICamera::readImage() { + std::vector> planes; + + // We expect exactly 3 planes: Y, U, V + for (int i = 0; i < 3; i++) { + Header header = client.recvHeader(); + + if (header.magic != EXPECTED_MAGIC) { + LOG_F(ERROR, "Invalid Magic on plane %d: %x", i, header.magic); + return {}; + } + + std::vector planeData = client.recvBody(header.mem_size, header.total_chunks); + if (planeData.empty()) { + LOG_F(ERROR, "Failed to receive body for plane %d", i); + return {}; + } + + planes.push_back(std::move(planeData)); + } + + return planes; +} + +std::optional RPICamera::imgConvert(const std::vector>& planes) { + // We rely on constants from rpi_connection.hpp: + // IMG_WIDTH, IMG_HEIGHT, STRIDE_Y, STRIDE_UV + + if (planes.size() != 3) return {}; + + const std::vector& p0 = planes[0]; // Y + const std::vector& p1 = planes[1]; // U + const std::vector& p2 = planes[2]; // V + + // Create wrappers around the raw data with specific stride (Step) + // Note: cv::Mat constructor with data pointer does not copy data, so careful with lifetime + // But we copy immediately below. 
+ cv::Mat y_src(IMG_HEIGHT, IMG_WIDTH, CV_8UC1, + reinterpret_cast(const_cast(p0.data())), STRIDE_Y); + cv::Mat u_src(IMG_HEIGHT/2, IMG_WIDTH/2, CV_8UC1, + reinterpret_cast(const_cast(p1.data())), STRIDE_UV); + cv::Mat v_src(IMG_HEIGHT/2, IMG_WIDTH/2, CV_8UC1, + reinterpret_cast(const_cast(p2.data())), STRIDE_UV); + + // Allocate continuous buffer for standard I420 + cv::Mat yuv_continuous(IMG_HEIGHT + IMG_HEIGHT/2, IMG_WIDTH, CV_8UC1); + + // Copy Y (Removes padding) + y_src.copyTo(yuv_continuous(cv::Rect(0, 0, IMG_WIDTH, IMG_HEIGHT))); + + // Copy U and V (Removes padding) + // We need to flatten them carefully into the buffer after Y + size_t y_size = IMG_WIDTH * IMG_HEIGHT; + size_t uv_size = (IMG_WIDTH / 2) * (IMG_HEIGHT / 2); + + uint8_t* u_dst = yuv_continuous.data + y_size; + uint8_t* v_dst = u_dst + uv_size; + + // Copy U + if (u_src.isContinuous()) { + memcpy(u_dst, u_src.data, uv_size); + } else { + for (int row = 0; row < u_src.rows; ++row) { + memcpy(u_dst + row * (IMG_WIDTH/2), u_src.ptr(row), IMG_WIDTH/2); + } + } + + // Copy V + if (v_src.isContinuous()) { + memcpy(v_dst, v_src.data, uv_size); + } else { + for (int row = 0; row < v_src.rows; ++row) { + memcpy(v_dst + row * (IMG_WIDTH/2), v_src.ptr(row), IMG_WIDTH/2); + } + } + + // Convert I420 to BGR + cv::Mat bgr_img; + cv::cvtColor(yuv_continuous, bgr_img, cv::COLOR_YUV2BGR_I420); + + return bgr_img; +} + +// Unused methods stubbed out +void RPICamera::startTakingPictures(const std::chrono::milliseconds& timeout, + std::shared_ptr mavlinkClient) {} +void RPICamera::stopTakingPictures() {} +void RPICamera::startStreaming() {} +void RPICamera::ping() {} +bool RPICamera::isConnected() { return connected; } diff --git a/src/core/CMakeLists.txt b/src/core/CMakeLists.txt index e8675ea5..7b6a93e1 100644 --- a/src/core/CMakeLists.txt +++ b/src/core/CMakeLists.txt @@ -35,6 +35,8 @@ target_add_mavsdk(${LIB_NAME}) target_add_matplot(${LIB_NAME}) target_add_loguru(${LIB_NAME}) 
target_add_onnxruntime(${LIB_NAME}) +target_add_boost(${LIB_NAME}) + target_include_directories(${LIB_NAME} PRIVATE ${ImageMagick_INCLUDE_DIRS}) -target_link_libraries(${LIB_NAME} PRIVATE -Wl,--copy-dt-needed-entries ${ImageMagick_LIBRARIES}) \ No newline at end of file +target_link_libraries(${LIB_NAME} PRIVATE -Wl,--copy-dt-needed-entries ${ImageMagick_LIBRARIES}) diff --git a/src/core/obc.cpp b/src/core/obc.cpp index ada75b00..dc11942e 100644 --- a/src/core/obc.cpp +++ b/src/core/obc.cpp @@ -4,8 +4,8 @@ #include #include +#include "camera/rpi.hpp" #include "camera/mock.hpp" -#include "camera/picamera.hpp" #include "core/obc.hpp" #include "core/mission_state.hpp" #include "ticks/tick.hpp" @@ -15,6 +15,7 @@ #include "network/airdrop_client.hpp" #include "utilities/logging.hpp" #include "utilities/obc_config.hpp" +#include "network/rpi_connection.hpp" extern "C" { #include "network/airdrop_sockets.h" } @@ -37,18 +38,12 @@ OBC::OBC(OBCConfig config) { if (this->state->config.camera.type == "mock") { this->state->setCamera(std::make_shared(this->state->config.camera)); - } else if (this->state->config.camera.type == "picamera") { - this->state->setCamera(std::make_shared(this->state->config.camera)); - } else if (this->state->config.camera.type == "picamera-1080p") { - this->state->setCamera(std::make_shared( - this->state->config.camera, 1920, 1080, 60)); - } else if (this->state->config.camera.type == "picamera-4k") { - this->state->setCamera(std::make_shared( - this->state->config.camera, 4956, 3040, 30)); - } else if (this->state->config.camera.type == "lucid") { - LOG_F(FATAL, "ATTEMPTING TO CONNECT TO LUCID CAMERA: LUCID CAMERA NO LONGER EXISTS"); + } else if (this->state->config.camera.type == "PiCamera") { + this->state->setCamera( + std::make_shared(config.camera, &(this->state->raspy_io))); + LOG_F(INFO, "Starting Pi Camera Client"); } else { - // default to mock if it's neither "mock" or "lucid" + // default to mock 
this->state->setCamera(std::make_shared(this->state->config.camera)); LOG_F(WARNING, "default camera config"); } diff --git a/src/network/CMakeLists.txt b/src/network/CMakeLists.txt index 69ad91f0..8e444eb8 100644 --- a/src/network/CMakeLists.txt +++ b/src/network/CMakeLists.txt @@ -5,6 +5,8 @@ set(COMMON_FILES gcs_routes.cpp gcs.cpp mavlink.cpp + udp_client.cpp + udp_server.cpp ) set(FILES @@ -44,6 +46,7 @@ target_add_httplib(${LIB_NAME}) target_add_mavsdk(${LIB_NAME}) target_add_matplot(${LIB_NAME}) target_add_loguru(${LIB_NAME}) +target_add_boost(${LIB_NAME}) target_add_onnxruntime(${LIB_NAME}) target_include_directories(${LIB_NAME} PRIVATE ${ImageMagick_INCLUDE_DIRS}) @@ -70,6 +73,8 @@ target_add_httplib(${LIB_NAME}) target_add_mavsdk(${LIB_NAME}) target_add_matplot(${LIB_NAME}) target_add_loguru(${LIB_NAME}) +target_add_boost(${LIB_NAME}) + target_include_directories(${LIB_NAME} PRIVATE ${ImageMagick_INCLUDE_DIRS}) -target_link_libraries(${LIB_NAME} PRIVATE -Wl,--copy-dt-needed-entries ${ImageMagick_LIBRARIES}) \ No newline at end of file +target_link_libraries(${LIB_NAME} PRIVATE -Wl,--copy-dt-needed-entries ${ImageMagick_LIBRARIES}) diff --git a/src/network/gcs_routes.cpp b/src/network/gcs_routes.cpp index 33eb3f0c..2bfbc63b 100644 --- a/src/network/gcs_routes.cpp +++ b/src/network/gcs_routes.cpp @@ -273,7 +273,7 @@ DEF_GCS_HANDLE(Get, camera, capture) { std::string output; google::protobuf::util::MessageToJsonString(manual_image, &output); - LOG_RESPONSE(INFO, "Successfully captured image", OK, output.c_str(), mime::json); + LOG_RESPONSE_HD(INFO, "Successfully captured image", OK, output.c_str(), mime::json); } DEF_GCS_HANDLE(Post, dodropnow) { @@ -363,7 +363,7 @@ DEF_GCS_HANDLE(Get, targets, all) { // logic) if (run.coords.size() != run.bboxes.size()) { LOG_F(ERROR, - "Mismatch between coordinates (%ld) and bboxes (%ld) count in run_id %d. " + "Mismatch between coordinates (%zu) and bboxes (%zu) count in run_id %d. 
" "Skipping this run.", run.coords.size(), run.bboxes.size(), run.run_id); continue; // Skip this problematic run diff --git a/src/network/udp_client.cpp b/src/network/udp_client.cpp new file mode 100644 index 00000000..1ddea67e --- /dev/null +++ b/src/network/udp_client.cpp @@ -0,0 +1,161 @@ +#include "network/udp_client.hpp" +#include +#include + +UDPClient::UDPClient(asio::io_context* io_context_, std::string ip, int port) + : socket_(*io_context_) { + this->ip = ip; + this->port = port; +} + +bool UDPClient::connect() { + boost::system::error_code ec; + + // Open the UDP socket + this->socket_.open(asio::ip::udp::v4(), ec); + + if (ec) { + LOG_F(ERROR, "Failed to connect"); + return false; + } + + // Set Receive Timeout + struct timeval read_timeout; + read_timeout.tv_sec = 2; + read_timeout.tv_usec = 0; + setsockopt(this->socket_.native_handle(), SOL_SOCKET, SO_RCVTIMEO, &read_timeout, + sizeof(read_timeout)); + + LOG_F(INFO, "Connected to %s on port %d", this->ip.c_str(), this->port); + return true; +} + +void UDPClient::setReceiveTimeout(int timeout_ms) { + this->current_timeout_ms_ = timeout_ms; + struct timeval read_timeout; + read_timeout.tv_sec = timeout_ms / 1000; + read_timeout.tv_usec = (timeout_ms % 1000) * 1000; + setsockopt(this->socket_.native_handle(), SOL_SOCKET, SO_RCVTIMEO, &read_timeout, + sizeof(read_timeout)); +} + +bool UDPClient::waitForData() { + fd_set fds; + FD_ZERO(&fds); + FD_SET(this->socket_.native_handle(), &fds); + + struct timeval tv; + tv.tv_sec = this->current_timeout_ms_ / 1000; + tv.tv_usec = (this->current_timeout_ms_ % 1000) * 1000; + + int rv = select(this->socket_.native_handle() + 1, &fds, NULL, NULL, &tv); + + if (rv == -1) { + LOG_F(ERROR, "Select failed in waitForData."); + return false; + } else if (rv == 0) { + // Timeout + return false; + } + return true; +} + +// send a request to the server +bool UDPClient::send(std::uint8_t request) { + boost::system::error_code ec; + asio::ip::udp::endpoint endpoint_( + 
asio::ip::udp::endpoint(asio::ip::make_address(this->ip), this->port)); + + LOG_F(INFO, "Sending request %c", request); + + int bytesSent = this->socket_.send_to(asio::buffer(&request, sizeof(request)), + endpoint_, 0, ec); + + if (ec) { + LOG_F(WARNING, "Failed to send request: %s", ec.message().c_str()); + return false; + } + + return true; +} + +// receive an image back from the server +Header UDPClient::recvHeader() { + boost::system::error_code ec; + asio::ip::udp::endpoint sender_endpoint; + Header header; + + if (!this->waitForData()) { + LOG_F(WARNING, "Timeout waiting for header data."); + return {}; + } + + int bytesRead = this->socket_.receive_from(asio::buffer(&header, sizeof(Header)), + sender_endpoint, 0, ec); + + if (ec) { + LOG_F(ERROR, "Failed to read header: %s", ec.message().c_str()); + return {}; + } + + header.magic = ntohl(header.magic); + header.total_chunks = ntohl(header.total_chunks); + header.mem_size = ntohl(header.mem_size); + + return header; +} + +std::vector UDPClient::recvBody(const int mem_size, const int total_chunks) { + boost::system::error_code ec; + asio::ip::udp::endpoint sender_endpoint; + + const int bufSize = mem_size; + std::vector buf(bufSize); + std::vector received_chunks(total_chunks, false); + + int chunks_received_count = 0; + + // Use raw buffer? hopefully no mem leaks + char chunk_buf[CHUNK_SIZE + sizeof(uint32_t)]; + + // Loop until all chunks received + // Note: we prolly want to add a max retry count or timeout check inside the loop + // But the socket timeout will break the loop eventually if data stops coming + while (chunks_received_count < total_chunks) { + if (!this->waitForData()) { + LOG_F(WARNING, "Timeout waiting for body chunks."); + return {}; + } + + size_t bytesRead = this->socket_.receive_from(asio::buffer(chunk_buf), + sender_endpoint, 0, ec); + + if (ec) { + LOG_F(ERROR, "Receive chunk failed: %s", ec.message().c_str()); + // Return what we have or empty? Returning empty signals failure. 
+ return {}; + } + + if (bytesRead < sizeof(uint32_t)) continue; + + uint32_t chunk_idx = ntohl(*reinterpret_cast(chunk_buf)); + size_t data_size = bytesRead - sizeof(uint32_t); + size_t offset = chunk_idx * CHUNK_SIZE; + + if (offset + data_size > static_cast(bufSize)) { + LOG_F(ERROR, "Chunk exceeds buffer bounds"); + continue; + } + + // copy into buffer + memcpy(buf.data() + offset, chunk_buf + sizeof(uint32_t), data_size); + + if (!received_chunks[chunk_idx]) { + received_chunks[chunk_idx] = true; + chunks_received_count++; + } + } + + LOG_F(INFO, "Successfully reconstructed plane: %lu bytes", buf.size()); + return buf; +} diff --git a/src/network/udp_server.cpp b/src/network/udp_server.cpp new file mode 100644 index 00000000..ef0c2a9d --- /dev/null +++ b/src/network/udp_server.cpp @@ -0,0 +1,311 @@ +#include +#include "network/udp_server.hpp" + +// TODO: Didn't touch this for the most part since its for mocking. + +cv::Mat UDPServer::createBGR() { + std::cout << std::filesystem::current_path() << '\n'; + cv::Mat image = cv::imread("/workspaces/obcpp/tests/integration/images/blurb.jpeg"); + if (image.empty()) { + std::cerr << "Failed to load image. 
Check path: blurb.jpeg\n"; + exit(1); // or return a default image + } + std::cout << "Image is not empty" << '\n'; + return image; +} + +cv::Mat UDPServer::createYUV() { + std::cout << std::filesystem::current_path() << '\n'; + // read in an image + cv::Mat image = cv::imread("/workspaces/obcpp/tests/integration/images/blurb.jpeg"); + + // convert into yuv420 + cv::Mat yuv_img(IMG_HEIGHT * 3 / 2, IMG_WIDTH, CV_8UC1); + cv::cvtColor(image, yuv_img, cv::COLOR_BGR2YUV_I420); + + return yuv_img; +} + +UDPServer::UDPServer(asio::io_context* io_context_, std::string ip, int port) + : socket_(*io_context_) { + this->ip = ip; + this->port = port; +} + +bool UDPServer::start() { + boost::system::error_code open_ec; + boost::system::error_code bind_ec; + asio::ip::udp::endpoint endpoint_( + asio::ip::udp::endpoint(asio::ip::make_address(this->ip), this->port)); + + std::cout << "Attempting to connect to " << this->ip << " on port " << this->port << '\n'; + + // open the udp socket + this->socket_.open(asio::ip::udp::v4(), open_ec); + + if (open_ec) { + std::cout << "Failed to open socket: " << open_ec.message() << '\n'; + return false; + } + + // bind to this address and port + this->socket_.bind(endpoint_, bind_ec); + + std::cout << "Endpoint: " << endpoint_.address() << endpoint_.port() << + + '\n'; + + if (bind_ec) { + std::cout << "Failed to bind socket: " << bind_ec.message() << '\n'; + return false; + } + + return true; +} + +void UDPServer::send(asio::ip::udp::endpoint & endpoint) { + // Note: use the endpoint from recv() to send back + + boost::system::error_code header_ec; + boost::system::error_code body_ec; + + std::cout << "Taking picture (reads in image)" << '\n'; + + // cv::Mat img = createBGR(); + cv::Mat img = createYUV(); + + std:: cout << "Putting image into buffer" << '\n'; + + cv::Mat flatten = img.reshape(1, img.total() * img.channels()); + std::vector imgBuffer = img.isContinuous() ? 
flatten : flatten.clone(); + + if (imgBuffer.size() != IMG_BUFFER) { + std::cout << "size: " << imgBuffer.size() << " expected: " << IMG_BUFFER << '\n'; + return; + } + + uint32_t total_chunks = imgBuffer.size() / CHUNK_SIZE; + + Header header; + + header.magic = htonl(EXPECTED_MAGIC); + // mem_size is the TOTAL payload size: the client sizes its reassembly buffer + // from this field and bounds-checks every chunk against it, so sending + // CHUNK_SIZE here would reject every chunk past index 0. + header.mem_size = htonl(imgBuffer.size()); + header.total_chunks = htonl(total_chunks); + + // send header + int bytesSentHeader = this->socket_.send_to(asio::buffer(&header, sizeof(header)), + endpoint, 0, header_ec); + + if (header_ec) { + std::cout << "Sending header failed: " << header_ec.message() << '\n'; + return; + } + + std::cout << "Read bytes (header): " << bytesSentHeader << '\n'; + + // send body + + const int buf_size = imgBuffer.size(); + + std::cout << "Total chunks: " << imgBuffer.size() / CHUNK_SIZE << '\n'; + + int totalBytesSent = 0; + + for (uint32_t i = 0; i < total_chunks; i++) { + const size_t offset = i * CHUNK_SIZE; + const size_t remaining = buf_size - offset; + const size_t data_size = std::min(CHUNK_SIZE, remaining); + + std::vector packet(sizeof(uint32_t) + data_size); + + // puts chunk index as the header + uint32_t* index_ptr = reinterpret_cast(packet.data()); + *index_ptr = htonl(i); + + + memcpy(packet.data() + sizeof(uint32_t), imgBuffer.data() + offset, data_size); + + std::this_thread::sleep_for(std::chrono::microseconds(50)); + int bytesSentBody = this->socket_.send_to(asio::buffer(packet), endpoint, 0, body_ec); + + totalBytesSent += (bytesSentBody - sizeof(uint32_t)); + + if (body_ec) { + std::cout << "Sending body failed: " << body_ec.message() << '\n'; + return; + } + } + + std::cout << "Finished sending " << totalBytesSent << " bytes" << '\n'; + + // int bytesSentBody = this->socket_.send_to(asio::buffer(imgBuffer), endpoint, 0, body_ec); + + // std::cout << "Read bytes (body): " << bytesSentBody << '\n'; +} + +// expecting a request from the client +void UDPServer::recv() { + boost::system::error_code ec; + asio::ip::udp::endpoint 
client_endpoint; + + char request; + + int bytesRead = this->socket_.receive_from(asio::buffer(&request, sizeof(request)), + client_endpoint, 0, ec); + + if (ec) { + std::cout << "Failed to read request: " << ec.message() << '\n'; + } + + std::cout << "Bytes read (request): " << bytesRead << '\n'; + + handleRequest(request, client_endpoint); +} + +void UDPServer::handleRequest(char request, asio::ip::udp::endpoint & endpoint) { + if (request == 'I') { + this->send(endpoint); + } else if (request == 'e') { + this->shutdown(); + } else { + std::cout << "Invalid request: " << request << '\n'; + } +} + +void UDPServer::shutdown() { + std::cout << "shutting down" << '\n'; +} + +// cv::Mat UDPServer::createBGR() { +// cv::Mat image = cv::imread("blurb.jpeg"); +// if (image.empty()) { +// std::cerr << "Failed to load image. Check path: blurb.jpeg\n"; +// exit(1); // or return a default image +// } +// std::cout << "Image is not empty" << '\n'; +// return image; +// } + +// cv::Mat UDPServer::createYUV() { +// // read in an image +// cv::Mat image = cv::imread("blurb.jpeg"); + +// // convert into yuv420 +// cv::Mat yuv_img(IMG_HEIGHT * 3 / 2, IMG_WIDTH, CV_8UC1); +// cv::cvtColor(image, yuv_img, cv::COLOR_BGR2YUV_I420); + +// return yuv_img; +// } + +// UDPServer::UDPServer(asio::io_context* io_context_, std::string ip, int port) +// : socket_(*io_context_) { +// this->ip = ip; +// this->port = port; +// } + +// bool UDPServer::start() { +// boost::system::error_code open_ec; +// boost::system::error_code bind_ec; +// asio::ip::udp::endpoint endpoint_( +// asio::ip::udp::endpoint(asio::ip::make_address(this->ip), this->port)); + +// std::cout << ("Attempting to connect to %s on port %d", this->ip, this->port) +// << '\n'; + +// // open the udp socket +// this->socket_.open(asio::ip::udp::v4(), open_ec); + +// if (open_ec) { +// std::cout << ("Failed to open socket: %s", open_ec.message()) << '\n'; +// return false; +// } + +// // bind to this address and port +// 
this->socket_.bind(endpoint_, bind_ec); + +// if (bind_ec) { +// std::cout << ("Failed to bind socket: %s", bind_ec.message()) << '\n'; +// return false; +// } + +// return true; +// } + +// void UDPServer::send(asio::ip::udp::endpoint & endpoint) { +// // Note: use the endpoint from recv() to send back + +// boost::system::error_code header_ec; +// boost::system::error_code body_ec; + +// std::cout << "Taking picture (reads in image)" << '\n'; + +// // cv::Mat img = createBGR(); +// cv::Mat img = createYUV(); + +// std:: cout << "Putting image into buffer" << '\n'; + +// cv::Mat flatten = img.reshape(1, img.total() * img.channels()); +// std::vector imgBuffer = img.isContinuous() ? flatten : flatten.clone(); + +// if (imgBuffer.size() != BUFFER_SIZE) { +// std::cout << "size: " << imgBuffer.size() << " expected: " << BUFFER_SIZE << '\n'; +// return; +// } + +// Header header; + +// header.magic = htonl(EXPECTED_MAGIC); +// header.mem_size = htonl(CHUNK_SIZE); +// header.total_chunks = htonl(imgBuffer.size() / CHUNK_SIZE); + +// // send header +// int bytesReadHeader = this->socket_.send_to(asio::buffer(&header, sizeof(header)), +// endpoint, 0, header_ec); + +// if (header_ec) { +// std::cout << ("Sending header failed: %s", header_ec.message()) << '\n'; +// return; +// } + +// // send body +// int bytesReadBody = this->socket_.send_to(asio::buffer(imgBuffer), endpoint, 0, body_ec); + +// if (body_ec) { +// std::cout << ("Sending body failed: %s", body_ec.message()) << '\n'; +// return; +// } + +// } + +// // expecting a request from the client +// void UDPServer::recv() { +// boost::system::error_code ec; +// asio::ip::udp::endpoint client_endpoint; + +// char request; + +// int bytesRead = this->socket_.receive_from(asio::buffer(&request, sizeof(request)), +// client_endpoint, 0, ec); + +// if (ec) { +// std::cout << ("Failed to read request: %s", ec.message()) << '\n'; +// } + +// std::cout << ("Bytes read (request): %d", bytesRead) << '\n'; + +// 
handleRequest(request, client_endpoint); +// } + +// void UDPServer::handleRequest(char request, asio::ip::udp::endpoint & endpoint) { +// if (request == 'p') { +// this->send(endpoint); +// } else if (request == 'e') { +// this->shutdown(); +// } else { +// std::cout << ("Invalid request: %c", request) << '\n'; +// } +// } + +// void UDPServer::shutdown() { +// std::cout << "shutting down" << '\n'; +// } diff --git a/src/pathing/CMakeLists.txt b/src/pathing/CMakeLists.txt index 97bf7d8e..a2d80be2 100644 --- a/src/pathing/CMakeLists.txt +++ b/src/pathing/CMakeLists.txt @@ -24,6 +24,7 @@ target_link_libraries(${LIB_NAME} PRIVATE ${LIB_DEPS}) set_unity_for_target(${LIB_NAME}) +target_add_boost(${LIB_NAME}) target_add_protobuf(${LIB_NAME}) target_add_torch(${LIB_NAME}) target_add_torchvision(${LIB_NAME}) diff --git a/src/ticks/CMakeLists.txt b/src/ticks/CMakeLists.txt index 616c5804..23bc2605 100644 --- a/src/ticks/CMakeLists.txt +++ b/src/ticks/CMakeLists.txt @@ -37,6 +37,7 @@ target_link_libraries(${LIB_NAME} PRIVATE set_unity_for_target(${LIB_NAME}) +target_add_boost(${LIB_NAME}) target_add_protobuf(${LIB_NAME}) target_add_torch(${LIB_NAME}) target_add_torchvision(${LIB_NAME}) diff --git a/tests/integration/CMakeLists.txt b/tests/integration/CMakeLists.txt index 774ce9ef..66295239 100644 --- a/tests/integration/CMakeLists.txt +++ b/tests/integration/CMakeLists.txt @@ -136,9 +136,33 @@ target_add_loguru(airdrop_packets) # add_executable(airdrop_sockets src/network/airdrop_sockets.c tests/integration/airdrop_sockets.c) # target_include_directories(airdrop_sockets PRIVATE ${INCLUDE_DIRECTORY}) -# cuda_check -add_executable(cuda_check ${SOURCES} cuda_check.cpp) -target_add_torch(cuda_check) +# add_executable(camera_playground camera_playground.cpp) +# target_include_directories(camera_playground PRIVATE ${INCLUDE_DIRECTORY}) +# add_custom_target( +# run_camera_playground +# ${CMAKE_COMMAND} +# -E env LD_LIBRARY_s/target_siamese_1.pt +# ) + + + 
+add_executable(udp_server_mock "udp_server_mock.cpp") +target_link_libraries(udp_server_mock PRIVATE + obcpp_lib +) +target_include_directories(udp_server_mock PRIVATE ${INCLUDE_DIRECTORY}) +target_add_boost(udp_server_mock) +target_add_protobuf(udp_server_mock) +target_add_torch(udp_server_mock) +target_add_torchvision(udp_server_mock) +target_add_json(udp_server_mock) +target_add_opencv(udp_server_mock) +target_add_httplib(udp_server_mock) +target_add_mavsdk(udp_server_mock) +target_add_matplot(udp_server_mock) +target_add_loguru(udp_server_mock) +target_include_directories(udp_server_mock PRIVATE ${ImageMagick_INCLUDE_DIRS}) +target_link_libraries(udp_server_mock PRIVATE -Wl,--copy-dt-needed-entries ${ImageMagick_LIBRARIES}) add_executable(drop_it_like_its_hot "drop_it_like_its_hot.cpp") target_link_libraries(drop_it_like_its_hot PRIVATE obcpp_lib) @@ -148,16 +172,20 @@ target_add_mavsdk(drop_it_like_its_hot) target_add_matplot(drop_it_like_its_hot) target_add_loguru(drop_it_like_its_hot) -add_executable(picamera "picamera.cpp") -target_link_libraries(picamera PRIVATE obcpp_lib obcpp_camera obcpp_network) -target_include_directories(picamera PRIVATE ${INCLUDE_DIRECTORY}) -target_add_json(picamera) -target_add_mavsdk(picamera) -target_add_matplot(picamera) -target_add_loguru(picamera) - add_executable(clustering "clustering.cpp") target_link_libraries(clustering PRIVATE obcpp_lib obcpp_camera) target_add_matplot(clustering) target_add_json(clustering) -target_add_protobuf(clustering) \ No newline at end of file +target_add_protobuf(clustering) + +add_executable(camera_integration_test "camera_integration_test.cpp") +target_link_libraries(camera_integration_test PRIVATE obcpp_lib) +target_include_directories(camera_integration_test PRIVATE ${INCLUDE_DIRECTORY}) +target_add_loguru(camera_integration_test) +target_add_opencv(camera_integration_test) +target_add_boost(camera_integration_test) +target_add_mavsdk(camera_integration_test) 
+target_add_matplot(camera_integration_test) +target_add_json(camera_integration_test) +target_include_directories(camera_integration_test PRIVATE ${ImageMagick_INCLUDE_DIRS}) +target_link_libraries(camera_integration_test PRIVATE -Wl,--copy-dt-needed-entries ${ImageMagick_LIBRARIES}) diff --git a/tests/integration/camera_integration_test.cpp b/tests/integration/camera_integration_test.cpp new file mode 100644 index 00000000..78ca5d5e --- /dev/null +++ b/tests/integration/camera_integration_test.cpp @@ -0,0 +1,59 @@ +#include +#include +#include +#include +#include +#include "camera/rpi.hpp" +#include "utilities/obc_config.hpp" +#include "core/mission_state.hpp" +#include "utilities/common.hpp" + +namespace fs = std::filesystem; + +int main(int argc, char* argv[]) { + loguru::init(argc, argv); + + CameraConfig config; + asio::io_context io_context_; + const int NUM_IMAGES = 5; + const std::chrono::milliseconds IMAGE_INTERVAL = std::chrono::milliseconds(250); + + LOG_F(INFO, "[Test] Initializing RPICamera..."); + RPICamera camera(config, &io_context_); + + LOG_F(INFO, "[Test] Connecting..."); + camera.connect(); + + if (!camera.isConnected()) { + LOG_F(ERROR, "[Test] Failed to connect/bind socket"); + return -1; + } + + for (int i = 0; i < NUM_IMAGES; i++) { + LOG_F(INFO, "[Test] Requesting Picture %d/%d", i + 1, NUM_IMAGES); + std::this_thread::sleep_for(IMAGE_INTERVAL); + std::optional img = camera.takePicture(std::chrono::milliseconds(3000), nullptr); + + if (img.has_value()) { + LOG_F(INFO, "[Test] Image Received! 
Size: %ld bytes.", img.value().DATA.total()); + + fs::path base_dir = "images"; + if (!fs::exists(base_dir)) { + fs::create_directory(base_dir); + } + + fs::path filepath = base_dir / (std::to_string(img.value().TIMESTAMP) + ".png"); + + if (img.value().saveToFile(base_dir.string())) { + LOG_F(INFO, "[Test] Saved successfully to %s", base_dir.c_str()); + } else { + LOG_F(ERROR, "[Test] Failed to save image file."); + } + } else { + LOG_F(ERROR, "[Test] FAILED: No image received (Timeout or Error)."); + return 1; + } + } + + return 0; +} diff --git a/tests/integration/picamera.cpp b/tests/integration/picamera.cpp deleted file mode 100644 index 7f9100b4..00000000 --- a/tests/integration/picamera.cpp +++ /dev/null @@ -1,49 +0,0 @@ -#include -#include -#include -#include - -#include -#include - -#include "core/mission_state.hpp" -#include "camera/interface.hpp" -#include "camera/picamera.hpp" -#include "utilities/common.hpp" - - -using namespace std::chrono_literals; - -int main (int argc, char *argv[]) { - if (argc < 2) { - LOG_F(ERROR, "Usage: ./bin/picamera [path_to_config] [optional: output_dir]"); - exit(1); - } - std::filesystem::path output_dir = std::filesystem::current_path(); - if (argc >= 3) { - output_dir = argv[2]; - } - - OBCConfig config(argc, argv); - PiCamera camera(config.camera); - - camera.connect(); - - // start taking pictures every second - camera.startTakingPictures(1s, nullptr); - // need to sleep to let camera background thread to run - std::this_thread::sleep_for(5s); - camera.stopTakingPictures(); - - std::deque images = camera.getAllImages(); - for (const ImageData& image : images) { - std::filesystem::path save_dir = config.camera.save_dir; - std::filesystem::path img_filepath = save_dir / (std::to_string(image.TIMESTAMP) + std::string(".jpg")); - std::filesystem::path json_filepath = save_dir / (std::to_string(image.TIMESTAMP) + std::string(".json")); - saveImageToFile(image.DATA, img_filepath); - if (image.TELEMETRY.has_value()) { - 
saveImageTelemetryToFile(image.TELEMETRY.value(), json_filepath); - } - LOG_F(INFO, "Saving piaam image to %s", img_filepath.string().c_str()); - } -} diff --git a/tests/integration/udp_server_mock.cpp b/tests/integration/udp_server_mock.cpp new file mode 100644 index 00000000..26227696 --- /dev/null +++ b/tests/integration/udp_server_mock.cpp @@ -0,0 +1,24 @@ +#include +#include +#include +#include "network/udp_server.hpp" + +namespace asio = boost::asio; + +const std::string IP = "0.0.0.0"; +const int PORT = 5000; + +int main() { + asio::io_context io_context_; + UDPServer server(&io_context_, IP, PORT); + + // for (;;) { + // start the server in a while loop + server.start(); + // } + + + for (;;) { + server.recv(); + } +}