diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 4cb1877..6b394c4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -21,11 +21,17 @@ jobs: bind_mount_repository: true commands: | apt-get update - apt-get install -y libopencv-dev libegl1-mesa-dev libcamera-dev cmake build-essential libdrm-dev libgbm-dev openjdk-11-jdk + apt-get install -y libopencv-dev libegl1-mesa-dev cmake build-essential libdrm-dev libgbm-dev openjdk-11-jdk + apt-get remove -y libcamera0 python3-libcamera + wget https://github.com/ArduCAM/Arducam-Pivariety-V4L2-Driver/releases/download/libcamera-v0.0.5/libcamera-dev-0.0.12-bullseye-arm64.deb + dpkg -i libcamera-dev-0.0.12-bullseye-arm64.deb + apt-get -f install cmake -B build-pi -DCMAKE_BUILD_TYPE=Release cmake --build build-pi -j 4 - run: find . + + - run: find / | grep -e "libcamera.so" - uses: actions/upload-artifact@master with: diff --git a/blocking_future.h b/blocking_future.h index 7660caf..b843afe 100644 --- a/blocking_future.h +++ b/blocking_future.h @@ -1,8 +1,8 @@ #pragma once -#include -#include #include +#include +#include template class BlockingFuture { public: diff --git a/camera_grabber.cpp b/camera_grabber.cpp index d34f7a1..c4e68e0 100644 --- a/camera_grabber.cpp +++ b/camera_grabber.cpp @@ -8,12 +8,13 @@ CameraGrabber::CameraGrabber(std::shared_ptr camera, int width, int height, int rotation) - : m_buf_allocator(camera), m_camera(std::move(camera)), m_cameraExposureProfiles(std::nullopt) { + : m_buf_allocator(camera), m_camera(std::move(camera)), + m_cameraExposureProfiles(std::nullopt) { if (m_camera->acquire()) { throw std::runtime_error("failed to acquire camera"); } - + // Determine model auto &cprp = m_camera->properties(); auto model = cprp.get(libcamera::properties::Model); @@ -23,30 +24,33 @@ CameraGrabber::CameraGrabber(std::shared_ptr camera, m_model = Unknown; } - std::cout << "Model " << m_model << std::endl; + std::cout << "Model " << m_model << std::endl; auto 
config = m_camera->generateConfiguration( {libcamera::StreamRole::VideoRecording}); - + // print active arrays - if (m_camera->properties().contains(libcamera::properties::PIXEL_ARRAY_ACTIVE_AREAS)) { + if (m_camera->properties().contains( + libcamera::properties::PIXEL_ARRAY_ACTIVE_AREAS)) { printf("Active areas:\n"); - auto rects = m_camera->properties().get(libcamera::properties::PixelArrayActiveAreas); + auto rects = m_camera->properties().get( + libcamera::properties::PixelArrayActiveAreas); if (rects.has_value()) { - for(const auto rect : rects.value()) { + for (const auto rect : rects.value()) { std::cout << rect.toString() << std::endl; } } - } else + } else printf("No active areas\n"); config->at(0).size.width = width; config->at(0).size.height = height; - printf("Rotation = %i\n", rotation); + // printf("Rotation = %i\n", rotation); if (rotation == 180) { using namespace libcamera; - config->transform = Transform::HFlip * Transform::VFlip * libcamera::Transform::Identity; + config->transform = Transform::HFlip * Transform::VFlip * + libcamera::Transform::Identity; } else { config->transform = libcamera::Transform::Identity; } @@ -124,32 +128,34 @@ void CameraGrabber::setControls(libcamera::Request *request) { controls_.set(controls::AwbEnable, false); // AWB disabled } controls_.set(controls::AnalogueGain, - m_settings.analogGain); // Analog gain, min 1 max big number? + m_settings.analogGain); // Analog gain, min 1 max big number? 
if (m_model != OV9281) { controls_.set(controls::ColourGains, - libcamera::Span{ - {m_settings.awbRedGain, - m_settings.awbBlueGain}}); // AWB gains, red and blue, - // unknown range + libcamera::Span{ + {m_settings.awbRedGain, + m_settings.awbBlueGain}}); // AWB gains, red and + // blue, unknown range } - // Note about brightness: -1 makes everything look deep fried, 0 is probably best for most things + // Note about brightness: -1 makes everything look deep fried, 0 is probably + // best for most things controls_.set(libcamera::controls::Brightness, - m_settings.brightness); // -1 to 1, 0 means unchanged + m_settings.brightness); // -1 to 1, 0 means unchanged controls_.set(controls::Contrast, - m_settings.contrast); // Nominal 1 + m_settings.contrast); // Nominal 1 if (m_model != OV9281) { controls_.set(controls::Saturation, - m_settings.saturation); // Nominal 1, 0 would be greyscale + m_settings.saturation); // Nominal 1, 0 would be greyscale } if (m_settings.doAutoExposure) { controls_.set(controls::AeEnable, - true); // Auto exposure disabled + true); // Auto exposure disabled - controls_.set(controls::AeMeteringMode, controls::MeteringCentreWeighted); + controls_.set(controls::AeMeteringMode, + controls::MeteringCentreWeighted); if (m_model == OV9281) { controls_.set(controls::AeExposureMode, controls::ExposureNormal); } else { @@ -160,30 +166,46 @@ void CameraGrabber::setControls(libcamera::Request *request) { // seconds * 1e6 = uS constexpr const int MIN_FRAME_TIME = 1e6 / 250; constexpr const int MAX_FRAME_TIME = 1e6 / 15; - controls_.set( - libcamera::controls::FrameDurationLimits, - libcamera::Span{ - {MIN_FRAME_TIME, MAX_FRAME_TIME}}); + controls_.set(libcamera::controls::FrameDurationLimits, + libcamera::Span{ + {MIN_FRAME_TIME, MAX_FRAME_TIME}}); } else { controls_.set(controls::AeEnable, - false); // Auto exposure disabled + false); // Auto exposure disabled controls_.set(controls::ExposureTime, - m_settings.exposureTimeUs); // in microseconds + 
m_settings.exposureTimeUs); // in microseconds controls_.set( libcamera::controls::FrameDurationLimits, libcamera::Span{ {m_settings.exposureTimeUs, - m_settings.exposureTimeUs}}); // Set default to zero, we have - // specified the exposure time + m_settings.exposureTimeUs}}); // Set default to zero, we have + // specified the exposure time + } + + if (m_settings.doAutofocus) { + printf("Starting autofocus...\n"); + controls_.set(controls::AfMode, + controls::AfModeAuto); // auto focus enable + controls_.set(controls::AfTrigger, + controls::AfTriggerStart); // start a focus scan + m_settings.doAutofocus = false; + } + + if (auto result = controls_.get(libcamera::controls::AfState)) { + afState = *result; + } else { + afState = controls::AfStateFailed; } controls_.set(controls::ExposureValue, 0); - + if (m_model != OV7251 && m_model != OV9281) { controls_.set(controls::Sharpness, 1); } } +int CameraGrabber::getAutofocusStatus() { return afState; } + void CameraGrabber::startAndQueue() { running = true; if (m_camera->start()) { @@ -204,7 +226,6 @@ void CameraGrabber::stop() { m_camera->stop(); } - void CameraGrabber::setOnData( std::function onData) { m_onData = std::move(onData); diff --git a/camera_grabber.h b/camera_grabber.h index a37d1c5..524e391 100644 --- a/camera_grabber.h +++ b/camera_grabber.h @@ -18,7 +18,7 @@ struct CameraSettings { float awbBlueGain = 1.5; float saturation = 1; bool doAutoExposure = false; - // float digitalGain = 100; + bool doAutofocus = false; }; class CameraGrabber { @@ -35,6 +35,8 @@ class CameraGrabber { inline CameraSettings &cameraSettings() { return m_settings; } + int getAutofocusStatus(); + // Note: these 3 functions must be protected by mutual exclusion. // Failure to do so will result in UB. 
void startAndQueue(); @@ -50,14 +52,16 @@ class CameraGrabber { std::vector> m_requests; std::shared_ptr m_camera; CameraModel m_model; - std::optional> m_cameraExposureProfiles; + std::optional> + m_cameraExposureProfiles; std::unique_ptr m_config; std::optional> m_onData; + int afState; + CameraSettings m_settings{}; bool running = false; - void setControls(libcamera::Request *request); }; diff --git a/camera_model.cpp b/camera_model.cpp index 692e669..4e235ae 100644 --- a/camera_model.cpp +++ b/camera_model.cpp @@ -1,14 +1,21 @@ #include "camera_model.h" #include -CameraModel stringToModel(const std::string& model) { +CameraModel stringToModel(const std::string &model) { printf("Checking model: %s\n", model.c_str()); - const char* famname = model.c_str(); - if (!strcmp(famname, "ov5647")) return OV5647; - else if (!strcmp(famname, "imx219")) return IMX219; - else if (!strcmp(famname, "imx477")) return IMX477; - else if (!strcmp(famname, "ov9281")) return OV9281; - else if (!strcmp(famname, "ov7251")) return OV7251; - else if (!strcmp(famname, "Disconnected")) return Disconnected; - else return Unknown; + const char *famname = model.c_str(); + if (!strcmp(famname, "ov5647")) + return OV5647; + else if (!strcmp(famname, "imx219")) + return IMX219; + else if (!strcmp(famname, "imx477")) + return IMX477; + else if (!strcmp(famname, "ov9281")) + return OV9281; + else if (!strcmp(famname, "ov7251")) + return OV7251; + else if (!strcmp(famname, "Disconnected")) + return Disconnected; + else + return Unknown; } diff --git a/camera_model.h b/camera_model.h index f2fcf8e..69606c7 100644 --- a/camera_model.h +++ b/camera_model.h @@ -12,4 +12,4 @@ enum CameraModel { Unknown }; -CameraModel stringToModel(const std::string& model); +CameraModel stringToModel(const std::string &model); diff --git a/camera_runner.cpp b/camera_runner.cpp index 7509f2b..2aa0ae9 100644 --- a/camera_runner.cpp +++ b/camera_runner.cpp @@ -16,8 +16,8 @@ using latch = Latch; #include #include 
-#include #include +#include using namespace std::chrono; using namespace std::chrono_literals; @@ -32,9 +32,8 @@ static double approxRollingAverage(double avg, double new_sample) { CameraRunner::CameraRunner(int width, int height, int rotation, std::shared_ptr cam) : m_camera(std::move(cam)), m_width(width), m_height(height), - grabber(m_camera, m_width, m_height, rotation), m_thresholder(m_width, m_height), - allocer("/dev/dma_heap/linux,cma") { - + grabber(m_camera, m_width, m_height, rotation), + m_thresholder(m_width, m_height), allocer("/dev/dma_heap/linux,cma") { grabber.setOnData( [&](libcamera::Request *request) { camera_queue.push(request); }); @@ -50,9 +49,7 @@ CameraRunner::~CameraRunner() { } } -void CameraRunner::requestShaderIdx(int idx) { - m_shaderIdx = idx; -} +void CameraRunner::requestShaderIdx(int idx) { m_shaderIdx = idx; } void CameraRunner::setCopyOptions(bool copyIn, bool copyOut) { m_copyInput = copyIn; @@ -75,7 +72,6 @@ void CameraRunner::start() { // printf("Threshold thread!\n"); auto request = camera_queue.pop(); - if (!request) { break; } @@ -84,10 +80,13 @@ void CameraRunner::start() { .at(grabber.streamConfiguration().stream()) ->planes(); - for (int i = 0; i < 3; i++) { - // std::cout << "Plane " << (i + 1) << " has fd " << planes[i].fd.get() << " with offset " << planes[i].offset << std::endl; - // std::cout << "Plane " << (i + 1) << " has fd " << planes[i].fd.get() << " with offset " << planes[i].offset << " and pitch " << static_cast(stride / 2) << std::endl; - } + // for (int i = 0; i < 3; i++) { + // std::cout << "Plane " << (i + 1) << " has fd " << + // planes[i].fd.get() << " with offset " << planes[i].offset << + // std::endl; std::cout << "Plane " << (i + 1) << " has fd " << + // planes[i].fd.get() << " with offset " << planes[i].offset << " + // and pitch " << static_cast(stride / 2) << std::endl; + // } std::array yuv_data{{ {planes[0].fd.get(), static_cast(planes[0].offset), @@ -102,23 +101,22 @@ void 
CameraRunner::start() { auto type = static_cast(m_shaderIdx.load()); - int out = m_thresholder.testFrame(yuv_data, - encodingFromColorspace(colorspace), - rangeFromColorspace(colorspace), - type); - + int out = m_thresholder.testFrame( + yuv_data, encodingFromColorspace(colorspace), + rangeFromColorspace(colorspace), type); if (out != 0) { /* From libcamera docs: - The timestamp, expressed in nanoseconds, represents a monotonically - increasing counter since the system boot time, as defined by the - Linux-specific CLOCK_BOOTTIME clock id. + The timestamp, expressed in nanoseconds, represents a + monotonically increasing counter since the system boot time, as + defined by the Linux-specific CLOCK_BOOTTIME clock id. */ - uint64_t sensorTimestamp = static_cast(request->metadata() - .get(libcamera::controls::SensorTimestamp) - .value_or(0)); + uint64_t sensorTimestamp = static_cast( + request->metadata() + .get(libcamera::controls::SensorTimestamp) + .value_or(0)); gpu_queue.push({out, type, sensorTimestamp}); } @@ -127,8 +125,10 @@ void CameraRunner::start() { steady_clock::now() - begintime; if (elapsedMillis > 0.9ms) { // gpuTimeAvgMs = - // approxRollingAverage(gpuTimeAvgMs, elapsedMillis.count()); - // std::cout << "GLProcess: " << elapsedMillis.count() << std::endl; + // approxRollingAverage(gpuTimeAvgMs, + // elapsedMillis.count()); + // std::cout << "GLProcess: " << elapsedMillis.count() << + // std::endl; } { @@ -200,9 +200,9 @@ void CameraRunner::start() { // auto now = steady_clock::now(); // std::chrono::duration elapsed = - // (now - lastTime); - // fpsTimeAvgMs = approxRollingAverage(fpsTimeAvgMs, elapsed.count()); - // printf("Delta %.2f FPS: %.2f\n", fpsTimeAvgMs, + // (now - lastTime); + // fpsTimeAvgMs = approxRollingAverage(fpsTimeAvgMs, + // elapsed.count()); printf("Delta %.2f FPS: %.2f\n", fpsTimeAvgMs, // 1000.0 / fpsTimeAvgMs); // lastTime = now; } diff --git a/camera_runner.h b/camera_runner.h index ada44cb..a02c3d4 100644 --- a/camera_runner.h 
+++ b/camera_runner.h @@ -18,13 +18,13 @@ struct MatPair { cv::Mat color; cv::Mat processed; - long captureTimestamp; // In libcamera time units, hopefully uS? TODO actually implement + long captureTimestamp; // In libcamera time units, hopefully uS? TODO + // actually implement int32_t frameProcessingType; // enum value of shader run on the image MatPair() = default; explicit MatPair(int width, int height) - : color(height, width, CV_8UC3), - processed(height, width, CV_8UC1) {} + : color(height, width, CV_8UC3), processed(height, width, CV_8UC1) {} }; // Note: destructing this class without calling `stop` if `start` was called @@ -55,9 +55,7 @@ class CameraRunner { void requestShaderIdx(int idx); - private: - struct GpuQueueData { int fd; ProcessType type; @@ -81,7 +79,6 @@ class CameraRunner { std::thread threshold; std::thread display; - std::atomic m_shaderIdx = 0; std::atomic m_copyInput; diff --git a/gl_hsv_thresholder.cpp b/gl_hsv_thresholder.cpp index 7023095..7481b34 100644 --- a/gl_hsv_thresholder.cpp +++ b/gl_hsv_thresholder.cpp @@ -1,6 +1,6 @@ #include "gl_hsv_thresholder.h" -#include "glerror.h" #include "gl_shader_source.h" +#include "glerror.h" #include #include @@ -78,25 +78,24 @@ GlHsvThresholder::GlHsvThresholder(int width, int height) m_status = createHeadless(); m_context = m_status.context; m_display = m_status.display; - } GlHsvThresholder::~GlHsvThresholder() { - for (auto& program : m_programs) + for (auto &program : m_programs) glDeleteProgram(program); glDeleteBuffers(1, &m_quad_vbo); - for (const auto [key, value]: m_framebuffers) { + for (const auto [key, value] : m_framebuffers) { glDeleteFramebuffers(1, &value); } destroyHeadless(m_status); } -static void on_gl_error(EGLenum error,const char *command,EGLint messageType,EGLLabelKHR threadLabel,EGLLabelKHR objectLabel,const char* message) -{ +static void on_gl_error(EGLenum error, const char *command, EGLint messageType, + EGLLabelKHR threadLabel, EGLLabelKHR objectLabel, + const char 
*message) { printf("Error111: %s\n", message); - } void GlHsvThresholder::start(const std::vector &output_buf_fds) { @@ -108,7 +107,8 @@ void GlHsvThresholder::start(const std::vector &output_buf_fds) { static auto eglDestroyImageKHR = (PFNEGLDESTROYIMAGEKHRPROC)eglGetProcAddress("eglDestroyImageKHR"); static auto glDebugMessageCallbackKHR = - (PFNEGLDEBUGMESSAGECONTROLKHRPROC)eglGetProcAddress("glDebugMessageCallbackKHR"); + (PFNEGLDEBUGMESSAGECONTROLKHRPROC)eglGetProcAddress( + "glDebugMessageCallbackKHR"); if (!eglMakeCurrent(m_display, EGL_NO_SURFACE, EGL_NO_SURFACE, m_context)) { throw std::runtime_error("failed to bind egl context"); @@ -165,7 +165,7 @@ void GlHsvThresholder::start(const std::vector &output_buf_fds) { glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image); GLERROR(); - eglDestroyImageKHR(m_display, image); + eglDestroyImageKHR(m_display, image); GLERROR(); GLuint framebuffer; @@ -216,7 +216,8 @@ void GlHsvThresholder::start(const std::vector &output_buf_fds) { GLERROR(); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); GLERROR(); - glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_width, m_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, nullptr); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_width, m_height, 0, GL_RGBA, + GL_UNSIGNED_BYTE, nullptr); GLERROR(); m_grayscale_texture = grayscale_texture; @@ -228,10 +229,12 @@ void GlHsvThresholder::start(const std::vector &output_buf_fds) { GLERROR(); glBindFramebuffer(GL_FRAMEBUFFER, grayscale_buffer); GLERROR(); - glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_grayscale_texture, 0); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, + GL_TEXTURE_2D, m_grayscale_texture, 0); GLERROR(); - if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { + if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != + GL_FRAMEBUFFER_COMPLETE) { throw std::runtime_error("failed to complete grayscale_buffer"); } @@ -252,7 +255,8 @@ void GlHsvThresholder::start(const 
std::vector &output_buf_fds) { GLERROR(); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_MIRRORED_REPEAT); GLERROR(); - glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_width / 4, m_height / 4, 0, GL_RGB, GL_UNSIGNED_BYTE, nullptr); + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_width / 4, m_height / 4, 0, + GL_RGB, GL_UNSIGNED_BYTE, nullptr); GLERROR(); m_min_max_texture = min_max_texture; @@ -264,10 +268,12 @@ void GlHsvThresholder::start(const std::vector &output_buf_fds) { GLERROR(); glBindFramebuffer(GL_FRAMEBUFFER, min_max_framebuffer); GLERROR(); - glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_min_max_texture, 0); + glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, + GL_TEXTURE_2D, m_min_max_texture, 0); GLERROR(); - if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) { + if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != + GL_FRAMEBUFFER_COMPLETE) { throw std::runtime_error("failed to complete grayscale_buffer"); } @@ -276,7 +282,8 @@ void GlHsvThresholder::start(const std::vector &output_buf_fds) { } void GlHsvThresholder::release() { - if (!eglMakeCurrent(m_display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT)) { + if (!eglMakeCurrent(m_display, EGL_NO_SURFACE, EGL_NO_SURFACE, + EGL_NO_CONTEXT)) { throw std::runtime_error("failed to bind egl context"); } } @@ -350,7 +357,6 @@ int GlHsvThresholder::testFrame( std::to_string(yuv_plane_data[0].fd)); } - GLuint texture; glGenTextures(1, &texture); GLERROR(); diff --git a/gl_hsv_thresholder.h b/gl_hsv_thresholder.h index 2bf02e2..21b8875 100644 --- a/gl_hsv_thresholder.h +++ b/gl_hsv_thresholder.h @@ -16,7 +16,7 @@ #include "headless_opengl.h" -enum class ProcessType: int32_t { +enum class ProcessType : int32_t { None = 0, Hsv, Gray, @@ -56,7 +56,6 @@ class GlHsvThresholder { void setHsvThresholds(double hl, double sl, double vl, double hu, double su, double vu, bool hueInverted); - private: int m_width; int m_height; diff --git a/gl_shader_source.h 
b/gl_shader_source.h index 306e437..8cd59f8 100644 --- a/gl_shader_source.h +++ b/gl_shader_source.h @@ -1,144 +1,145 @@ static constexpr const char *VERTEX_SOURCE = - "#version 100\n" - "" - "attribute vec2 vertex;" - "varying vec2 texcoord;" - "" - "void main(void) {" - " texcoord = 0.5 * (vertex + 1.0);" - " gl_Position = vec4(vertex, 0.0, 1.0);" - "}"; + "#version 100\n" + "" + "attribute vec2 vertex;" + "varying vec2 texcoord;" + "" + "void main(void) {" + " texcoord = 0.5 * (vertex + 1.0);" + " gl_Position = vec4(vertex, 0.0, 1.0);" + "}"; static constexpr const char *NONE_FRAGMENT_SOURCE = - "#version 100\n" - "#extension GL_OES_EGL_image_external : require\n" - "" - "precision lowp float;" - "precision lowp int;" - "" - "varying vec2 texcoord;" - "" - "uniform samplerExternalOES tex;" - "" - "void main(void) {" - " vec3 color = texture2D(tex, texcoord).rgb;" - " gl_FragColor = vec4(color.bgr, 0);" - "}"; + "#version 100\n" + "#extension GL_OES_EGL_image_external : require\n" + "" + "precision lowp float;" + "precision lowp int;" + "" + "varying vec2 texcoord;" + "" + "uniform samplerExternalOES tex;" + "" + "void main(void) {" + " vec3 color = texture2D(tex, texcoord).rgb;" + " gl_FragColor = vec4(color.bgr, 0);" + "}"; static constexpr const char *HSV_FRAGMENT_SOURCE = - "#version 100\n" - "#extension GL_OES_EGL_image_external : require\n" - "" - "precision lowp float;" - "precision lowp int;" - "" - "varying vec2 texcoord;" - "" - "uniform vec3 lowerThresh;" - "uniform vec3 upperThresh;" - "uniform bool invertHue;" - "uniform samplerExternalOES tex;" - "" - "vec3 rgb2hsv(const vec3 p) {" - " const vec4 H = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);" - // Using ternary seems to be faster than using mix and step - " vec4 o = mix(vec4(p.bg, H.wz), vec4(p.gb, H.xy), step(p.b, p.g));" - " vec4 t = mix(vec4(o.xyw, p.r), vec4(p.r, o.yzx), step(o.x, p.r));" - "" - " float O = t.x - min(t.w, t.y);" - " const float n = 1.0e-10;" - " return vec3(abs(t.z + (t.w - t.y) / 
(6.0 * O + n)), O / (t.x + n), " - "t.x);" - "}" - "" - "bool inRange(vec3 hsv) {" - " const float epsilon = 0.0001;" - " bvec3 botBool = greaterThanEqual(hsv, lowerThresh - epsilon);" - " bvec3 topBool = lessThanEqual(hsv, upperThresh + epsilon);" - " if (invertHue) {" - " return !(botBool.x && topBool.x) && all(botBool.yz) && all(topBool.yz);" - " } else {" - " return all(botBool) && all(topBool);" - " }" - "}" - "" - "void main(void) {" - " vec3 col = texture2D(tex, texcoord).rgb;" - " gl_FragColor = vec4(col.bgr, int(inRange(rgb2hsv(col))));" - "}"; + "#version 100\n" + "#extension GL_OES_EGL_image_external : require\n" + "" + "precision lowp float;" + "precision lowp int;" + "" + "varying vec2 texcoord;" + "" + "uniform vec3 lowerThresh;" + "uniform vec3 upperThresh;" + "uniform bool invertHue;" + "uniform samplerExternalOES tex;" + "" + "vec3 rgb2hsv(const vec3 p) {" + " const vec4 H = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);" + // Using ternary seems to be faster than using mix and step + " vec4 o = mix(vec4(p.bg, H.wz), vec4(p.gb, H.xy), step(p.b, p.g));" + " vec4 t = mix(vec4(o.xyw, p.r), vec4(p.r, o.yzx), step(o.x, p.r));" + "" + " float O = t.x - min(t.w, t.y);" + " const float n = 1.0e-10;" + " return vec3(abs(t.z + (t.w - t.y) / (6.0 * O + n)), O / (t.x + n), " + "t.x);" + "}" + "" + "bool inRange(vec3 hsv) {" + " const float epsilon = 0.0001;" + " bvec3 botBool = greaterThanEqual(hsv, lowerThresh - epsilon);" + " bvec3 topBool = lessThanEqual(hsv, upperThresh + epsilon);" + " if (invertHue) {" + " return !(botBool.x && topBool.x) && all(botBool.yz) && " + "all(topBool.yz);" + " } else {" + " return all(botBool) && all(topBool);" + " }" + "}" + "" + "void main(void) {" + " vec3 col = texture2D(tex, texcoord).rgb;" + " gl_FragColor = vec4(col.bgr, int(inRange(rgb2hsv(col))));" + "}"; static constexpr const char *GRAY_FRAGMENT_SOURCE = - "#version 100\n" - "#extension GL_OES_EGL_image_external : require\n" - "" - "precision lowp float;" - "precision lowp 
int;" - "" - "varying vec2 texcoord;" - "" - "uniform samplerExternalOES tex;" - "" - "void main(void) {" - " vec3 gammaColor = texture2D(tex, texcoord).rgb;" - " vec3 color = pow(gammaColor, vec3(2.0));" - " float gray = dot(color, vec3(0.2126, 0.7152, 0.0722));" - " float gammaGray = sqrt(gray);" - " gl_FragColor = vec4(color.bgr, gammaGray);" - "}"; + "#version 100\n" + "#extension GL_OES_EGL_image_external : require\n" + "" + "precision lowp float;" + "precision lowp int;" + "" + "varying vec2 texcoord;" + "" + "uniform samplerExternalOES tex;" + "" + "void main(void) {" + " vec3 gammaColor = texture2D(tex, texcoord).rgb;" + " vec3 color = pow(gammaColor, vec3(2.0));" + " float gray = dot(color, vec3(0.2126, 0.7152, 0.0722));" + " float gammaGray = sqrt(gray);" + " gl_FragColor = vec4(color.bgr, gammaGray);" + "}"; static constexpr const char *TILING_FRAGMENT_SOURCE = - "#version 100\n" - "" - "precision lowp float;" - "precision lowp int;" - "" - "uniform sampler2D tex;" - "varying vec2 texcoord;" - "uniform vec2 resolution_in;" - "" - "void main(void) {" - " float max_so_far = 0.0;" - " float min_so_far = 1.0;" - " for (int i = 0; i < 4; i++) {" - " for(int j = 0; j < 4; j++) {" - " vec2 offset = vec2(float(i), float(j)) / resolution_in;" - " float cur = texture2D(tex, texcoord + offset).w;" - " max_so_far = max(max_so_far, cur);" - " min_so_far = min(min_so_far, cur);" - " }" - " }" - " gl_FragColor = vec4(max_so_far, min_so_far, 0.0, 0.0);" - "}"; + "#version 100\n" + "" + "precision lowp float;" + "precision lowp int;" + "" + "uniform sampler2D tex;" + "varying vec2 texcoord;" + "uniform vec2 resolution_in;" + "" + "void main(void) {" + " float max_so_far = 0.0;" + " float min_so_far = 1.0;" + " for (int i = 0; i < 4; i++) {" + " for(int j = 0; j < 4; j++) {" + " vec2 offset = vec2(float(i), float(j)) / resolution_in;" + " float cur = texture2D(tex, texcoord + offset).w;" + " max_so_far = max(max_so_far, cur);" + " min_so_far = min(min_so_far, cur);" + " 
}" + " }" + " gl_FragColor = vec4(max_so_far, min_so_far, 0.0, 0.0);" + "}"; static constexpr const char *THRESHOLDING_FRAGMENT_SOURCE = - "#version 100\n" - "" - "precision lowp float;" - "precision lowp int;" - "" - "uniform sampler2D tex;" - "uniform sampler2D tiles;" - "varying vec2 texcoord;" - "uniform vec2 tile_resolution;" - "" - "void main(void) {" - " float max_so_far = 0.0;" - " float min_so_far = 1.0;" - " for (int i = -1; i <= 1; i++) {" - " for(int j = -1; j <= 1; j++) {" - " vec2 offset = vec2(float(i), float(j)) / tile_resolution;" - " vec2 cur = texture2D(tiles, texcoord + offset).xy;" - " max_so_far = max(max_so_far, cur.x);" - " min_so_far = min(min_so_far, cur.y);" - " }" - " }" - "" - " float gray = texture2D(tex, texcoord).w;" - " vec3 color = texture2D(tex, texcoord).rgb;" - " float output_ = 0.5;" - " if ((max_so_far - min_so_far) > (0.1)) {" - " float mean = min_so_far + (max_so_far - min_so_far) / 2.0;" - " output_ = step(mean, gray);" - " }" - " gl_FragColor = vec4(color.bgr, output_);" - "}"; + "#version 100\n" + "" + "precision lowp float;" + "precision lowp int;" + "" + "uniform sampler2D tex;" + "uniform sampler2D tiles;" + "varying vec2 texcoord;" + "uniform vec2 tile_resolution;" + "" + "void main(void) {" + " float max_so_far = 0.0;" + " float min_so_far = 1.0;" + " for (int i = -1; i <= 1; i++) {" + " for(int j = -1; j <= 1; j++) {" + " vec2 offset = vec2(float(i), float(j)) / tile_resolution;" + " vec2 cur = texture2D(tiles, texcoord + offset).xy;" + " max_so_far = max(max_so_far, cur.x);" + " min_so_far = min(min_so_far, cur.y);" + " }" + " }" + "" + " float gray = texture2D(tex, texcoord).w;" + " vec3 color = texture2D(tex, texcoord).rgb;" + " float output_ = 0.5;" + " if ((max_so_far - min_so_far) > (0.1)) {" + " float mean = min_so_far + (max_so_far - min_so_far) / 2.0;" + " output_ = step(mean, gray);" + " }" + " gl_FragColor = vec4(color.bgr, output_);" + "}"; diff --git a/headless_opengl.cpp b/headless_opengl.cpp index 
64ac6f6..c35bbcf 100644 --- a/headless_opengl.cpp +++ b/headless_opengl.cpp @@ -40,10 +40,8 @@ static const EGLint configAttribs[] = {EGL_RED_SIZE, EGL_NONE}; static const EGLint contextAttribs[] = { - EGL_CONTEXT_CLIENT_VERSION, 2, - EGL_CONTEXT_FLAGS_KHR, EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR, - EGL_NONE -}; + EGL_CONTEXT_CLIENT_VERSION, 2, EGL_CONTEXT_FLAGS_KHR, + EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR, EGL_NONE}; HeadlessData createHeadless() { std::vector paths = {"/dev/dri/card1", "/dev/dri/card0"}; diff --git a/libcamera_jni.cpp b/libcamera_jni.cpp index b360919..712df9d 100644 --- a/libcamera_jni.cpp +++ b/libcamera_jni.cpp @@ -62,16 +62,16 @@ Java_org_photonvision_raspi_LibCameraJNI_createCamera(JNIEnv *env, jclass, JNIEXPORT jint Java_org_photonvision_raspi_LibCameraJNI_getSensorModelRaw( JNIEnv *env, jclass clazz) { - bool runner_exists = runner > 0; + bool runner_exists = runner != nullptr; if (!runner_exists) { - Java_org_photonvision_raspi_LibCameraJNI_createCamera(env, clazz, - 320, 240, 30); + Java_org_photonvision_raspi_LibCameraJNI_createCamera(env, clazz, 320, + 240, 30); } if (!runner) { return 0; } - + jint model = runner->model(); if (!runner_exists) { @@ -81,7 +81,6 @@ JNIEXPORT jint Java_org_photonvision_raspi_LibCameraJNI_getSensorModelRaw( return model; } - JNIEXPORT jboolean JNICALL Java_org_photonvision_raspi_LibCameraJNI_startCamera(JNIEnv *, jclass) { if (!runner) { @@ -140,7 +139,8 @@ JNIEXPORT jboolean JNICALL Java_org_photonvision_raspi_LibCameraJNI_setExposure( return true; } -JNIEXPORT jboolean JNICALL Java_org_photonvision_raspi_LibCameraJNI_setAutoExposure( +JNIEXPORT jboolean JNICALL +Java_org_photonvision_raspi_LibCameraJNI_setAutoExposure( JNIEnv *env, jclass, jboolean doAutoExposure) { if (!runner) { return false; @@ -166,7 +166,7 @@ JNIEXPORT jboolean JNICALL Java_org_photonvision_raspi_LibCameraJNI_setAwbGain( if (!runner) { return false; } - + printf("Setting red %f blue %f\n", (float)red, (float)blue); 
runner->cameraGrabber().cameraSettings().awbRedGain = red; @@ -196,9 +196,10 @@ JNIEXPORT jboolean JNICALL Java_org_photonvision_raspi_LibCameraJNI_setRotation( return true; } - JNIEXPORT jboolean JNICALL -Java_org_photonvision_raspi_LibCameraJNI_setFramesToCopy(JNIEnv *, jclass, jboolean copyIn, jboolean copyOut) { +Java_org_photonvision_raspi_LibCameraJNI_setFramesToCopy(JNIEnv *, jclass, + jboolean copyIn, + jboolean copyOut) { if (!runner) { return false; } @@ -210,19 +211,20 @@ Java_org_photonvision_raspi_LibCameraJNI_setFramesToCopy(JNIEnv *, jclass, jbool static MatPair pair = {}; JNIEXPORT jlong JNICALL -Java_org_photonvision_raspi_LibCameraJNI_getFrameCaptureTime(JNIEnv *env, jclass) { +Java_org_photonvision_raspi_LibCameraJNI_getFrameCaptureTime(JNIEnv *env, + jclass) { return pair.captureTimestamp; } JNIEXPORT jlong JNICALL -Java_org_photonvision_raspi_LibCameraJNI_getLibcameraTimestamp(JNIEnv *env, jclass) { +Java_org_photonvision_raspi_LibCameraJNI_getLibcameraTimestamp(JNIEnv *env, + jclass) { timespec ts; clock_gettime(CLOCK_BOOTTIME, &ts); uint64_t now_nsec = (uint64_t)ts.tv_sec * 1000000000ULL + ts.tv_nsec; return (jlong)now_nsec; } - JNIEXPORT jboolean JNICALL Java_org_photonvision_raspi_LibCameraJNI_awaitNewFrame(JNIEnv *env, jclass) { if (!runner) { @@ -257,7 +259,7 @@ Java_org_photonvision_raspi_LibCameraJNI_takeProcessedFrame(JNIEnv *env, JNIEXPORT jboolean JNICALL Java_org_photonvision_raspi_LibCameraJNI_setGpuProcessType(JNIEnv *env, jclass, - jint idx) { + jint idx) { if (!runner) { return false; } @@ -272,4 +274,25 @@ Java_org_photonvision_raspi_LibCameraJNI_getGpuProcessType(JNIEnv *, jclass) { return pair.frameProcessingType; } +JNIEXPORT jboolean JNICALL +Java_org_photonvision_raspi_LibCameraJNI_setAutofocus(JNIEnv *env, jclass, + jboolean doAutofocus) { + if (!runner) { + return false; + } + + runner->cameraGrabber().cameraSettings().doAutofocus = doAutofocus; + return true; +} + +JNIEXPORT jint JNICALL 
+Java_org_photonvision_raspi_LibCameraJNI_getAutofocusStatus(JNIEnv *env, + jclass) { + if (!runner) { + return 3; + } + + return runner->cameraGrabber().getAutofocusStatus(); +} + } // extern "C" diff --git a/libcamera_jni.hpp b/libcamera_jni.hpp index 939e1e5..8acc157 100644 --- a/libcamera_jni.hpp +++ b/libcamera_jni.hpp @@ -121,6 +121,12 @@ Java_org_photonvision_raspi_LibCameraJNI_getGpuProcessType(JNIEnv *, jclass); JNIEXPORT jboolean JNICALL Java_org_photonvision_raspi_LibCameraJNI_setGpuProcessType(JNIEnv *, jclass, jint); +JNIEXPORT jboolean JNICALL +Java_org_photonvision_raspi_LibCameraJNI_setAutofocus(JNIEnv *env, jclass, + jboolean doAutofocus); +JNIEXPORT jint JNICALL +Java_org_photonvision_raspi_LibCameraJNI_getAutofocusStatus(JNIEnv *env, jclass); + #ifdef __cplusplus } #endif diff --git a/main.cpp b/main.cpp index 40ac6b8..7effb6d 100644 --- a/main.cpp +++ b/main.cpp @@ -7,7 +7,7 @@ #include #include -enum class ProcessType_: int32_t { +enum class ProcessType_ : int32_t { None = 0, Hsv, Gray, @@ -16,37 +16,56 @@ enum class ProcessType_: int32_t { void test_res(int width, int height) { int rotation = 180; - Java_org_photonvision_raspi_LibCameraJNI_createCamera(nullptr, nullptr, - width, height, rotation); - // Java_org_photonvision_raspi_LibCameraJNI_setGpuProcessType(nullptr, nullptr, 1); - Java_org_photonvision_raspi_LibCameraJNI_setGpuProcessType(nullptr, nullptr, (jint)ProcessType_::Hsv); - Java_org_photonvision_raspi_LibCameraJNI_setFramesToCopy(nullptr, nullptr, true, true); + Java_org_photonvision_raspi_LibCameraJNI_createCamera( + nullptr, nullptr, width, height, rotation); + // Java_org_photonvision_raspi_LibCameraJNI_setGpuProcessType(nullptr, + // nullptr, 1); + Java_org_photonvision_raspi_LibCameraJNI_setGpuProcessType( + nullptr, nullptr, (jint)ProcessType_::Hsv); + Java_org_photonvision_raspi_LibCameraJNI_setFramesToCopy(nullptr, nullptr, + true, true); Java_org_photonvision_raspi_LibCameraJNI_startCamera(nullptr, nullptr); - 
Java_org_photonvision_raspi_LibCameraJNI_setExposure(nullptr, nullptr, 80 * 800); - Java_org_photonvision_raspi_LibCameraJNI_setBrightness(nullptr, nullptr, 0.0); - Java_org_photonvision_raspi_LibCameraJNI_setAnalogGain(nullptr, nullptr, 20); - Java_org_photonvision_raspi_LibCameraJNI_setAutoExposure(nullptr, nullptr, true); + Java_org_photonvision_raspi_LibCameraJNI_setExposure(nullptr, nullptr, + 80 * 800); + Java_org_photonvision_raspi_LibCameraJNI_setBrightness(nullptr, nullptr, + 0.0); + Java_org_photonvision_raspi_LibCameraJNI_setAnalogGain(nullptr, nullptr, + 20); + Java_org_photonvision_raspi_LibCameraJNI_setAutoExposure(nullptr, nullptr, + true); auto start = std::chrono::steady_clock::now(); - while (std::chrono::steady_clock::now() - start < std::chrono::seconds(3)) { - bool ready = Java_org_photonvision_raspi_LibCameraJNI_awaitNewFrame(nullptr, nullptr); + while (std::chrono::steady_clock::now() - start < std::chrono::seconds(3)) { + bool ready = Java_org_photonvision_raspi_LibCameraJNI_awaitNewFrame( + nullptr, nullptr); if (ready) { static int i = 0; - cv::Mat color_mat = *(cv::Mat*)Java_org_photonvision_raspi_LibCameraJNI_takeColorFrame(nullptr, nullptr); - cv::Mat threshold_mat = *(cv::Mat*)Java_org_photonvision_raspi_LibCameraJNI_takeProcessedFrame(nullptr, nullptr); + cv::Mat color_mat = + *(cv::Mat *) + Java_org_photonvision_raspi_LibCameraJNI_takeColorFrame( + nullptr, nullptr); + cv::Mat threshold_mat = + *(cv::Mat *) + Java_org_photonvision_raspi_LibCameraJNI_takeProcessedFrame( + nullptr, nullptr); - uint64_t captureTime = Java_org_photonvision_raspi_LibCameraJNI_getFrameCaptureTime(nullptr, nullptr); - uint64_t now = Java_org_photonvision_raspi_LibCameraJNI_getLibcameraTimestamp(nullptr, nullptr); - printf("now %lu capture %lu latency %f\n", now, captureTime, (double)(now - captureTime) / 1000000.0); + uint64_t captureTime = + Java_org_photonvision_raspi_LibCameraJNI_getFrameCaptureTime( + nullptr, nullptr); + uint64_t now = + 
Java_org_photonvision_raspi_LibCameraJNI_getLibcameraTimestamp( + nullptr, nullptr); + printf("now %lu capture %lu latency %f\n", now, captureTime, + (double)(now - captureTime) / 1000000.0); i++; static char arr[50]; - snprintf(arr,sizeof(arr),"color_%i.png", i); + snprintf(arr, sizeof(arr), "color_%i.png", i); cv::imwrite(arr, color_mat); - snprintf(arr,sizeof(arr),"thresh_%i.png", i); + snprintf(arr, sizeof(arr), "thresh_%i.png", i); cv::imwrite(arr, threshold_mat); } }