/**

Base class for JavaCV applications.

This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.

This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.

You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc., 51
Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

Copyright (c) 2011 Marsette A. Vona

**/

import com.googlecode.javacpp.*;
import com.googlecode.javacv.*;
import com.googlecode.javacv.cpp.*;
import static com.googlecode.javacv.cpp.opencv_calib3d.*;
import static com.googlecode.javacv.cpp.opencv_contrib.*;
import static com.googlecode.javacv.cpp.opencv_core.*;
import static com.googlecode.javacv.cpp.opencv_features2d.*;
import static com.googlecode.javacv.cpp.opencv_flann.*;
import static com.googlecode.javacv.cpp.opencv_highgui.*;
import static com.googlecode.javacv.cpp.opencv_imgproc.*;
import static com.googlecode.javacv.cpp.opencv_legacy.*;
import static com.googlecode.javacv.cpp.opencv_ml.*;
import static com.googlecode.javacv.cpp.opencv_objdetect.*;
import static com.googlecode.javacv.cpp.opencv_video.*;

import java.awt.Canvas;
import java.awt.event.*;
import static java.awt.event.InputEvent.*;
import static java.awt.event.MouseEvent.*;
import static java.awt.event.KeyEvent.*;
import java.text.DecimalFormat;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.PrintStream;

/**

Base class for JavaCV applications.

See {@link CvDemo} for an example.

Reads input frames from either a camera or a video file. Processes each
frame (no operation by default) and displays the results in any combination
of an OpenCV window ({@link #useWindow}) and a JavaCV CanvasFrame ({@link
#useCanvasFrame}). The default is (only) {@link #useCanvasFrame}.

This class provides basic functions to pause/unpause the live input, emit
debugging information for selected frames, and to save both raw captured
and processed images.

You can use this class as a base for your own work:

1. Create a subclass that overrides the necessary functions. The main place
you probably need to do this is the {@link #process} function. Also, the
various *Ext*() functions are hooks that mostly do nothing in the base
class, but are convenient places for you to insert your own code in a
subclass (but note that you can also override even non-Ext functions, if
needed).

2. Construct an instance of the subclass and configure any instance fields,
e.g. set {@link #maxFPS}, etc.

3. Call {@link #init(int, String[])} or {@link #init()} and then
{@link #mainLoop}.

See {@link CvDemo} for an example; a minimal sketch is also shown below.
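
For illustration only (the class name, app name, and settings here are
hypothetical, not part of this API), a minimal subclass might look like:

<pre>
public class MyDemo extends CvBase {

  public MyDemo() { super("mydemo"); }

  //report the size of debug-selected frames, otherwise pass frames through
  protected IplImage process(IplImage frame) {
    if (dbg) msg("frame "+frameN+" is "+width+"x"+height);
    return frame;
  }

  public static void main(String[] argv) {
    MyDemo d = new MyDemo();
    d.maxFPS = 15.0f;          //configure instance fields before init()
    d.init(argv.length, argv); //parse command line, open capture and window
    d.mainLoop();              //capture/process/display until quit
    d.release();               //free native resources
  }
}
</pre>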

* * @author Marsette Vona **/ public class CvBase { private static final String svnid = "$Id: CvBase.java 44 2012-02-23 17:27:23Z vona $"; /** Default input specification (use next available camera). **/ public static final String DEF_INPUT = "-1"; /** Default width and height for camera input. **/ public static final int DEF_CAM_W = 640, DEF_CAM_H = 480; /** Default maximum frames per second for processing and local display. **/ public static final float DEF_MAX_FPS = 10.0f; /** Default minimum inter-frame delay in ms. **/ public static final int DEF_MIN_DELAY_MS = 20; //opencv has issues if lower /** Default application name. **/ public static final String DEF_APPNAME = "cvbase"; /** Number formatter. **/ public static final DecimalFormat FMT = new DecimalFormat() { { setMaximumFractionDigits(3); } }; /** Format a number for printing. **/ public static final String fmt(double d) { return FMT.format(d); } /** OpenCV capture properties **/ public static final int[] CAP_PROPS = new int[] { CV_CAP_PROP_FRAME_WIDTH, CV_CAP_PROP_FRAME_HEIGHT, CV_CAP_PROP_BRIGHTNESS, CV_CAP_PROP_SATURATION, CV_CAP_PROP_HUE, CV_CAP_PROP_GAIN, CV_CAP_PROP_EXPOSURE }; /** OpenCV capture property names **/ public static final String[] CAP_PROP_NAMES = new String[] { "width", "height", "brightness", "saturation", "hue", "gain", "exposure" }; /** V4L2 control constants **/ public static final int V4L2_CTRL_CLASS_USER = 0x00980000, /* Old-style 'user' controls */ V4L2_CTRL_CLASS_MPEG = 0x00990000, /* MPEG-compression controls */ V4L2_CTRL_CLASS_CAMERA = 0x009a0000, /* Camera class controls */ /* User-class control IDs defined by V4L2 */ V4L2_CID_BASE = (V4L2_CTRL_CLASS_USER | 0x900), V4L2_CID_USER_BASE = V4L2_CID_BASE, /* IDs reserved for driver specific controls */ V4L2_CID_PRIVATE_BASE = 0x08000000, V4L2_CID_USER_CLASS = (V4L2_CTRL_CLASS_USER | 1), V4L2_CID_BRIGHTNESS = (V4L2_CID_BASE+0), V4L2_CID_CONTRAST = (V4L2_CID_BASE+1), V4L2_CID_SATURATION = (V4L2_CID_BASE+2), V4L2_CID_HUE = (V4L2_CID_BASE+3), V4L2_CID_AUDIO_VOLUME = (V4L2_CID_BASE+5), V4L2_CID_AUDIO_BALANCE = (V4L2_CID_BASE+6), V4L2_CID_AUDIO_BASS = (V4L2_CID_BASE+7), V4L2_CID_AUDIO_TREBLE = (V4L2_CID_BASE+8), V4L2_CID_AUDIO_MUTE = (V4L2_CID_BASE+9), V4L2_CID_AUDIO_LOUDNESS = (V4L2_CID_BASE+10), V4L2_CID_BLACK_LEVEL = (V4L2_CID_BASE+11), /* Deprecated */ V4L2_CID_AUTO_WHITE_BALANCE = (V4L2_CID_BASE+12), V4L2_CID_DO_WHITE_BALANCE = (V4L2_CID_BASE+13), V4L2_CID_RED_BALANCE = (V4L2_CID_BASE+14), V4L2_CID_BLUE_BALANCE = (V4L2_CID_BASE+15), V4L2_CID_GAMMA = (V4L2_CID_BASE+16), V4L2_CID_WHITENESS = (V4L2_CID_GAMMA), /* Deprecated */ V4L2_CID_EXPOSURE = (V4L2_CID_BASE+17), V4L2_CID_AUTOGAIN = (V4L2_CID_BASE+18), V4L2_CID_GAIN = (V4L2_CID_BASE+19), V4L2_CID_HFLIP = (V4L2_CID_BASE+20), V4L2_CID_VFLIP = (V4L2_CID_BASE+21), /* Deprecated, use V4L2_CID_PAN_RESET and V4L2_CID_TILT_RESET */ V4L2_CID_HCENTER_DEPRECATED = (V4L2_CID_BASE+22), V4L2_CID_VCENTER_DEPRECATED = (V4L2_CID_BASE+23), V4L2_CID_POWER_LINE_FREQUENCY = (V4L2_CID_BASE+24), V4L2_CID_POWER_LINE_FREQUENCY_DISABLED = 0, V4L2_CID_POWER_LINE_FREQUENCY_50HZ = 1, V4L2_CID_POWER_LINE_FREQUENCY_60HZ = 2, V4L2_CID_HUE_AUTO = (V4L2_CID_BASE+25), V4L2_CID_WHITE_BALANCE_TEMPERATURE = (V4L2_CID_BASE+26), V4L2_CID_SHARPNESS = (V4L2_CID_BASE+27), V4L2_CID_BACKLIGHT_COMPENSATION = (V4L2_CID_BASE+28), V4L2_CID_LASTP1 = (V4L2_CID_BASE+29), /*last CID+1*/ /* Camera class control IDs */ V4L2_CID_CAMERA_CLASS_BASE = (V4L2_CTRL_CLASS_CAMERA | 0x900), V4L2_CID_CAMERA_CLASS = (V4L2_CTRL_CLASS_CAMERA | 1), V4L2_CID_EXPOSURE_AUTO = 
(V4L2_CID_CAMERA_CLASS_BASE+1), V4L2_EXPOSURE_AUTO = 0, V4L2_EXPOSURE_MANUAL = 1, V4L2_EXPOSURE_SHUTTER_PRIORITY = 2, V4L2_EXPOSURE_APERTURE_PRIORITY = 3, V4L2_CID_EXPOSURE_ABSOLUTE = (V4L2_CID_CAMERA_CLASS_BASE+2), V4L2_CID_EXPOSURE_AUTO_PRIORITY = (V4L2_CID_CAMERA_CLASS_BASE+3), V4L2_CID_PAN_RELATIVE = (V4L2_CID_CAMERA_CLASS_BASE+4), V4L2_CID_TILT_RELATIVE = (V4L2_CID_CAMERA_CLASS_BASE+5), V4L2_CID_PAN_RESET = (V4L2_CID_CAMERA_CLASS_BASE+6), V4L2_CID_TILT_RESET = (V4L2_CID_CAMERA_CLASS_BASE+7), V4L2_CID_PAN_ABSOLUTE = (V4L2_CID_CAMERA_CLASS_BASE+8), V4L2_CID_TILT_ABSOLUTE = (V4L2_CID_CAMERA_CLASS_BASE+9), V4L2_CID_FOCUS_ABSOLUTE = (V4L2_CID_CAMERA_CLASS_BASE+10), V4L2_CID_FOCUS_RELATIVE = (V4L2_CID_CAMERA_CLASS_BASE+11), V4L2_CID_FOCUS_AUTO = (V4L2_CID_CAMERA_CLASS_BASE+12); /** V4L2 auto camera properties **/ public static final int[] V4L2_AUTO_PROPS = new int[] { V4L2_CID_AUTO_WHITE_BALANCE, V4L2_CID_AUTOGAIN, V4L2_CID_HUE_AUTO, V4L2_CID_EXPOSURE_AUTO, V4L2_CID_EXPOSURE_AUTO_PRIORITY, V4L2_CID_FOCUS_AUTO }; /** V4L2 auto camera property names **/ public static final String[] V4L2_AUTO_PROP_NAMES = new String[] { "auto whitebalance", "auto gain", "auto hue", "auto exposure", "auto exposure priority", "auto focus" }; /** V4L2 auto camera property off values **/ public static final int[] V4L2_AUTO_PROP_OFF_VALS = new int[] { 0, //whitebalance 0, //gain 0, //hue V4L2_EXPOSURE_MANUAL, //exposure V4L2_EXPOSURE_APERTURE_PRIORITY, //exp priority (auto time, manual iris) 0 //focus }; /** V4L2 auto camera property on values **/ public static final int[] V4L2_AUTO_PROP_ON_VALS = new int[] { 1, //whitebalance 1, //gain 1, //hue V4L2_EXPOSURE_AUTO, //exposure V4L2_EXPOSURE_APERTURE_PRIORITY, //exp priority (auto time, manual iris) 1 //focus }; /** *

Whether {@link #v4l2EnableAuto} was called more recently than
{@link #v4l2DisableAuto}.

**/ protected boolean v4l2Auto = false; /** OpenCV capgure property override or NaN to use default **/ public double brightness = Double.NaN, saturation = Double.NaN, hue = Double.NaN, gain = Double.NaN, exposure = Double.NaN; /** Capture dimensions. **/ public int width = DEF_CAM_W, height = DEF_CAM_H; /** Maximum frames per second. **/ public float maxFPS = DEF_MAX_FPS; /** Minimum inter-frame delay. **/ public int minDelayMS = DEF_MIN_DELAY_MS; /** Application name. **/ public String appname = DEF_APPNAME; /** Whether to open and use an OpenCV window. **/ public boolean useWindow = false; /** Whether to open and use a JavaCV CanvasFrame. **/ public boolean useCanvasFrame = true; /** Whether processing is currently paused. **/ public boolean paused = false; /** Whether debug is requested for next frame. **/ public boolean dbg = false; /** Strems for messages and warnings, or null to disable. **/ public PrintStream msgStream = System.out, warnStream = System.err; /** The OpenCV capture object, if used. **/ protected CvCapture cap = null; /** The JavaCV frame grabber, if used. **/ protected FrameGrabber grabber = null; /** The OpenCV capture stream index. **/ protected int streamIndex = 0; /** The file save image, if any (freed by destructor iff allocated). **/ protected IplImage saveImage = null; /** Most recently captured image, if any (do not mutate or free). **/ protected IplImage capImage = null; /** Most recently processed image, if any (do not free). **/ protected IplImage procImage = null; /** Most recent captured frame number. **/ protected int frameN = -1; /** Milliseconds per OpenCV clock tick. **/ protected double msPerTick = (1.0/1000.0)*(1.0/cvGetTickFrequency()); //cvGetTickFrequency() returns //ticks per microsecond /** Last frame end time. **/ protected double frameEndMS = Double.NaN; /** Mouse event callback object. **/ protected CvMouseCallback mouseCallback = new CvMouseCallback() { public void call(int event, int x, int y, int flags, Pointer param) { handleMouse(event, x, y, flags); } }; /** The JavaCV CanvasFrame, iff {@link #useCanvasFrame}. **/ protected CanvasFrame canvasFrame = null; /** Last KeyEvent on {@link #canvasFrame} iff {@link #useCanvasFrame} **/ protected volatile KeyEvent canvasFrameKeyEvent = null; /** Current time in milliseconds according to OpenCV clock. **/ protected double nowMS() { return cvGetTickCount()*msPerTick; } /** Display command line help. **/ protected void cmdHelp() { boolean cio = camIndexOptional(); msg(appname+" -?|-h -- display this help"); msg(appname+" "+(cio?"[":"")+"I[:S] [W H]"+(cio?"]":"")+ " "+cmdHelpExtParams()+ "-- use cv cam:stream I[:S] (def -1:0) [at WxH, def "+ width+"x"+height+"]"); msg(appname+" path "+cmdHelpExtParams()+ "-- read frames from video file at path"); cmdHelpExt(); } /** Whether the camera index is optional on the command line. **/ protected boolean camIndexOptional() { return true; } /** Display extra command line help, for subclasses. **/ protected void cmdHelpExt() {} /** Extra command line parameters, for subclasses. **/ protected String cmdHelpExtParams() { return ""; } /** Display GUI help. 
**/ protected void guiHelp() { msg("h -- display this help"); msg("q,ESC -- quit"); msg("SPACE -- toggle run/pause"); msg("g -- debug next frame"); msg("c -- save last captured image"); msg("p -- save last processed image"); msg("o -- dump capture properties"); msg("u -- toggle V4l2 auto camera props"); msg("[,] -- incr,decr V4l2 manual exposure time"); guiHelpExt(); } /** Display extra GUI help, for subclasses. **/ protected void guiHelpExt() {} /** Dump all {@link #CAP_PROPS} **/ public void dumpCaptureProperties() { if (cap != null) { msg("using OpenCV capture object"); for (int i = 0; i < CAP_PROPS.length; i++) { int prop = CAP_PROPS[i]; if (i >= CAP_PROP_NAMES.length) break; String name = CAP_PROP_NAMES[i]; double val = getCaptureProperty(cap, prop); if (val >= 0) msg(name+": "+fmt(val)); else msg("error getting capture "+name); } } else if (grabber != null) { msg("using JavaCV frame grabber"); msg("format: "+grabber.getFormat()); msg("image WxH: "+grabber.getImageWidth()+"x"+grabber.getImageHeight()); msg("framerate: "+grabber.getFrameRate()); msg("trigger mode: "+grabber.isTriggerMode()); msg("BPP: "+grabber.getBitsPerPixel()); msg("color mode: "+grabber.getImageMode()); msg("timeout: "+grabber.getTimeout()); msg("num buffers: "+grabber.getNumBuffers()); msg("gamma: "+grabber.getGamma()); msg("deinterlace: "+grabber.isDeinterlace()); } else msg("W: no capture object, cannot dump properties"); } /** *

Process the given frame.

Default impl is identity.

Note that the passed image may not be mutated. So to do any significant
processing, you will need to allocate your own return image. It is
typically best to do this once for the first frame, store the results in
subclass instance variables, and then deallocate any allocated space in the
subclass destructor.

All frames are guaranteed to have the same dimensions and pixel format.

This will be called for every iteration of mainLoop(), even while paused.
You may use the various instance fields ({@link #frameN}, {@link #capImage},
{@link #paused}, {@link #dbg}, etc) to determine what operations to perform.
Of course, when writing a subclass, you may also add your own fields.
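
As a sketch of the allocate-once pattern described above (the field name
and the particular operation are illustrative, not part of this API), a
subclass that converts each frame to grayscale might do:

<pre>
private IplImage gray = null; //allocated on first frame

protected IplImage process(IplImage frame) {
  if (frame == null) return null;
  if (gray == null) //assumes frames are 8-bit BGR or 8-bit grayscale
    gray = IplImage.create(cvSize(frame.width(), frame.height()),
                           IPL_DEPTH_8U, 1);
  if (frame.nChannels() == 3) cvCvtColor(frame, gray, CV_BGR2GRAY);
  else cvCopy(frame, gray);
  return gray;
}
</pre>

The allocated image can then be freed when no longer needed, e.g. in an
overridden {@link #release}.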

* * @return the processed image, null will cause image display to be skipped * for this frame **/ protected IplImage process(IplImage frame) { return frame; } /** *

Hook to check if {@link #mainLoop} should end.

@return true iff main loop should end

Default impl returns false.

**/ protected boolean doneProcessing() { return false; } /** *

Handle keypresses.

* * @return true to continue processing frames, false to end program **/ protected boolean handleKey(int code) { switch (code) { case 'h': case 'H': { guiHelp(); break; } case 27: case 'q': case 'Q': { msg("quit!"); return false; } case ' ': { paused = !paused; break; } case 'g': case 'G': { dbg = true; break; } case 'c': case 'C': { if (capImage == null) { msg("no capture image"); break; } String fn = appname+"-c"+frameN+".png"; msg("saving capture image to "+fn); if (!save(fn, capImage)) warn("error saving image"); break; } case 'p': case 'P': { if (procImage == null) {msg("no processed image"); break;} String fn = appname+"-p"+frameN+".png"; msg("saving processed image to "+fn); if (!save(fn, procImage)) warn("error saving image"); break; } case 'o': case 'O': dumpCaptureProperties(); break; case '[': { float ms = v4l2GetExposure(); if (ms >= 0) { float newMS = ms-0.1f; if (newMS < 0.1f) newMS = 0.1f; msg("changing exposure from "+fmt(ms)+" to "+fmt(newMS)); v4l2SetExposure(newMS); } else warn("error getting current exposure"); break; } case ']': { float ms = v4l2GetExposure(); if (ms >= 0) { float newMS = ms+0.1f; msg("changing exposure from "+fmt(ms)+" to "+fmt(newMS)); v4l2SetExposure(newMS); } else warn("error getting current exposure"); break; } case 'u': case 'U': { if (v4l2Auto) v4l2DisableAuto(); else v4l2EnableAuto(); break; } default: return handleKeyExt(code); } return true; } /** *

Keypresses not handled by the default implementation of handleKey() are
passed here.

Default impl just prints the keycode.

Overriding this is one way that subclasses can handle extra keypresses; a
sketch is shown below.
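
For example (the key binding and field are arbitrary, not part of this
API), a subclass could toggle its own flag on 'd':

<pre>
protected boolean handleKeyExt(int code) {
  switch (code) {
  case 'd': case 'D': drawOverlay = !drawOverlay; break; //a subclass field
  default: return super.handleKeyExt(code); //prints the unhandled keycode
  }
  return true; //keep processing frames
}
</pre>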

**/ protected boolean handleKeyExt(int code) { msg("unhandled keycode "+code); return true; } /** *

Handle mouse events.

@param event one of the CV_EVENT_* constants (see opencv_highgui.java in
the JavaCV sources for a list)
@param x the x pixel coordinate of the mouse event
@param y the y pixel coordinate of the mouse event
@param flags bitmask of the CV_EVENT_FLAG_* constants (see
opencv_highgui.java in the JavaCV sources for a list)

Default impl prints a message using {@link #mouseEventToString} except for
CV_EVENT_MOUSEMOVE.

Overriding this is one way that subclasses can handle mouse events; a
sketch is shown below.
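
For example (the fields here are illustrative subclass members), a subclass
could record the most recent left click:

<pre>
protected void handleMouse(int event, int x, int y, int flags) {
  if (event == CV_EVENT_LBUTTONDOWN) { clickX = x; clickY = y; }
  else super.handleMouse(event, x, y, flags); //default message printing
}
</pre>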

**/ protected void handleMouse(int event, int x, int y, int flags) { if (event != CV_EVENT_MOUSEMOVE) msg(mouseEventToString(event, x, y, flags)); } /** forward an AWT mouse Event to {@link #handleMouse(int, int, int, int)} **/ protected void handleMouse(MouseEvent e) { if (e == null) return; int event = -1; switch(e.getID()) { case MOUSE_MOVED: event = CV_EVENT_MOUSEMOVE; break; case MOUSE_PRESSED: switch (e.getButton()) { case BUTTON1: event = CV_EVENT_LBUTTONDOWN; break; case BUTTON2: event = CV_EVENT_MBUTTONDOWN; break; case BUTTON3: event = CV_EVENT_RBUTTONDOWN; break; default: return; } break; case MOUSE_RELEASED: switch (e.getButton()) { case BUTTON1: event = CV_EVENT_LBUTTONUP; break; case BUTTON2: event = CV_EVENT_MBUTTONUP; break; case BUTTON3: event = CV_EVENT_RBUTTONUP; break; default: return; } break; default: return; } int flags = 0; int modifiers = e.getModifiersEx(); if ((modifiers&CTRL_DOWN_MASK) != 0) flags |= CV_EVENT_FLAG_CTRLKEY; if ((modifiers&ALT_DOWN_MASK) != 0)flags |= CV_EVENT_FLAG_ALTKEY; if ((modifiers&SHIFT_DOWN_MASK) != 0)flags |= CV_EVENT_FLAG_SHIFTKEY; if ((modifiers&BUTTON1_DOWN_MASK) != 0)flags |= CV_EVENT_FLAG_LBUTTON; if ((modifiers&BUTTON2_DOWN_MASK) != 0)flags |= CV_EVENT_FLAG_MBUTTON; if ((modifiers&BUTTON3_DOWN_MASK) != 0) flags |= CV_EVENT_FLAG_RBUTTON; switch (e.getButton()) { case BUTTON1: flags |= CV_EVENT_FLAG_LBUTTON; break; case BUTTON2: flags |= CV_EVENT_FLAG_MBUTTON; break; case BUTTON3: flags |= CV_EVENT_FLAG_RBUTTON; break; } handleMouse(event, e.getX(), e.getY(), flags); } /** *

Make a human-readable string summarizing a mouse event.


See {@link #handleMouse}.

**/ public String mouseEventToString(int event, int x, int y, int flags) { String ev = "unknown"; switch (event) { case CV_EVENT_MOUSEMOVE: ev = "mousemove"; break; case CV_EVENT_LBUTTONDOWN: ev = "lbuttondown"; break; case CV_EVENT_RBUTTONDOWN: ev = "rbuttondown"; break; case CV_EVENT_MBUTTONDOWN: ev = "mbuttondown"; break; case CV_EVENT_LBUTTONUP: ev = "lbuttonup"; break; case CV_EVENT_RBUTTONUP: ev = "rbuttonup"; break; case CV_EVENT_MBUTTONUP: ev = "mbuttonup"; break; case CV_EVENT_LBUTTONDBLCLK: ev = "lbuttondblclk"; break; case CV_EVENT_RBUTTONDBLCLK: ev = "rbuttondblclk"; break; case CV_EVENT_MBUTTONDBLCLK: ev = "mbuttondblclk"; break; } String fl = ""; if ((flags&CV_EVENT_FLAG_LBUTTON) != 0) fl += "lb "; if ((flags&CV_EVENT_FLAG_RBUTTON) != 0) fl += "rb "; if ((flags&CV_EVENT_FLAG_MBUTTON) != 0) fl += "mb "; if ((flags&CV_EVENT_FLAG_CTRLKEY) != 0) fl += "ctrl "; if ((flags&CV_EVENT_FLAG_SHIFTKEY) != 0) fl += "shift "; if ((flags&CV_EVENT_FLAG_ALTKEY) != 0) fl += "alt "; return ev+" ("+x+", "+y+") "+fl; } /** *

Save an image to file using the OpenCV API.

* * @return true on success. **/ boolean save(String filename, IplImage image){ //(re-)allocate saveImage if necessary, since cvSaveImage can handle only //8-bit 1 or 3 channel images (it also assumes BGR order in the latter //case) if ((image.depth() != IPL_DEPTH_8U) || ((image.nChannels() != 1) && (image.nChannels() != 3))) { int w = image.width(), h = image.height(); int nc = (image.nChannels() > 1) ? 3 : 1; if ((saveImage == null) || (saveImage.width() != w) || (saveImage.height() != h) || (saveImage.nChannels() != nc)) { if (saveImage != null) saveImage.release(); saveImage = IplImage.create(cvSize(w, h), IPL_DEPTH_8U, nc); } } if (saveImage != null) { cvConvertImage(image, saveImage, 0); image = saveImage; } return (cvSaveImage(filename, image) != 0); } /** *

Constructor initializes members.

Call {@link #init(int, String[])} and then {@link #mainLoop} to run the
default application.

**/ public CvBase(String appname) { this.appname = (appname != null) ? appname : getDefAppname(); } /** Uses {@link #DEF_APPNAME}. **/ public CvBase() { this(null); } /** Calls {@link #release}. **/ public void finalize() { release(); } /** Frees memory, closes windows, and releases resources. **/ public void release() { if (saveImage != null) { saveImage.release(); saveImage = null; } if (cap != null) { cvReleaseCapture(cap); cap = null; } if (grabber != null) { try { grabber.stop(); } catch (Exception e) { warn("W: error stopping JavaCV frame grabber: "+e.getMessage()); } grabber = null; } if (canvasFrame != null) { canvasFrame.dispose(); canvasFrame = null; } canvasFrameKeyEvent = null; } /** *

Initialize members based on command line arguments.

In particular, this constructs {@link #cap} or {@link #grabber} (see
{@link #makeGrabber}) and opens the main window (named {@link #appname}).
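
For instance, given an instance demo of this class (or a subclass), the
convenience overloads cover the common cases (the file name and camera
indices here are arbitrary examples):

<pre>
demo.init();                          //next available camera, default size
demo.init(0, 640, 480);               //camera 0 at 640x480
demo.init("movie.avi");               //read frames from a video file
demo.init(1, new String[] {"950:0"}); //Kinect 0 depth, see makeGrabber()
</pre>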

* * @return the number of arguments eaten **/ public int init(int argc, String argv[]) { msg("OpenCV "+CV_VERSION+ " ("+CV_MAJOR_VERSION+"."+CV_MINOR_VERSION+"."+ CV_SUBMINOR_VERSION+")"); msg("JavaCV build timestamp "+ JavaCV.class.getPackage().getImplementationVersion()); int ate = 0; for (int i = 0; i < argc; i++) if (("-h".equals(argv[i])) || ("-?".equals(argv[i]))) { cmdHelp(); System.exit(0); } String input = (argc >= 1) ? argv[0] : getDefInput(); if (argc >= 1) ate++; int index = -2; try { int ci = input.indexOf(':'); if (ci >= 0) { index = Integer.parseInt(input.substring(0,ci)); streamIndex = Integer.parseInt(input.substring(ci+1)); } else index = Integer.parseInt(input); //either no args or first arg parsed as an int; open a camera int w = width, h = height; if (argc >= 3) { try { w = Integer.parseInt(argv[1]); ate++; try { h = Integer.parseInt(argv[2]); ate++; } catch (NumberFormatException nfeh) {} } catch (NumberFormatException nfew) {} } //override OpenCV cap obj codepath with JavaCV grabber grabber = makeGrabber(index, streamIndex); if (grabber == null) cap = cvCreateCameraCapture(index); if ((grabber == null) && (cap == null)) { msg("error opening camera "+index+":"+streamIndex); System.exit(-1); } if (cap != null) { int rw = setCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH, w); int rh = setCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, h); if ((rw < 0) || (rh < 0)) { warn("error setting camera to "+w+"x"+h); width = (int) getCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH); height = (int) getCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT); } for (int i = 0; i < CAP_PROPS.length; i++) { int prop = CAP_PROPS[i]; if ((prop == CV_CAP_PROP_FRAME_WIDTH) || (prop == CV_CAP_PROP_FRAME_HEIGHT)) continue; if (i >= CAP_PROP_NAMES.length) break; String name = CAP_PROP_NAMES[i]; try { double val = getClass().getField(name).getDouble(this); if (!Double.isNaN(val)) { if (setCaptureProperty(cap, CAP_PROPS[i], val) < 0) warn("error setting "+name+" to "+fmt(val)); else msg("set "+name+" to "+fmt(val)); } } catch (IllegalAccessException e) { } catch (NoSuchFieldException e) { } } } msg("reading frames from "+ ((index >= 0) ? ("camera "+index) : "first available camera")+ ", stream "+streamIndex); } catch (NumberFormatException nfe) { //input specifier did not parse as an int, try to open it as a video file if ((cap = cvCreateFileCapture(input)) == null){ msg("error opening file \""+input+"\""); System.exit(-1); } width = (int) getCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH); height = (int) getCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT); msg("reading frames from file \""+input+"\""); } if (useWindow) { if (cvStartWindowThread() == 0) warn("error starting window thread"); if (cvNamedWindow(appname, CV_WINDOW_AUTOSIZE) == 0) warn("error opening window"); cvMoveWindow(appname, 0, 0); cvSetMouseCallback(appname, mouseCallback, null); } if (useCanvasFrame) { canvasFrame = new CanvasFrame(appname); Canvas c = canvasFrame.getCanvas(); c.addKeyListener(new KeyAdapter() { public void keyTyped(KeyEvent e) { if (e.getID() == KEY_TYPED) { if (canvasFrameKeyEvent == null) { canvasFrameKeyEvent = e; } else warn("dropped key "+e.getKeyChar()); } } }); c.addMouseListener(new MouseAdapter() { public void mousePressed(MouseEvent e) { handleMouse(e); } public void mouseReleased(MouseEvent e) { handleMouse(e); } }); c.addMouseMotionListener(new MouseAdapter() { public void mouseMoved(MouseEvent e) { handleMouse(e); } }); } return initExt(argc, argv, ate); } /** *

Hook to allow a JavaCV FrameGrabber to replace the usual OpenCV capture
object.

@param cameraIndex the OpenCV camera index
@param streamIndex the OpenCV stream index

@return non-null to use a FrameGrabber

Default impl only takes action if cameraIndex is in the range 950-999,
which is the upper half of the range reserved for CV_CAP_OPENNI=900. In
that case we try to use OpenKinectFrameGrabber, which uses libfreenect, if
possible, instead of OpenCV, which uses OpenNI. This only works if
streamIndex is CV_CAP_OPENNI_DEPTH_MAP=0 or CV_CAP_OPENNI_BGR_IMAGE=5.

The stream index sets the default capture image type (the
OpenKinectFrameGrabber "format", which can be either "depth" or "video");
the other image type can then be acquired by calling

<pre>
((OpenKinectFrameGrabber) grabber).grabDepth()
</pre>

or

<pre>
((OpenKinectFrameGrabber) grabber).grabVideo()
</pre>

specifically, or by changing the grabber format.
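
As a sketch (assuming the grabber was created by this hook with the depth
stream as the default format), a subclass could also grab the video stream
for each frame in {@link #process}:

<pre>
protected IplImage process(IplImage frame) {
  if (grabber instanceof OpenKinectFrameGrabber) {
    try {
      IplImage video = ((OpenKinectFrameGrabber) grabber).grabVideo();
      //... combine the depth frame and the video image here ...
    } catch (Exception e) { warn("grab error: "+e.getMessage()); }
  }
  return frame;
}
</pre>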

**/ protected FrameGrabber makeGrabber(int cameraIndex, int streamIndex) { FrameGrabber grabber = null; if ((cameraIndex >= CV_CAP_OPENNI+50) && (cameraIndex < (CV_CAP_OPENNI+100))) { switch (streamIndex) { case CV_CAP_OPENNI_DEPTH_MAP: grabber = new OpenKinectFrameGrabber(cameraIndex-CV_CAP_OPENNI-50); grabber.setFormat("depth"); grabber.setImageMode(FrameGrabber.ImageMode.RAW); break; case CV_CAP_OPENNI_BGR_IMAGE: grabber = new OpenKinectFrameGrabber(cameraIndex-CV_CAP_OPENNI-50); grabber.setFormat("video"); grabber.setImageMode(FrameGrabber.ImageMode.RAW); break; } } if (grabber != null) { try { grabber.start(); } catch (Exception e) { warn("error starting JavaCV frame grabber: "+e.getMessage()); } } return grabber; } /** calls {@link #init(int, String[])} with no args **/ public int init() { return init(0, new String[0]); } /** calls {@link #init(int, String[])} with video file name **/ public int init(String fname) { return init(1, new String[] {fname}); } /** calls {@link #init(int, String[])} with cam index and dimensions **/ public int init(int camIndex, int w, int h) { return init(3, new String[] {Integer.toString(camIndex), Integer.toString(w), Integer.toString(h)}); } /** *

Extra initialization, for subclasses.

* * @param ate the number of command line arguments already eaten by {@link * #init(int, String[])} * * @return the total number of arguments eaten **/ public int initExt(int argc, String argv[], int ate) { return ate; } /** *

Main frame processing loop.

Default impl acquires a new frame from {@link #cap} (stream {@link
#streamIndex}) or from {@link #grabber}, calls {@link #process}, displays
the result (if any), quits if {@link #doneProcessing}, otherwise delays for
the remainder of the minimum frame time (inverse of {@link #maxFPS}),
handles user input, and then loops.

**/ public void mainLoop() { double frameStartMS, startMS; int minFrameMS = (int) (1000.0/maxFPS); frameN = -1; for (;;) { if (doneProcessing()) return; frameStartMS = nowMS(); if (!paused) { if (dbg) msg("-- capturing frame "+frameN); startMS = nowMS(); if (cap != null) { cvGrabFrame(cap); capImage = cvRetrieveFrame(cap, streamIndex); } else if (grabber != null) { try { capImage = grabber.grab(); } catch (Exception e) { warn("error grabbing frame with JavaCV: "+e.getMessage()); } } if (capImage != null) { width = capImage.width(); height = capImage.height(); if (dbg) msg("captured "+width+"x"+height+ " frame ("+fmt(nowMS()-startMS)+"ms)"); if (frameN == 0) msg("capture size "+width+"x"+height+ ", "+capImage.nChannels()+" channels"+ ", "+((capImage.depth())&(~IPL_DEPTH_SIGN))+" bits"+ ", "+((((capImage.depth())&IPL_DEPTH_SIGN) != 0) ? "signed" : "unsigned")); frameN++; } else { warn("error capturing frame"); } } else if (dbg) msg("-- paused on frame "+frameN); if (dbg) msg("processing..."); startMS = nowMS(); procImage = process(capImage); if (dbg) { if (procImage != null) msg("processed into "+procImage.width()+"x"+procImage.height()+ " image ("+fmt(nowMS()-startMS)+"ms)"); else msg("process returned null image ("+fmt(nowMS()-startMS)+"ms)"); } if ((useWindow && (cvGetWindowHandle(appname) == null)) || (useCanvasFrame && (!canvasFrame.isVisible()))) { msg("main window closed, exiting"); break; } if (procImage != null) { //note: small race condition here; if user closes window now, it will be //reopened when we call cvShowImage() if (dbg) msg("displaying processed image"); startMS = nowMS(); //note: if necessary, this call will deal with whatever bit depth or //number of channels the image may have (it will assume BGR order for 3 //channel images) if (useWindow) cvShowImage(appname, procImage); if (useCanvasFrame) canvasFrame.showImage(procImage); if (dbg) msg("displayed "+procImage.width()+"x"+procImage.height() +" image ("+fmt(nowMS()-startMS)+"ms)"); } double lastFrameEndMS = frameEndMS; frameEndMS = nowMS(); if (dbg) msg("frame time "+fmt(frameEndMS - frameStartMS)+"ms"); int waitUntilMS = (int) (frameStartMS + minFrameMS); if (waitUntilMS < frameEndMS) waitUntilMS = (int) (nowMS()+minDelayMS); if (dbg) msg("-- finished frame "+(paused ? frameN : (frameN-1)) +" ("+fmt(1000.0/(frameEndMS-lastFrameEndMS))+" FPS)"); boolean wasDbg = dbg; if (doneProcessing()) return; startMS = nowMS(); for (int w = (int) (waitUntilMS-nowMS()); w > 0; w = (int) (waitUntilMS-nowMS())) { if (dbg) msg("waiting "+w+"ms (or until keypress)"); try { int c = waitForKeypresss(w); if (c < 0) break; //cvWaitKey timed out if (!handleKey(c)) { System.out.flush(); return; } } catch (InterruptedException e) { System.out.flush(); return; } } System.out.flush(); if (Thread.interrupted()) return; if (wasDbg) { msg("waited "+fmt(nowMS()-startMS)+"ms"); dbg = false; } } } /** *

Wait for a keypress.

@param timeoutMS maximum time to wait or 0 to wait forever

Returns when the first keypress is detected in the OpenCV window iff
{@link #useWindow}, and any {@link #canvasFrame} iff {@link #useCanvasFrame}.

**/ public int waitForKeypresss(int timeoutMS) throws InterruptedException { int c = -1; double deadlineMS = nowMS() + timeoutMS; while ((c < 0) && ((timeoutMS == 0) || (nowMS() < deadlineMS))) { if (useWindow) c = cvWaitKey(minDelayMS); if (c > 0) break; KeyEvent ke = canvasFrameKeyEvent; //volatile if (useCanvasFrame && (ke != null)) { canvasFrameKeyEvent = null; c = ke.getKeyChar(); break; } if (c > 0) break; } return c; } /** *

Try to disable all auto stuff for a V4L2 camera.


Call e.g. from {@link #initExt}.
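
For example (the exposure value is arbitrary), a subclass could fix the
camera settings at startup by overriding {@link #initExt}:

<pre>
public int initExt(int argc, String argv[], int ate) {
  v4l2DisableAuto();    //only has effect with an OpenCV V4L2 capture
  v4l2SetExposure(10f); //10ms manual exposure
  return ate;
}
</pre>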

**/ public void v4l2DisableAuto() { if (cap == null) { warn("no capture obj, cannot disable auto"); return; } for (int i = 0; i < V4L2_AUTO_PROPS.length; i++) { int prop = V4L2_AUTO_PROPS[i], val = V4L2_AUTO_PROP_OFF_VALS[i]; String name = V4L2_AUTO_PROP_NAMES[i]; if (setCaptureProperty(cap, prop, val) < 0) warn("error disabling "+name); else msg("disabled "+name); } v4l2Auto = false; } /** *

Try to enable all auto stuff for a V4L2 camera.


Call e.g. from {@link #initExt}.

**/ public void v4l2EnableAuto() { if (cap == null) { warn("W: no capture obj, cannot enable auto"); return; } for (int i = 0; i < V4L2_AUTO_PROPS.length; i++) { int prop = V4L2_AUTO_PROPS[i], val = V4L2_AUTO_PROP_ON_VALS[i]; String name = V4L2_AUTO_PROP_NAMES[i]; if (setCaptureProperty(cap, prop, val) < 0) warn("error enabling "+name); else msg("enabled "+name); } v4l2Auto = true; } /** *

Try to set the exposure time on a V4L2 camera.

* * @param ms the new exposure time in milliseconds (0.1ms resolution) if * positive, else set auto exposure **/ public void v4l2SetExposure(float ms) { if (cap == null) { warn("W: no capture obj, cannot set exposure"); return; } if (ms > 0) { int tms = (int) (ms*10); if (setCaptureProperty(cap, V4L2_CID_EXPOSURE_AUTO, V4L2_EXPOSURE_MANUAL) < 0) warn("error enabling manual exposure"); else msg("enabled manual exposure"); if (setCaptureProperty(cap, V4L2_CID_EXPOSURE_ABSOLUTE, tms) < 0) warn("error setting exposure to "+fmt(tms/10f)+"ms"); else msg("set exposure to "+fmt(tms/10f)+"ms"); } else { if (setCaptureProperty(cap, V4L2_CID_EXPOSURE_AUTO, V4L2_EXPOSURE_AUTO) < 0) warn("error enabling auto exposure"); else msg("enabled auto exposure"); if (setCaptureProperty(cap, V4L2_CID_EXPOSURE_AUTO_PRIORITY, V4L2_EXPOSURE_APERTURE_PRIORITY) < 0) warn("error enabling exposure aperture priority"); else msg("enabled exposure aperture priority"); } } /** *

Try to get the exposure time on a V4L2 camera.

* * @return the exposure time in milliseconds (0.1ms resolution) or -1 if it * cannot be determined **/ public float v4l2GetExposure() { return (cap != null) ? ((float) getCaptureProperty(cap, V4L2_CID_EXPOSURE_ABSOLUTE)) : -1; } /** *

Wraps cvSetCaptureProperty(), catches RuntimeException.

**/ public int setCaptureProperty(CvCapture cap, int id, double value) { try { return cvSetCaptureProperty(cap, id, value); } catch (RuntimeException e) { warn("runtime exception setting capture property "+id+" to "+value); return -1; } } /** *

Wraps cvGetCaptureProperty(), catches RuntimeException.

**/ public double getCaptureProperty(CvCapture cap, int id) { try { return cvGetCaptureProperty(cap, id); } catch (RuntimeException e) { warn("runtime exception getting capture property "+id); return -1; } } /** Display a message to {@link #msgStream}, if any. **/ protected void msg(String m) { if (msgStream != null) msgStream.println(getClass().getSimpleName()+" "+m); } /** Display a warning to {@link #warnStream}, if any. **/ protected void warn(String m) { if (warnStream != null) warnStream.println(getClass().getSimpleName()+" "+m); } /** Gets the default appname, may be overridden. **/ protected String getDefAppname() { return DEF_APPNAME; } /** Gets the default input, may be overridden. **/ protected String getDefInput() { return DEF_INPUT; } }