From e665da4376d9899158c473d4b9ac69917181034b Mon Sep 17 00:00:00 2001
From: Brian Lewis
Date: Sat, 20 Jul 2013 21:36:05 -0500
Subject: [PATCH 1/3] tr '\r' '\n' VideoStream.java

---
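Annotation for reviewers (this note sits after the "---" and is ignored by git am): once the file is re-lined, the class reads as ordinary Java. A minimal polling-loop sketch against the API documented in this file; Device.open(), OpenNI.initialize()/shutdown(), and the VideoFrameRef accessors are assumed from the standard OpenNI2 Java wrapper and are not part of this patch.

  import org.openni.*;

  public class DepthPoller {
    public static void main(String[] args) {
      OpenNI.initialize();                           // assumed wrapper entry point
      Device device = Device.open();                 // assumed: opens the first available device
      if (!device.hasSensor(SensorType.DEPTH)) {
        System.err.println("no depth sensor on this device");
        return;
      }
      VideoStream stream = VideoStream.create(device, SensorType.DEPTH);
      stream.start();
      for (int i = 0; i < 100; i++) {
        VideoFrameRef frame = stream.readFrame();    // blocks until a new frame arrives
        System.out.println("frame " + frame.getFrameIndex() + ": "
            + frame.getWidth() + "x" + frame.getHeight());
        frame.release();                             // assumed: frees the native frame buffer
      }
      stream.stop();
      stream.destroy();                              // best practice per the destroy() javadoc
      device.close();
      OpenNI.shutdown();
    }
  }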
 .../src/org/openni/VideoStream.java                | 398 +++++++++++++++++-
 1 file changed, 397 insertions(+), 1 deletion(-)

diff --git a/Wrappers/java/OpenNI.java/src/org/openni/VideoStream.java b/Wrappers/java/OpenNI.java/src/org/openni/VideoStream.java
index 7acd1517..d7e2020c 100644
--- a/Wrappers/java/OpenNI.java/src/org/openni/VideoStream.java
+++ b/Wrappers/java/OpenNI.java/src/org/openni/VideoStream.java
@@ -1 +1,397 @@
-package org.openni; import java.util.Map; import java.util.HashMap; import java.util.Iterator; /** * The {@link VideoStream} object encapsulates a single video stream from a device. Once created, it * is used to start data flow from the device, and to read individual frames of data. This is the * central class used to obtain data in OpenNI. It provides the ability to manually read data in a * polling loop, as well as providing events and a Listener class that can be used to implement * event-driven data acquisition. * * Aside from the video data frames themselves, the class offers a number of functions used for * obtaining information about a {@link VideoStream}. Field of view, available video modes, and * minimum and maximum valid pixel values can all be obtained. * * In addition to obtaining data, the {@link VideoStream} object is used to set all configuration * properties that apply to a specific stream (rather than to an entire device). In particular, it * is used to control cropping, mirroring, and video modes. * * A valid, initialized device that provides the desired stream type is required to create a stream. * * Several video streams can be created to stream data from the same sensor. This is useful if * several components of an application need to read frames separately. * * While some devices might allow different streams from the same sensor to have different * configurations, most devices will have a single configuration for the sensor, shared by all * streams. */ public class VideoStream { /** * The {@link CameraSettings} object encapsulates camera settings for a single device. Once * created, it is used to get and set auto exposure and auto white balance modes. */ public class CameraSettings { /** * Set Auto Exposure Enabled for the corresponding sensor * * @param enabled true to enable auto exposure mode, false to disable it */ public void setAutoExposureEnabled(boolean enabled) { NativeMethods.checkReturnStatus(NativeMethods.oniStreamSetProperty(mVideoStream.getHandle(), NativeMethods.STREAM_PROPERTY_AUTO_EXPOSURE, enabled)); } /** * Set Auto White Balance for the corresponding sensor * * @param enabled true to enable auto white balance mode, false to disable it */ public void setAutoWhiteBalanceEnabled(boolean enabled) { NativeMethods.checkReturnStatus(NativeMethods.oniStreamSetProperty(mVideoStream.getHandle(), NativeMethods.STREAM_PROPERTY_AUTO_WHITE_BALANCE, enabled)); } /** * Get Auto Exposure Enabled state for the corresponding sensor * * @return true if auto exposure mode is currently enabled */ public boolean getAutoExposureEnabled() { OutArg<Boolean> val = new OutArg<Boolean>(); NativeMethods.oniStreamGetBoolProperty(mVideoStream.getHandle(), NativeMethods.STREAM_PROPERTY_AUTO_EXPOSURE, val); return val.mValue; } /** * Get Auto White Balance Enabled state for the corresponding sensor * * @return true if auto white balance mode is currently enabled */ public boolean getAutoWhiteBalanceEnabled() { OutArg<Boolean> val = new OutArg<Boolean>(); NativeMethods.oniStreamGetBoolProperty(mVideoStream.getHandle(), NativeMethods.STREAM_PROPERTY_AUTO_WHITE_BALANCE, val); return val.mValue; } private CameraSettings(VideoStream videoStream) { this.mVideoStream = videoStream; } final private VideoStream mVideoStream; } /** * The VideoStream::NewFrameListener interface is provided to allow the implementation of event * driven frame reading. To use it, create a class that implements it and override the * onFrameReady() method. Then, register your created class with an active {@link VideoStream} * using the {@link #addNewFrameListener(org.openni.VideoStream.NewFrameListener)} function. Once * this is done, the event handler function you implemented will be called whenever a new frame * becomes available. You may call {@link org.openni.VideoStream#readFrame()} from within the * event handler. */ public interface NewFrameListener { /** * Derived classes should implement this function to handle new frames. */ public void onFrameReady(VideoStream stream); } /** * Creates a stream of frames from a specific sensor type of a specific device. You must supply a * reference to a Device that supplies the sensor type requested. You can use * {@link org.openni.Device#hasSensor(SensorType)} to check whether a given sensor is available on * your target device before calling create(). * * @param device A reference to the {@link Device} you want to create the stream on. * @param sensorType The type of sensor the stream should produce data from. */ public static VideoStream create(Device device, SensorType sensorType) { VideoStream videoStream = new VideoStream(sensorType); if (mFrameListeners == null) mFrameListeners = new HashMap<VideoStream, NewFrameListener>(); NativeMethods.checkReturnStatus(NativeMethods.oniDeviceCreateStream(device.getHandle(), sensorType.toNative(), videoStream)); return videoStream; } /** * Destroy this stream. This function is currently called automatically by the destructor, but it * is considered a best practice for applications to manually call this function on any * {@link VideoStream} that they call create() for. */ public void destroy() { NativeMethods.oniStreamDestroy(getHandle(), mCallbackHandle); mStreamHandle = 0; } /** * Provides the {@link SensorInfo} object associated with the sensor that is producing this * {@link VideoStream}. * * {@link SensorInfo} is useful primarily as a means of learning which video modes are valid for * this VideoStream. * * @return SensorInfo object associated with the sensor providing this stream. */ public final SensorInfo getSensorInfo() { return NativeMethods.oniStreamGetSensorInfo(getHandle()); } /** * Starts data generation from this video stream. */ public void start() { NativeMethods.checkReturnStatus(NativeMethods.oniStreamStart(getHandle())); } /** * Stops data generation from this video stream. */ public void stop() { NativeMethods.oniStreamStop(getHandle()); } /** * Read the next frame from this video stream, delivered as a {@link VideoFrameRef}. This is the * primary method for manually obtaining frames of video data. If no new frame is available, the * call will block until one is available. To avoid blocking, use * {@link VideoStream.NewFrameListener} to implement an event driven architecture. Another * alternative is to use {@link org.openni.OpenNI#waitForAnyStream(java.util.List, int)} to wait * for new frames from several streams. * * @return VideoFrameRef object which holds the data of the new frame. */ public VideoFrameRef readFrame() { OutArg<VideoFrameRef> frame = new OutArg<VideoFrameRef>(); NativeMethods.checkReturnStatus(NativeMethods.oniStreamReadFrame(getHandle(), frame)); return frame.mValue; } /** * Adds a new Listener to receive this VideoStream's onFrameReady event. See * {@link VideoStream.NewFrameListener} for more information on implementing an event driven * frame reading architecture. * * @param streamListener Object which implements {@link VideoStream.NewFrameListener} that will * respond to this event. */ public void addNewFrameListener(NewFrameListener streamListener) { mFrameListeners.put(this, streamListener); } /** * Removes a Listener from this video stream's list. The listener removed will no longer receive new * frame events from this stream. * * @param streamListener Object of the listener to be removed. */ public void removeNewFrameListener(NewFrameListener streamListener) { for (Map.Entry<VideoStream, NewFrameListener> pairs : mFrameListeners.entrySet()) { VideoStream videoStream = pairs.getKey(); if (videoStream.getHandle() == mStreamHandle) { if (streamListener.equals(pairs.getValue())) { mFrameListeners.remove(pairs.getKey()); return; } } } } /** * This function returns the stream handle. * * @return OpenNI stream handle. */ public long getHandle() { return mStreamHandle; } /** * Gets an object through which several camera settings can be configured. * * @return null if the stream doesn't support camera settings. */ public CameraSettings getCameraSettings() { if (NativeMethods.oniStreamIsPropertySupported(getHandle(), NativeMethods.STREAM_PROPERTY_AUTO_EXPOSURE) && NativeMethods.oniStreamIsPropertySupported(getHandle(), NativeMethods.STREAM_PROPERTY_AUTO_WHITE_BALANCE)) { return new CameraSettings(this); } return null; } /** * Get the current video mode information for this video stream. This includes its resolution, fps * and stream format. * * @return Current video mode information for this video stream. */ public final VideoMode getVideoMode() { OutArg<VideoMode> videoMode = new OutArg<VideoMode>(); NativeMethods.checkReturnStatus(NativeMethods.getVideoMode(getHandle(), videoMode)); return videoMode.mValue; } /** * Changes the current video mode of this stream. Recommended practice is to use * {@link org.openni.Device#getSensorInfo(SensorType)}, and then * {@link org.openni.SensorInfo#getSupportedVideoModes()} to obtain a list of valid video mode * settings for this stream. Then, pass a valid {@link VideoMode} to setVideoMode(VideoMode) to * ensure correct operation. * * @param videoMode Desired new video mode for this stream. The status code returned by the * native call is checked internally for success or failure. */ public void setVideoMode(VideoMode videoMode) { NativeMethods.checkReturnStatus(NativeMethods.setVideoMode(getHandle(), videoMode .getResolutionX(), videoMode.getResolutionY(), videoMode.getFps(), videoMode .getPixelFormat().toNative())); } /** * Provides the maximum possible value for pixels obtained by this stream. This is most useful for * getting the maximum possible value of depth streams. * * @return Maximum possible pixel value. */ public int getMaxPixelValue() { OutArg<Integer> val = new OutArg<Integer>(); NativeMethods .oniStreamGetIntProperty(getHandle(), NativeMethods.STREAM_PROPERTY_MAX_VALUE, val); return val.mValue; } /** * Provides the smallest possible value for pixels obtained by this VideoStream. This is most * useful for getting the minimum possible value that will be reported by a depth stream. * * @return Minimum possible pixel value that can come from this stream. */ public int getMinPixelValue() { OutArg<Integer> val = new OutArg<Integer>(); NativeMethods.checkReturnStatus(NativeMethods.oniStreamGetIntProperty(getHandle(), NativeMethods.STREAM_PROPERTY_MIN_VALUE, val)); return val.mValue; } /** * Checks whether this stream supports cropping. * * @return true if the stream supports cropping, false if it does not. */ public boolean isCroppingSupported() { return NativeMethods.oniStreamIsPropertySupported(getHandle(), NativeMethods.STREAM_PROPERTY_CROPPING); } /** * Obtains the current cropping settings for this stream. * * @return CropArea object which encapsulates the cropping info. */ public CropArea getCropping() { OutArg<Integer> xRes = new OutArg<Integer>(); OutArg<Integer> yRes = new OutArg<Integer>(); OutArg<Integer> w = new OutArg<Integer>(); OutArg<Integer> h = new OutArg<Integer>(); NativeMethods.checkReturnStatus(NativeMethods.getCropping(getHandle(), xRes, yRes, w, h)); return new CropArea(xRes.mValue, yRes.mValue, w.mValue, h.mValue); } /** * Changes the cropping settings for this stream. You can use the {@link #isCroppingSupported()} * function to make sure cropping is supported before calling this function. * * @param cropping CropArea object which sets the corresponding cropping information. */ public void setCropping(CropArea cropping) { NativeMethods.checkReturnStatus(NativeMethods.setCropping(getHandle(), cropping.getOriginX(), cropping.getOriginY(), cropping.getWidth(), cropping.getHeight())); } /** * Disables cropping. */ public void resetCropping() { NativeMethods.checkReturnStatus(NativeMethods.resetCropping(getHandle())); } /** * Check whether mirroring is currently turned on for this stream. * * @return true if mirroring is currently enabled, false otherwise. */ public boolean getMirroringEnabled() { OutArg<Boolean> val = new OutArg<Boolean>(); NativeMethods.checkReturnStatus(NativeMethods.oniStreamGetBoolProperty(getHandle(), NativeMethods.STREAM_PROPERTY_MIRRORING, val)); return val.mValue; } /** * Enable or disable mirroring for this stream. * * @param isEnabled true to enable mirroring, false to disable it. */ public void setMirroringEnabled(boolean isEnabled) { NativeMethods.checkReturnStatus(NativeMethods.oniStreamSetProperty(getHandle(), NativeMethods.STREAM_PROPERTY_MIRRORING, isEnabled)); } /** * Gets the horizontal field of view of frames received from this stream. * * @return Horizontal field of view, in radians. */ public float getHorizontalFieldOfView() { OutArg<Float> val = new OutArg<Float>(); NativeMethods.checkReturnStatus(NativeMethods.oniStreamGetFloatProperty(getHandle(), NativeMethods.STREAM_PROPERTY_HORIZONTAL_FOV, val)); return val.mValue; } /** * Gets the vertical field of view of frames received from this stream. * * @return Vertical field of view, in radians. */ public float getVerticalFieldOfView() { OutArg<Float> val = new OutArg<Float>(); NativeMethods.checkReturnStatus(NativeMethods.oniStreamGetFloatProperty(getHandle(), NativeMethods.STREAM_PROPERTY_VERTICAL_FOV, val)); return val.mValue; } /** * Gets the sensor type for this stream. * * @return sensor type. */ public SensorType getSensorType() { return mSensorType; } private VideoStream(SensorType sensorType) { this.mSensorType = sensorType; } private static void onFrameReady(long streamHandle) { for (Map.Entry<VideoStream, NewFrameListener> pairs : mFrameListeners.entrySet()) { VideoStream videoStream = pairs.getKey(); if (videoStream.getHandle() == streamHandle) { pairs.getValue().onFrameReady(videoStream); } } } private final SensorType mSensorType; private static HashMap<VideoStream, NewFrameListener> mFrameListeners; private long mStreamHandle; private long mCallbackHandle; }
\ No newline at end of file
+package org.openni;
+
+import java.util.Map;
+import java.util.HashMap;
+import java.util.Iterator;
+
+/**
+ * The {@link VideoStream} object encapsulates a single video stream from a device. Once created, it
+ * is used to start data flow from the device, and to read individual frames of data. This is the
+ * central class used to obtain data in OpenNI. It provides the ability to manually read data in a
+ * polling loop, as well as providing events and a Listener class that can be used to implement
+ * event-driven data acquisition.
+ * 
+ * Aside from the video data frames themselves, the class offers a number of functions used for
+ * obtaining information about a {@link VideoStream}. Field of view, available video modes, and
+ * minimum and maximum valid pixel values can all be obtained.
+ * 
+ * In addition to obtaining data, the {@link VideoStream} object is used to set all configuration
+ * properties that apply to a specific stream (rather than to an entire device). In particular, it
+ * is used to control cropping, mirroring, and video modes.
+ * 
+ * A valid, initialized device that provides the desired stream type is required to create a stream.
+ * 
+ * Several video streams can be created to stream data from the same sensor. This is useful if
+ * several components of an application need to read frames separately.
+ * 
+ * While some devices might allow different streams from the same sensor to have different
+ * configurations, most devices will have a single configuration for the sensor, shared by all
+ * streams.
+ */
+public class VideoStream {
+  /**
+   * The {@link CameraSettings} object encapsulates camera settings for a single device. Once
+   * created, it is used to get and set auto exposure and auto white balance modes.
+   */
+  public class CameraSettings {
+    /**
+     * Set Auto Exposure Enabled for the corresponding sensor
+     * 
+     * @param enabled true to enable auto exposure mode, false to disable it
+     */
+    public void setAutoExposureEnabled(boolean enabled) {
+      NativeMethods.checkReturnStatus(NativeMethods.oniStreamSetProperty(mVideoStream.getHandle(),
+          NativeMethods.STREAM_PROPERTY_AUTO_EXPOSURE, enabled));
+    }
+
+    /**
+     * Set Auto White Balance for the corresponding sensor
+     * 
+     * @param enabled true to enable auto white balance mode, false to disable it
+     */
+    public void setAutoWhiteBalanceEnabled(boolean enabled) {
+      NativeMethods.checkReturnStatus(NativeMethods.oniStreamSetProperty(mVideoStream.getHandle(),
+          NativeMethods.STREAM_PROPERTY_AUTO_WHITE_BALANCE, enabled));
+    }
+
+    /**
+     * Get Auto Exposure Enabled state for the corresponding sensor
+     * 
+     * @return true if auto exposure mode is currently enabled
+     */
+    public boolean getAutoExposureEnabled() {
+      OutArg<Boolean> val = new OutArg<Boolean>();
+      NativeMethods.oniStreamGetBoolProperty(mVideoStream.getHandle(),
+          NativeMethods.STREAM_PROPERTY_AUTO_EXPOSURE, val);
+      return val.mValue;
+    }
+
+    /**
+     * Get Auto White Balance Enabled state for the corresponding sensor
+     * 
+     * @return true if auto white balance mode is currently enabled
+     */
+    public boolean getAutoWhiteBalanceEnabled() {
+      OutArg<Boolean> val = new OutArg<Boolean>();
+      NativeMethods.oniStreamGetBoolProperty(mVideoStream.getHandle(),
+          NativeMethods.STREAM_PROPERTY_AUTO_WHITE_BALANCE, val);
+      return val.mValue;
+    }
+
+    private CameraSettings(VideoStream videoStream) {
+      this.mVideoStream = videoStream;
+    }
+
+    final private VideoStream mVideoStream;
+  }
+  /**
+   * The VideoStream::NewFrameListener interface is provided to allow the implementation of event
+   * driven frame reading. To use it, create a class that implements it and override the
+   * onFrameReady() method. Then, register your created class with an active {@link VideoStream}
+   * using the {@link #addNewFrameListener(org.openni.VideoStream.NewFrameListener)} function. Once
+   * this is done, the event handler function you implemented will be called whenever a new frame
+   * becomes available. You may call {@link org.openni.VideoStream#readFrame()} from within the
+   * event handler.
+   */
+  public interface NewFrameListener {
+    /**
+     * Derived classes should implement this function to handle new frames.
+     */
+    public void onFrameReady(VideoStream stream);
+  }
+
+  /**
+   * Creates a stream of frames from a specific sensor type of a specific device. You must supply a
+   * reference to a Device that supplies the sensor type requested. You can use
+   * {@link org.openni.Device#hasSensor(SensorType)} to check whether a given sensor is available on
+   * your target device before calling create().
+   * 
+   * @param device A reference to the {@link Device} you want to create the stream on.
+   * @param sensorType The type of sensor the stream should produce data from.
+   */
+  public static VideoStream create(Device device, SensorType sensorType) {
+    VideoStream videoStream = new VideoStream(sensorType);
+    if (mFrameListeners == null) mFrameListeners = new HashMap<VideoStream, NewFrameListener>();
+    NativeMethods.checkReturnStatus(NativeMethods.oniDeviceCreateStream(device.getHandle(),
+        sensorType.toNative(), videoStream));
+    return videoStream;
+  }
+
+  /**
+   * Destroy this stream. This function is currently called automatically by the destructor, but it
+   * is considered a best practice for applications to manually call this function on any
+   * {@link VideoStream} that they call create() for.
+   */
+  public void destroy() {
+    NativeMethods.oniStreamDestroy(getHandle(), mCallbackHandle);
+    mStreamHandle = 0;
+  }
+
+  /**
+   * Provides the {@link SensorInfo} object associated with the sensor that is producing this
+   * {@link VideoStream}.
+   * 
+   * {@link SensorInfo} is useful primarily as a means of learning which video modes are valid for
+   * this VideoStream.
+   * 
+   * @return SensorInfo object associated with the sensor providing this stream.
+   */
+  public final SensorInfo getSensorInfo() {
+    return NativeMethods.oniStreamGetSensorInfo(getHandle());
+  }
+
+  /**
+   * Starts data generation from this video stream.
+   */
+  public void start() {
+    NativeMethods.checkReturnStatus(NativeMethods.oniStreamStart(getHandle()));
+  }
+
+  /**
+   * Stops data generation from this video stream.
+   */
+  public void stop() {
+    NativeMethods.oniStreamStop(getHandle());
+  }
+
+  /**
+   * Read the next frame from this video stream, delivered as a {@link VideoFrameRef}. This is the
+   * primary method for manually obtaining frames of video data. If no new frame is available, the
+   * call will block until one is available. To avoid blocking, use
+   * {@link VideoStream.NewFrameListener} to implement an event driven architecture. Another
+   * alternative is to use {@link org.openni.OpenNI#waitForAnyStream(java.util.List, int)} to wait
+   * for new frames from several streams.
+   * 
+   * @return VideoFrameRef object which holds the data of the new frame.
+   */
+  public VideoFrameRef readFrame() {
+    OutArg<VideoFrameRef> frame = new OutArg<VideoFrameRef>();
+    NativeMethods.checkReturnStatus(NativeMethods.oniStreamReadFrame(getHandle(), frame));
+    return frame.mValue;
+  }
+
+  /**
+   * Adds a new Listener to receive this VideoStream's onFrameReady event. See
+   * {@link VideoStream.NewFrameListener} for more information on implementing an event driven
+   * frame reading architecture.
+   * 
+   * @param streamListener Object which implements {@link VideoStream.NewFrameListener} that will
+   *        respond to this event.
+   */
+  public void addNewFrameListener(NewFrameListener streamListener) {
+    mFrameListeners.put(this, streamListener);
+  }
+
+  /**
+   * Removes a Listener from this video stream's list. The listener removed will no longer receive new
+   * frame events from this stream.
+   * 
+   * @param streamListener Object of the listener to be removed.
+   */
+  public void removeNewFrameListener(NewFrameListener streamListener) {
+    for (Map.Entry<VideoStream, NewFrameListener> pairs : mFrameListeners.entrySet()) {
+      VideoStream videoStream = pairs.getKey();
+      if (videoStream.getHandle() == mStreamHandle) {
+        if (streamListener.equals(pairs.getValue())) {
+          mFrameListeners.remove(pairs.getKey());
+          return;
+        }
+      }
+    }
+  }
+
+  /**
+   * This function returns the stream handle.
+   * 
+   * @return OpenNI stream handle.
+   */
+  public long getHandle() {
+    return mStreamHandle;
+  }
+
+  /**
+   * Gets an object through which several camera settings can be configured.
+   * 
+   * @return null if the stream doesn't support camera settings.
+   */
+  public CameraSettings getCameraSettings() {
+    if (NativeMethods.oniStreamIsPropertySupported(getHandle(),
+        NativeMethods.STREAM_PROPERTY_AUTO_EXPOSURE)
+        && NativeMethods.oniStreamIsPropertySupported(getHandle(),
+            NativeMethods.STREAM_PROPERTY_AUTO_WHITE_BALANCE)) {
+      return new CameraSettings(this);
+    }
+    return null;
+  }
+
+  /**
+   * Get the current video mode information for this video stream. This includes its resolution, fps
+   * and stream format.
+   * 
+   * @return Current video mode information for this video stream.
+   */
+  public final VideoMode getVideoMode() {
+    OutArg<VideoMode> videoMode = new OutArg<VideoMode>();
+    NativeMethods.checkReturnStatus(NativeMethods.getVideoMode(getHandle(), videoMode));
+    return videoMode.mValue;
+  }
+
+  /**
+   * Changes the current video mode of this stream. Recommended practice is to use
+   * {@link org.openni.Device#getSensorInfo(SensorType)}, and then
+   * {@link org.openni.SensorInfo#getSupportedVideoModes()} to obtain a list of valid video mode
+   * settings for this stream. Then, pass a valid {@link VideoMode} to setVideoMode(VideoMode) to
+   * ensure correct operation.
+   * 
+   * @param videoMode Desired new video mode for this stream. The status code returned by the
+   *        native call is checked internally for success or failure.
+   */
+  public void setVideoMode(VideoMode videoMode) {
+    NativeMethods.checkReturnStatus(NativeMethods.setVideoMode(getHandle(), videoMode
+        .getResolutionX(), videoMode.getResolutionY(), videoMode.getFps(), videoMode
+        .getPixelFormat().toNative()));
+  }
+
+  /**
+   * Provides the maximum possible value for pixels obtained by this stream. This is most useful for
+   * getting the maximum possible value of depth streams.
+   * 
+   * @return Maximum possible pixel value.
+   */
+  public int getMaxPixelValue() {
+    OutArg<Integer> val = new OutArg<Integer>();
+    NativeMethods
+        .oniStreamGetIntProperty(getHandle(), NativeMethods.STREAM_PROPERTY_MAX_VALUE, val);
+    return val.mValue;
+  }
+
+  /**
+   * Provides the smallest possible value for pixels obtained by this VideoStream. This is most
+   * useful for getting the minimum possible value that will be reported by a depth stream.
+   * 
+   * @return Minimum possible pixel value that can come from this stream.
+   */
+  public int getMinPixelValue() {
+    OutArg<Integer> val = new OutArg<Integer>();
+    NativeMethods.checkReturnStatus(NativeMethods.oniStreamGetIntProperty(getHandle(),
+        NativeMethods.STREAM_PROPERTY_MIN_VALUE, val));
+    return val.mValue;
+  }
+
+  /**
+   * Checks whether this stream supports cropping.
+   * 
+   * @return true if the stream supports cropping, false if it does not.
+   */
+  public boolean isCroppingSupported() {
+    return NativeMethods.oniStreamIsPropertySupported(getHandle(),
+        NativeMethods.STREAM_PROPERTY_CROPPING);
+  }
+
+  /**
+   * Obtains the current cropping settings for this stream.
+   * 
+   * @return CropArea object which encapsulates the cropping info.
+   */
+  public CropArea getCropping() {
+    OutArg<Integer> xRes = new OutArg<Integer>();
+    OutArg<Integer> yRes = new OutArg<Integer>();
+    OutArg<Integer> w = new OutArg<Integer>();
+    OutArg<Integer> h = new OutArg<Integer>();
+
+    NativeMethods.checkReturnStatus(NativeMethods.getCropping(getHandle(), xRes, yRes, w, h));
+    return new CropArea(xRes.mValue, yRes.mValue, w.mValue, h.mValue);
+  }
+
+  /**
+   * Changes the cropping settings for this stream. You can use the {@link #isCroppingSupported()}
+   * function to make sure cropping is supported before calling this function.
+   * 
+   * @param cropping CropArea object which sets the corresponding cropping information.
+   */
+  public void setCropping(CropArea cropping) {
+    NativeMethods.checkReturnStatus(NativeMethods.setCropping(getHandle(), cropping.getOriginX(),
+        cropping.getOriginY(), cropping.getWidth(), cropping.getHeight()));
+  }
+
+  /**
+   * Disables cropping.
+   */
+  public void resetCropping() {
+    NativeMethods.checkReturnStatus(NativeMethods.resetCropping(getHandle()));
+  }
+
+  /**
+   * Check whether mirroring is currently turned on for this stream.
+   * 
+   * @return true if mirroring is currently enabled, false otherwise.
+   */
+  public boolean getMirroringEnabled() {
+    OutArg<Boolean> val = new OutArg<Boolean>();
+    NativeMethods.checkReturnStatus(NativeMethods.oniStreamGetBoolProperty(getHandle(),
+        NativeMethods.STREAM_PROPERTY_MIRRORING, val));
+    return val.mValue;
+  }
+
+  /**
+   * Enable or disable mirroring for this stream.
+   * 
+   * @param isEnabled true to enable mirroring, false to disable it.
+   */
+  public void setMirroringEnabled(boolean isEnabled) {
+    NativeMethods.checkReturnStatus(NativeMethods.oniStreamSetProperty(getHandle(),
+        NativeMethods.STREAM_PROPERTY_MIRRORING, isEnabled));
+  }
+
+  /**
+   * Gets the horizontal field of view of frames received from this stream.
+   * 
+   * @return Horizontal field of view, in radians.
+   */
+  public float getHorizontalFieldOfView() {
+    OutArg<Float> val = new OutArg<Float>();
+    NativeMethods.checkReturnStatus(NativeMethods.oniStreamGetFloatProperty(getHandle(),
+        NativeMethods.STREAM_PROPERTY_HORIZONTAL_FOV, val));
+    return val.mValue;
+  }
+
+  /**
+   * Gets the vertical field of view of frames received from this stream.
+   * 
+   * @return Vertical field of view, in radians.
+   */
+  public float getVerticalFieldOfView() {
+    OutArg<Float> val = new OutArg<Float>();
+    NativeMethods.checkReturnStatus(NativeMethods.oniStreamGetFloatProperty(getHandle(),
+        NativeMethods.STREAM_PROPERTY_VERTICAL_FOV, val));
+    return val.mValue;
+  }
+
+  /**
+   * Gets the sensor type for this stream.
+   * 
+   * @return sensor type.
+   */
+  public SensorType getSensorType() {
+    return mSensorType;
+  }
+
+  private VideoStream(SensorType sensorType) {
+    this.mSensorType = sensorType;
+  }
+
+  private static void onFrameReady(long streamHandle) {
+    for (Map.Entry<VideoStream, NewFrameListener> pairs : mFrameListeners.entrySet()) {
+      VideoStream videoStream = pairs.getKey();
+      if (videoStream.getHandle() == streamHandle) {
+        pairs.getValue().onFrameReady(videoStream);
+      }
+    }
+
+  }
+
+  private final SensorType mSensorType;
+  private static HashMap<VideoStream, NewFrameListener> mFrameListeners;
+  private long mStreamHandle;
+  private long mCallbackHandle;
+}

From 1f36976ee28dfbc829d91abc51232668476eb168 Mon Sep 17 00:00:00 2001
From: Brian Lewis
Date: Sat, 20 Jul 2013 21:36:57 -0500
Subject: [PATCH 2/3] rm trailing whitespace in VideoStream.java

---
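Annotation: several hunks below touch the video-mode javadoc. For context, a sketch of the configuration flow that javadoc recommends: pick a mode from getSensorInfo()/getSupportedVideoModes(), then call setVideoMode() before start(). 640x480 at 30 fps is an arbitrary example; Device.open() and the List<VideoMode> return type of getSupportedVideoModes() are assumed from the standard OpenNI2 Java wrapper.

  import java.util.List;
  import org.openni.*;

  public class ModePicker {
    // Prefer 640x480 at 30 fps when the sensor advertises it; else fall back to the first mode.
    static VideoMode pick(Device device, SensorType type) {
      List<VideoMode> modes = device.getSensorInfo(type).getSupportedVideoModes();
      for (VideoMode mode : modes) {
        if (mode.getResolutionX() == 640 && mode.getResolutionY() == 480 && mode.getFps() == 30) {
          return mode;
        }
      }
      return modes.get(0);
    }

    public static void main(String[] args) {
      OpenNI.initialize();
      Device device = Device.open();                       // assumed wrapper call
      VideoStream stream = VideoStream.create(device, SensorType.DEPTH);
      stream.setVideoMode(pick(device, SensorType.DEPTH)); // configure before start()
      stream.start();
      // ... read frames ...
      stream.stop();
      stream.destroy();
      device.close();
      OpenNI.shutdown();
    }
  }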
 .../src/org/openni/VideoStream.java                | 58 +++++++++----------
 1 file changed, 29 insertions(+), 29 deletions(-)

diff --git a/Wrappers/java/OpenNI.java/src/org/openni/VideoStream.java b/Wrappers/java/OpenNI.java/src/org/openni/VideoStream.java
index d7e2020c..6cd2c723 100644
--- a/Wrappers/java/OpenNI.java/src/org/openni/VideoStream.java
+++ b/Wrappers/java/OpenNI.java/src/org/openni/VideoStream.java
@@ -10,20 +10,20 @@
  * central class used to obtain data in OpenNI. It provides the ability to manually read data in a
  * polling loop, as well as providing events and a Listener class that can be used to implement
  * event-driven data acquisition.
- * 
+ *
  * Aside from the video data frames themselves, the class offers a number of functions used for
  * obtaining information about a {@link VideoStream}. Field of view, available video modes, and
  * minimum and maximum valid pixel values can all be obtained.
- * 
+ *
  * In addition to obtaining data, the {@link VideoStream} object is used to set all configuration
  * properties that apply to a specific stream (rather than to an entire device). In particular, it
  * is used to control cropping, mirroring, and video modes.
- * 
+ *
  * A valid, initialized device that provides the desired stream type is required to create a stream.
- * 
+ *
  * Several video streams can be created to stream data from the same sensor. This is useful if
  * several components of an application need to read frames separately.
- * 
+ *
  * While some devices might allow different streams from the same sensor to have different
  * configurations, most devices will have a single configuration for the sensor, shared by all
  * streams.
@@ -36,7 +36,7 @@ public class VideoStream {
   public class CameraSettings {
     /**
      * Set Auto Exposure Enabled for the corresponding sensor
-     * 
+     *
      * @param enabled true to enable auto exposure mode, false to disable it
      */
     public void setAutoExposureEnabled(boolean enabled) {
@@ -46,7 +46,7 @@ public void setAutoExposureEnabled(boolean enabled) {
 
     /**
      * Set Auto White Balance for the corresponding sensor
-     * 
+     *
      * @param enabled true to enable auto white balance mode, false to disable it
      */
     public void setAutoWhiteBalanceEnabled(boolean enabled) {
@@ -56,7 +56,7 @@ public void setAutoWhiteBalanceEnabled(boolean enabled) {
 
     /**
      * Get Auto Exposure Enabled state for the corresponding sensor
-     * 
+     *
      * @return true if auto exposure mode is currently enabled
      */
     public boolean getAutoExposureEnabled() {
@@ -68,7 +68,7 @@ public boolean getAutoExposureEnabled() {
 
     /**
      * Get Auto White Balance Enabled state for the corresponding sensor
-     * 
+     *
      * @return true if auto white balance mode is currently enabled
     */
    public boolean getAutoWhiteBalanceEnabled() {
@@ -105,7 +105,7 @@ public interface NewFrameListener {
    * reference to a Device that supplies the sensor type requested. You can use
    * {@link org.openni.Device#hasSensor(SensorType)} to check whether a given sensor is available on
    * your target device before calling create().
-   * 
+   *
    * @param device A reference to the {@link Device} you want to create the stream on.
    * @param sensorType The type of sensor the stream should produce data from.
    */
@@ -130,10 +130,10 @@ public void destroy() {
   /**
    * Provides the {@link SensorInfo} object associated with the sensor that is producing this
    * {@link VideoStream}.
-   * 
+   *
    * {@link SensorInfo} is useful primarily as a means of learning which video modes are valid for
    * this VideoStream.
-   * 
+   *
    * @return SensorInfo object associated with the sensor providing this stream.
    */
   public final SensorInfo getSensorInfo() {
@@ -161,7 +161,7 @@ public void stop() {
    * {@link VideoStream.NewFrameListener} to implement an event driven architecture. Another
    * alternative is to use {@link org.openni.OpenNI#waitForAnyStream(java.util.List, int)} to wait
    * for new frames from several streams.
-   * 
+   *
    * @return VideoFrameRef object which holds the data of the new frame.
    */
   public VideoFrameRef readFrame() {
@@ -174,7 +174,7 @@ public VideoFrameRef readFrame() {
    * Adds a new Listener to receive this VideoStream's onFrameReady event. See
    * {@link VideoStream.NewFrameListener} for more information on implementing an event driven
    * frame reading architecture.
-   * 
+   *
    * @param streamListener Object which implements {@link VideoStream.NewFrameListener} that will
    *        respond to this event.
    */
@@ -185,7 +185,7 @@ public void addNewFrameListener(NewFrameListener streamListener) {
   /**
    * Removes a Listener from this video stream's list. The listener removed will no longer receive new
    * frame events from this stream.
-   * 
+   *
    * @param streamListener Object of the listener to be removed.
    */
   public void removeNewFrameListener(NewFrameListener streamListener) {
@@ -202,7 +202,7 @@ public void removeNewFrameListener(NewFrameListener streamListener) {
 
   /**
    * This function returns the stream handle.
-   * 
+   *
    * @return OpenNI stream handle.
    */
   public long getHandle() {
@@ -211,7 +211,7 @@ public long getHandle() {
 
   /**
    * Gets an object through which several camera settings can be configured.
-   * 
+   *
    * @return null if the stream doesn't support camera settings.
    */
   public CameraSettings getCameraSettings() {
@@ -227,7 +227,7 @@ public CameraSettings getCameraSettings() {
   /**
    * Get the current video mode information for this video stream. This includes its resolution, fps
    * and stream format.
-   * 
+   *
    * @return Current video mode information for this video stream.
    */
   public final VideoMode getVideoMode() {
@@ -242,7 +242,7 @@ public final VideoMode getVideoMode() {
    * {@link org.openni.SensorInfo#getSupportedVideoModes()} to obtain a list of valid video mode
    * settings for this stream. Then, pass a valid {@link VideoMode} to setVideoMode(VideoMode) to
    * ensure correct operation.
-   * 
+   *
    * @param videoMode Desired new video mode for this stream. The status code returned by the
    *        native call is checked internally for success or failure.
    */
@@ -255,7 +255,7 @@ public void setVideoMode(VideoMode videoMode) {
   /**
    * Provides the maximum possible value for pixels obtained by this stream. This is most useful for
    * getting the maximum possible value of depth streams.
-   * 
+   *
    * @return Maximum possible pixel value.
    */
   public int getMaxPixelValue() {
@@ -268,7 +268,7 @@ public int getMaxPixelValue() {
   /**
    * Provides the smallest possible value for pixels obtained by this VideoStream. This is most
    * useful for getting the minimum possible value that will be reported by a depth stream.
-   * 
+   *
    * @return Minimum possible pixel value that can come from this stream.
    */
   public int getMinPixelValue() {
@@ -280,7 +280,7 @@ public int getMinPixelValue() {
 
   /**
    * Checks whether this stream supports cropping.
-   * 
+   *
    * @return true if the stream supports cropping, false if it does not.
    */
   public boolean isCroppingSupported() {
@@ -290,7 +290,7 @@ public boolean isCroppingSupported() {
 
   /**
    * Obtains the current cropping settings for this stream.
-   * 
+   *
    * @return CropArea object which encapsulates the cropping info.
    */
   public CropArea getCropping() {
@@ -306,7 +306,7 @@ public CropArea getCropping() {
   /**
    * Changes the cropping settings for this stream. You can use the {@link #isCroppingSupported()}
    * function to make sure cropping is supported before calling this function.
-   * 
+   *
    * @param cropping CropArea object which sets the corresponding cropping information.
    */
   public void setCropping(CropArea cropping) {
@@ -323,7 +323,7 @@ public void resetCropping() {
 
   /**
    * Check whether mirroring is currently turned on for this stream.
-   * 
+   *
    * @return true if mirroring is currently enabled, false otherwise.
    */
   public boolean getMirroringEnabled() {
@@ -335,7 +335,7 @@ public boolean getMirroringEnabled() {
 
   /**
    * Enable or disable mirroring for this stream.
-   * 
+   *
    * @param isEnabled true to enable mirroring, false to disable it.
    */
   public void setMirroringEnabled(boolean isEnabled) {
@@ -345,7 +345,7 @@ public void setMirroringEnabled(boolean isEnabled) {
 
   /**
    * Gets the horizontal field of view of frames received from this stream.
-   * 
+   *
    * @return Horizontal field of view, in radians.
    */
   public float getHorizontalFieldOfView() {
@@ -357,7 +357,7 @@ public float getHorizontalFieldOfView() {
 
   /**
    * Gets the vertical field of view of frames received from this stream.
-   * 
+   *
    * @return Vertical field of view, in radians.
    */
   public float getVerticalFieldOfView() {
@@ -369,7 +369,7 @@ public float getVerticalFieldOfView() {
 
   /**
    * Gets the sensor type for this stream.
-   * 
+   *
    * @return sensor type.
    */
   public SensorType getSensorType() {

From c4b4d862c879bfb9216eb734b3be1c90d845882b Mon Sep 17 00:00:00 2001
From: Brian Lewis
Date: Sun, 21 Jul 2013 01:59:40 -0500
Subject: [PATCH 3/3] Enable VideoStreams to have multiple NewFrameListeners

Before this change, VideoStream tried to keep track of NewFrameListeners
with a Map keyed by VideoStream. But this is unworkable, because
subsequent addNewFrameListener calls would overwrite previous ones.

After this change, VideoStream keeps a Map keyed by Long for the
primitive video stream handle. The values of the Map are a simple
container that holds a VideoStream and all of that VideoStream's
NewFrameListeners.

---
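Annotation: to make the overwrite concrete before reading the diff, a self-contained sketch of the two bookkeeping schemes using plain JDK collections, with String stand-ins for the real VideoStream and NewFrameListener types:

  import java.util.HashMap;
  import java.util.LinkedHashSet;
  import java.util.Map;
  import java.util.Set;

  class ListenerBookkeeping {
    public static void main(String[] args) {
      // Old scheme: one map entry per stream, so a second put() for the
      // same stream silently replaces the first listener.
      Map<String, String> old = new HashMap<String, String>();
      old.put("stream-1", "listenerA");
      old.put("stream-1", "listenerB");
      System.out.println(old.size()); // 1 -- listenerA was dropped

      // New scheme: key by the stream handle; the value holds a Set of listeners.
      Map<Long, Set<String>> byHandle = new HashMap<Long, Set<String>>();
      Long handle = Long.valueOf(42L);
      Set<String> listeners = byHandle.get(handle);
      if (listeners == null) {
        listeners = new LinkedHashSet<String>();
        byHandle.put(handle, listeners);
      }
      listeners.add("listenerA");
      listeners.add("listenerB");
      System.out.println(byHandle.get(handle).size()); // 2 -- both are kept
    }
  }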
 .../src/org/openni/VideoStream.java                | 57 +++++++++++++------
 1 file changed, 39 insertions(+), 18 deletions(-)

diff --git a/Wrappers/java/OpenNI.java/src/org/openni/VideoStream.java b/Wrappers/java/OpenNI.java/src/org/openni/VideoStream.java
index 6cd2c723..d58a8771 100644
--- a/Wrappers/java/OpenNI.java/src/org/openni/VideoStream.java
+++ b/Wrappers/java/OpenNI.java/src/org/openni/VideoStream.java
@@ -1,8 +1,9 @@
 package org.openni;
 
-import java.util.Map;
 import java.util.HashMap;
-import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.Map;
+import java.util.Set;
 
 /**
  * The {@link VideoStream} object encapsulates a single video stream from a device. Once created, it
@@ -111,7 +112,9 @@ public interface NewFrameListener {
    */
   public static VideoStream create(Device device, SensorType sensorType) {
     VideoStream videoStream = new VideoStream(sensorType);
-    if (mFrameListeners == null) mFrameListeners = new HashMap<VideoStream, NewFrameListener>();
+    if (mHandleToFrameListenerRecord == null) {
+      mHandleToFrameListenerRecord = new HashMap<Long, FrameListenerRecord>();
+    }
     NativeMethods.checkReturnStatus(NativeMethods.oniDeviceCreateStream(device.getHandle(),
         sensorType.toNative(), videoStream));
     return videoStream;
@@ -178,24 +181,30 @@ public VideoFrameRef readFrame() {
    * @param streamListener Object which implements {@link VideoStream.NewFrameListener} that will
    *        respond to this event.
    */
-  public void addNewFrameListener(NewFrameListener streamListener) {
-    mFrameListeners.put(this, streamListener);
+  public void addNewFrameListener(NewFrameListener frameListener) {
+    Long h = new Long(getHandle());
+    synchronized(mHandleToFrameListenerRecord) {
+      FrameListenerRecord frameListenerRecord = mHandleToFrameListenerRecord.get(h);
+      if (frameListenerRecord == null) {
+        frameListenerRecord = new FrameListenerRecord(this);
+        mHandleToFrameListenerRecord.put(h, frameListenerRecord);
+      }
+      frameListenerRecord.frameListeners.add(frameListener);
+    }
   }
 
   /**
    * Removes a Listener from this video stream's list. The listener removed will no longer receive new
    * frame events from this stream.
    *
    * @param streamListener Object of the listener to be removed.
    */
-  public void removeNewFrameListener(NewFrameListener streamListener) {
-    for (Map.Entry<VideoStream, NewFrameListener> pairs : mFrameListeners.entrySet()) {
-      VideoStream videoStream = pairs.getKey();
-      if (videoStream.getHandle() == mStreamHandle) {
-        if (streamListener.equals(pairs.getValue())) {
-          mFrameListeners.remove(pairs.getKey());
-          return;
-        }
+  public void removeNewFrameListener(NewFrameListener frameListener) {
+    Long h = new Long(getHandle());
+    synchronized(mHandleToFrameListenerRecord) {
+      FrameListenerRecord frameListenerRecord = mHandleToFrameListenerRecord.get(h);
+      if (frameListenerRecord != null) {
+        frameListenerRecord.frameListeners.remove(frameListener);
       }
     }
   }
@@ -381,17 +390,29 @@ private VideoStream(SensorType sensorType) {
   }
 
   private static void onFrameReady(long streamHandle) {
-    for (Map.Entry<VideoStream, NewFrameListener> pairs : mFrameListeners.entrySet()) {
-      VideoStream videoStream = pairs.getKey();
-      if (videoStream.getHandle() == streamHandle) {
-        pairs.getValue().onFrameReady(videoStream);
+    Long h = new Long(streamHandle);
+    synchronized(mHandleToFrameListenerRecord) {
+      FrameListenerRecord frameListenerRecord = mHandleToFrameListenerRecord.get(h);
+      if (frameListenerRecord != null) {
+        for (NewFrameListener frameListener : frameListenerRecord.frameListeners) {
+          frameListener.onFrameReady(frameListenerRecord.videoStream);
+        }
       }
     }
 
   }
 
+  private final class FrameListenerRecord {
+    public final VideoStream videoStream;
+    public final Set<NewFrameListener> frameListeners;
+
+    public FrameListenerRecord(VideoStream videoStream) {
+      this.videoStream = videoStream;
+      this.frameListeners = new LinkedHashSet<NewFrameListener>();
+    }
+  }
   private final SensorType mSensorType;
-  private static HashMap<VideoStream, NewFrameListener> mFrameListeners;
+  private static Map<Long, FrameListenerRecord> mHandleToFrameListenerRecord;
   private long mStreamHandle;
   private long mCallbackHandle;
 }
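A hypothetical end-to-end sketch of what the series enables: two listeners registered on the same stream, both receiving onFrameReady events. Device.open() and the frame accessors are assumed from the standard OpenNI2 Java wrapper; before PATCH 3/3 the second addNewFrameListener() call would have silently replaced the first listener.

  import org.openni.*;

  public class TwoListeners {
    public static void main(String[] args) throws InterruptedException {
      OpenNI.initialize();
      Device device = Device.open();                 // assumed wrapper call
      VideoStream stream = VideoStream.create(device, SensorType.COLOR);

      // Listener 1: reads and logs each frame (readFrame() is allowed in the handler).
      VideoStream.NewFrameListener logger = new VideoStream.NewFrameListener() {
        public void onFrameReady(VideoStream s) {
          VideoFrameRef frame = s.readFrame();
          System.out.println("logger saw frame " + frame.getFrameIndex());
          frame.release();
        }
      };
      // Listener 2: only counts events; it never consumes the frame itself.
      VideoStream.NewFrameListener counter = new VideoStream.NewFrameListener() {
        private int count = 0;
        public void onFrameReady(VideoStream s) {
          if (++count % 30 == 0) System.out.println(count + " events so far");
        }
      };

      stream.addNewFrameListener(logger);
      stream.addNewFrameListener(counter);           // kept alongside logger after PATCH 3/3
      stream.start();
      Thread.sleep(2000);                            // let a couple of seconds of frames arrive
      stream.stop();
      stream.removeNewFrameListener(logger);
      stream.removeNewFrameListener(counter);
      stream.destroy();
      device.close();
      OpenNI.shutdown();
    }
  }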