diff --git a/doc/images/OSI_Obstruction_visibility.svg b/doc/images/OSI_Obstruction_visibility.svg
new file mode 100644
index 000000000..498716183
--- /dev/null
+++ b/doc/images/OSI_Obstruction_visibility.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/osi_sensorspecific.proto b/osi_sensorspecific.proto
index 48b3ca9af..41aac8e03 100644
--- a/osi_sensorspecific.proto
+++ b/osi_sensorspecific.proto
@@ -6,6 +6,40 @@ import "osi_common.proto";
 
 package osi3;
 
+//
+// \brief Obstruction values for a given object and sensor.
+//
+// Describes, for a given detected moving object and a particular sensor, how
+// obstructed that object is from the point of view of that sensor.
+//
+// \note The ID of the sensor in question is available from the parent
+// SensorData.sensor_id field. The ID of the object is available from
+// the parent DetectedMovingObject.DetectedItemHeader.ground_truth_id.
+//
+message Obstruction
+{
+    // Total visible proportion of the object.
+    //
+    // This value accounts for the various factors that can prevent an object
+    // from being visible from the point of view of this sensor: truncation
+    // (parts outside the sensor's field of view), distance (parts beyond the
+    // range of the sensor), and occlusion (how much of the object is hidden
+    // by other objects in the scene).
+    //
+    // Occlusion can be caused by both static objects (trees, buildings, etc.)
+    // and dynamic objects (cars, pedestrians, etc.). By convention,
+    // semi-transparent surfaces (e.g. car windows) are considered opaque.
+    //
+    // \note The object is treated as its 2D cross-section as visible
+    // from the sensor.
+    //
+    // The value is in the range from 0.0 (fully hidden) to 1.0 (fully visible).
+    //
+    // \image html OSI_Obstruction_visibility.svg "Obstruction visibility"
+    //
+    optional double visibility = 2;
+}
+
 //
 // \brief Message encapsulates all data for detected objects that is specific to
 // radar sensors.
@@ -17,6 +51,10 @@ message RadarSpecificObjectData
     // Unit: dB m^2
     //
     optional double rcs = 1;
+
+    // Obstruction level for this object as seen from this sensor.
+    //
+    optional Obstruction obstruction = 2;
 }
 
 //
@@ -25,8 +63,9 @@ message RadarSpecificObjectData
 //
 message LidarSpecificObjectData
 {
-    // currently no fields.
+    // Obstruction level for this object as seen from this sensor.
     //
+    optional Obstruction obstruction = 1;
 }
 
 //
@@ -35,8 +74,9 @@ message LidarSpecificObjectData
 //
 message CameraSpecificObjectData
 {
-    // currently no fields.
+    // Obstruction level for this object as seen from this sensor.
     //
+    optional Obstruction obstruction = 1;
 }
 
 //
@@ -85,6 +125,10 @@ message UltrasonicSpecificObjectData
     //
     repeated Signalway signalway = 5;
 
+    // Obstruction level for this object as seen from this sensor.
+    //
+    optional Obstruction obstruction = 6;
+
     //
     // This indicates if the object was calculated based on one or multiple
     // sensors.
diff --git a/osi_sensorview.proto b/osi_sensorview.proto
index e6331cb76..ecd43aedf 100644
--- a/osi_sensorview.proto
+++ b/osi_sensorview.proto
@@ -172,6 +172,43 @@ message GenericSensorView
     optional GenericSensorViewConfiguration view_configuration = 1;
 }
 
+//
+// \brief Obstruction values for a given object and sensor.
+//
+// Describes, for a given object in the ground truth data and a particular
+// sensor, how obstructed that object is from the point of view of that sensor.
+//
+message Obstruction
+{
+    // ID of the object in the ground truth data.
+    //
+    // This field is required, as without it the message carries
+    // no useful information.
+    //
+    optional Identifier ground_truth_id = 1;
+
+    // Total visible proportion of the object.
+    //
+    // This value accounts for the various factors that can prevent an object
+    // from being visible from the point of view of this sensor: truncation
+    // (parts outside the sensor's field of view), distance (parts beyond the
+    // range of the sensor), and occlusion (how much of the object is hidden
+    // by other objects in the scene).
+    //
+    // Occlusion can be caused by both static objects (trees, buildings, etc.)
+    // and dynamic objects (cars, pedestrians, etc.). By convention,
+    // semi-transparent surfaces (e.g. car windows) are considered opaque.
+    //
+    // \note The object is treated as its 2D cross-section as visible
+    // from the sensor.
+    //
+    // The value is in the range from 0.0 (fully hidden) to 1.0 (fully visible).
+    //
+    // \image html OSI_Obstruction_visibility.svg "Obstruction visibility"
+    //
+    optional double visibility = 2;
+}
+
 //
 // \brief Definition of the radar sensor view.
 //
@@ -192,6 +229,12 @@ message RadarSensorView
     //
     repeated Reflection reflection = 2;
 
+    // Obstruction level for the objects in this sensor view.
+    //
+    // \note OSI uses singular instead of plural for repeated field names.
+    //
+    repeated Obstruction obstruction = 3;
+
     //
     // \brief Definition of the radar reflection.
     //
@@ -265,6 +308,12 @@ message LidarSensorView
     //
     repeated Reflection reflection = 2;
 
+    // Obstruction level for the objects in this sensor view.
+    //
+    // \note OSI uses singular instead of plural for repeated field names.
+    //
+    repeated Obstruction obstruction = 3;
+
     //
     // \brief Definition of the lidar reflection.
     //
@@ -331,6 +380,12 @@ message CameraSensorView
     // camera sensor input configuration.
     //
     optional bytes image_data = 2;
+
+    // Obstruction level for the objects in this sensor view.
+    //
+    // \note OSI uses singular instead of plural for repeated field names.
+    //
+    repeated Obstruction obstruction = 3;
 }
 
 //
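For illustration, a minimal sketch of how the new fields might be filled once language bindings are regenerated from this patch. It assumes the usual OSI Python package layout (osi3.osi_sensorspecific_pb2 and osi3.osi_sensorview_pb2); the object ID and visibility values are made up.

    from osi3.osi_sensorspecific_pb2 import RadarSpecificObjectData
    from osi3.osi_sensorview_pb2 import RadarSensorView

    # Sensor-model side: report per-object obstruction next to the existing RCS.
    radar_data = RadarSpecificObjectData()
    radar_data.rcs = 10.0                    # dB m^2 (existing field)
    radar_data.obstruction.visibility = 0.5  # 0.0 = fully hidden, 1.0 = fully visible

    # Environment-simulation side: attach per-object obstruction to the sensor view.
    view = RadarSensorView()
    entry = view.obstruction.add()           # repeated field, singular name by OSI convention
    entry.ground_truth_id.value = 42         # ID of the object in ground truth (illustrative)
    entry.visibility = 0.85

On the SensorData side the object and sensor IDs come from the enclosing messages, so Obstruction carries only the visibility value; on the SensorView side each entry names the ground truth object it refers to.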