Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions crates/store/re_tf/src/transform_forest.rs
Original file line number Diff line number Diff line change
Expand Up @@ -714,12 +714,12 @@ fn pinhole3d_from_image_plane(
let translation = (glam::DVec2::from(-image_from_camera.principal_point()) * scale)
.extend(pinhole_image_plane_distance);

// For anamorphic cameras, use geometric mean for z-scale to balance both dimensions equally
let z_scale = (scale.x * scale.y).sqrt();

let image_plane3d_from_2d_content = glam::DAffine3::from_translation(translation)
// We want to preserve any depth that might be on the pinhole image.
// Use harmonic mean of x/y scale for those.
* glam::DAffine3::from_scale(
scale.extend(2.0 / (1.0 / scale.x + 1.0 / scale.y)),
);
* glam::DAffine3::from_scale(scale.extend(z_scale));

// Our interpretation of the pinhole camera implies that the axis semantics, i.e. ViewCoordinates,
// determine how the image plane is oriented.
Expand Down
11 changes: 9 additions & 2 deletions crates/viewer/re_renderer/shader/global_bindings.wgsl
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,16 @@ struct FrameUniformBuffer {
/// Camera position in world space.
camera_position: vec3f,

/// For perspective: Multiply this with a camera distance to get a measure of how wide a pixel is in world units.
/// Padding to ensure proper alignment (vec2 needs 8-byte alignment).
_padding0: f32,

/// For perspective: Multiply this with a camera distance to get a measure of how wide a pixel is in world units (x and y separately for anamorphic cameras).
/// For orthographic: This is the world size value, independent of distance.
pixel_world_size_from_camera_distance: f32,
/// Note: vec2f has 8-byte size and alignment in WGSL; the explicit padding field below fills the rest of the 16-byte slot so the layout matches the host-side struct.
pixel_world_size_from_camera_distance: vec2f,

/// Explicit padding after vec2f (WGSL vec2f in struct uses 8 bytes but next field aligns to 16).
_padding_after_pixel_size: vec2f,

/// Camera direction in world space.
/// Same as -vec3f(view_from_world[0].z, view_from_world[1].z, view_from_world[2].z)
Expand Down
2 changes: 1 addition & 1 deletion crates/viewer/re_renderer/shader/lines.wgsl
Original file line number Diff line number Diff line change
Expand Up @@ -326,7 +326,7 @@ fn compute_coverage(in: VertexOut) -> f32 {

if !has_any_flag(in.fragment_flags, FLAG_CAP_TRIANGLE) {
let distance_to_skeleton = distance_to_line(in.position_world, in.rounded_inner_line_begin, in.rounded_inner_line_end);
let pixel_world_size = approx_pixel_world_size_at(length(in.position_world - frame.camera_position));
let pixel_world_size = average_approx_pixel_world_size_at(length(in.position_world - frame.camera_position));

// It's important that we do antialias both inwards and outwards of the exact border.
// If we do only outwards, rectangle outlines won't line up nicely
Expand Down
13 changes: 11 additions & 2 deletions crates/viewer/re_renderer/shader/utils/camera.wgsl
Original file line number Diff line number Diff line change
Expand Up @@ -75,10 +75,19 @@ fn ray_sphere_distance(ray: Ray, sphere_origin: vec3f, sphere_radius: f32) -> ve
return vec2f(d, -b - sqrt(max(h, 0.0)));
}

// Returns the projected size of a pixel at a given distance from the camera.
// Returns the projected size of a pixel at a given distance from the camera, for both x and y directions.
//
// This is accurate for objects in the middle of the screen and (depending on the angle) less so at the corners,
// since an object parallel to the camera (like a conceptual pixel) has a bigger projected surface at higher angles.
fn approx_pixel_world_size_at(camera_distance: f32) -> f32 {
//
// For anamorphic cameras, returns different values for x and y components.
fn approx_pixel_world_size_at(camera_distance: f32) -> vec2f {
return select(frame.pixel_world_size_from_camera_distance, camera_distance * frame.pixel_world_size_from_camera_distance, is_camera_perspective());
}

// Returns the average projected pixel size at a given distance from the camera.
// Useful for isotropic operations like point radii and antialiasing that don't need directional information.
fn average_approx_pixel_world_size_at(camera_distance: f32) -> f32 {
let pixel_size = approx_pixel_world_size_at(camera_distance);
return (pixel_size.x + pixel_size.y) * 0.5;
}
2 changes: 1 addition & 1 deletion crates/viewer/re_renderer/shader/utils/size.wgsl
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

fn world_size_from_point_size(size_in_points: f32, camera_distance: f32) -> f32 {
let pixel_size = frame.pixels_from_point * size_in_points;
return approx_pixel_world_size_at(camera_distance) * pixel_size;
return average_approx_pixel_world_size_at(camera_distance) * pixel_size;
}

// Resolves a size (see size.rs!) to a world scale size.
Expand Down
4 changes: 2 additions & 2 deletions crates/viewer/re_renderer/shader/utils/sphere_quad.wgsl
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ fn circle_quad(point_pos: vec3f, point_radius: f32, top_bottom: f32, left_right:
// Add half a pixel of margin for the feathering we do for antialiasing the spheres.
// It's fairly subtle but if we don't do this our spheres look slightly squarish
// TODO(andreas): Computing distance to camera here is a bit excessive, should get distance more easily - keep in mind this code runs for ortho & perspective.
let radius = point_radius + 0.5 * approx_pixel_world_size_at(distance(point_pos, frame.camera_position));
let radius = point_radius + 0.5 * average_approx_pixel_world_size_at(distance(point_pos, frame.camera_position));

return point_pos + pos_in_quad * radius;
}
Expand Down Expand Up @@ -98,7 +98,7 @@ fn sphere_quad_coverage(world_position: vec3f, radius: f32, sphere_center: vec3f
let d = ray_sphere_distance(ray, sphere_center, radius);
let distance_to_sphere_surface = d.x;
let closest_ray_dist = d.y;
let pixel_world_size = approx_pixel_world_size_at(closest_ray_dist);
let pixel_world_size = average_approx_pixel_world_size_at(closest_ray_dist);

let distance_to_surface_in_pixels = distance_to_sphere_surface / pixel_world_size;

Expand Down
8 changes: 6 additions & 2 deletions crates/viewer/re_renderer/src/global_bindings.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,13 @@ pub struct FrameUniformBuffer {
/// Camera position in world space.
pub camera_position: glam::Vec3,

/// For perspective: Multiply this with a camera distance to get a measure of how wide a pixel is in world units.
/// Padding to align to 16 bytes after Vec3.
pub _padding0: f32,

/// For perspective: Multiply this with a camera distance to get a measure of how wide a pixel is in world units (x and y separately for anamorphic cameras).
/// For orthographic: This is the world size value, independent of distance.
pub pixel_world_size_from_camera_distance: f32,
/// Using Vec2RowPadded (vec2 plus explicit padding to 16 bytes) so the Rust layout matches the WGSL struct's vec2f followed by its explicit padding field.
pub pixel_world_size_from_camera_distance: wgpu_buffer_types::Vec2RowPadded,

/// Camera direction in world space.
/// Same as `-view_from_world.row(2).truncate()`
Expand Down
67 changes: 57 additions & 10 deletions crates/viewer/re_renderer/src/view_builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,21 @@ pub enum Projection {
aspect_ratio: f32,
},

/// Perspective camera with different horizontal and vertical fields of view (anamorphic).
///
/// This is used for cameras with non-square pixels or asymmetric optical properties,
/// where the focal lengths in x and y directions differ (fx ≠ fy).
PerspectiveAnamorphic {
/// Viewing angle in view space x direction (horizontal screen axis) in radian.
horizontal_fov: f32,

/// Viewing angle in view space y direction (vertical screen axis) in radian.
vertical_fov: f32,

/// Distance of the near plane.
near_plane_distance: f32,
},

/// Orthographic projection with the camera position at the near plane's center,
/// looking along the negative z view space axis.
Orthographic {
Expand Down Expand Up @@ -132,6 +147,31 @@ impl Projection {
near_plane_distance,
)
}
Self::PerspectiveAnamorphic {
horizontal_fov,
vertical_fov,
near_plane_distance,
} => {
// Build custom infinite reverse-z projection matrix for anamorphic cameras
// Based on standard perspective projection but with separate focal lengths
let tan_half_fov_x = (horizontal_fov * 0.5).tan();
let tan_half_fov_y = (vertical_fov * 0.5).tan();

// For infinite reverse-z projection:
// x_ndc = x_view / (z_view * tan_half_fov_x)
// y_ndc = y_view / (z_view * tan_half_fov_y)
// z_ndc = near / z_view (reverse-z, maps near plane to 1.0, infinity to 0.0)

let x_scale = 1.0 / tan_half_fov_x;
let y_scale = 1.0 / tan_half_fov_y;

glam::Mat4::from_cols(
glam::vec4(x_scale, 0.0, 0.0, 0.0),
glam::vec4(0.0, y_scale, 0.0, 0.0),
glam::vec4(0.0, 0.0, 0.0, -1.0),
glam::vec4(0.0, 0.0, near_plane_distance, 0.0),
)
}
Self::Orthographic {
camera_mode,
vertical_world_size,
Expand Down Expand Up @@ -178,6 +218,17 @@ impl Projection {
(vertical_fov * 0.5).tan(),
)
}
Self::PerspectiveAnamorphic {
horizontal_fov,
vertical_fov,
..
} => {
// For anamorphic cameras, calculate tan_half_fov directly from the FOVs
glam::vec2(
(horizontal_fov * 0.5).tan(),
(vertical_fov * 0.5).tan(),
)
}
Self::Orthographic { .. } => glam::vec2(f32::MAX, f32::MAX), // Can't use infinity in shaders
}
}
Expand Down Expand Up @@ -481,7 +532,7 @@ impl ViewBuilder {
config.resolution_in_pixel[1] as f32,
);
let pixel_world_size_from_camera_distance = match config.projection_from_view {
Projection::Perspective { .. } => {
Projection::Perspective { .. } | Projection::PerspectiveAnamorphic { .. } => {
// Determine how wide a pixel is in world space at unit distance from the camera.
//
// derivation:
Expand All @@ -491,6 +542,8 @@ impl ViewBuilder {
// want: pixels in world per distance, i.e (screen_in_world / resolution / distance)
// => (resolution / screen_in_world / distance) = tan(FOV / 2) * distance * 2 / resolution / distance =
// = tan(FOV / 2) * 2.0 / resolution
//
// For anamorphic cameras, tan_half_fov already contains separate x and y components
tan_half_fov * 2.0 / resolution
}
Projection::Orthographic {
Expand All @@ -514,13 +567,6 @@ impl ViewBuilder {
let pixel_world_size_from_camera_distance =
pixel_world_size_from_camera_distance * config.viewport_transformation.scale();

// Unless the transformation intentionally stretches the image,
// our world size -> pixel size conversation factor should be roughly the same in both directions.
//
// As of writing, the shaders dealing with pixel size estimation, can't deal with non-uniform
// scaling in the viewport transformation.
let pixel_world_size_from_camera_distance = pixel_world_size_from_camera_distance.x;

let mut view_from_world = config.view_from_world.to_mat4();
// For OrthographicCameraMode::TopLeftCorner, we want Z facing forward.
match config.projection_from_view {
Expand All @@ -530,7 +576,7 @@ impl ViewBuilder {
}
OrthographicCameraMode::NearPlaneCenter => {}
},
Projection::Perspective { .. } => {}
Projection::Perspective { .. } | Projection::PerspectiveAnamorphic { .. } => {}
}

let camera_position = config.view_from_world.inverse().translation();
Expand All @@ -543,8 +589,9 @@ impl ViewBuilder {
projection_from_view: projection_from_view.into(),
projection_from_world: projection_from_world.into(),
camera_position,
_padding0: 0.0,
pixel_world_size_from_camera_distance: pixel_world_size_from_camera_distance.into(),
camera_forward,
pixel_world_size_from_camera_distance,
pixels_per_point: config.pixels_per_point,
tan_half_fov,
device_tier: ctx.device_caps().tier as u32,
Expand Down
30 changes: 22 additions & 8 deletions crates/viewer/re_view_spatial/src/ui_2d.rs
Original file line number Diff line number Diff line change
Expand Up @@ -340,8 +340,6 @@ fn setup_target_config(
// * a perspective camera *at the origin* for 3D rendering
// Both share the same view-builder and the same viewport transformation but are independent otherwise.

// TODO(andreas): Support anamorphic pinhole cameras properly.

let pinhole = if let Some(scene_pinhole) = scene_pinhole {
// The user has a pinhole, and we may want to project 3D stuff into this 2D space,
// and we want to use that pinhole projection to do so.
Expand Down Expand Up @@ -370,17 +368,33 @@ fn setup_target_config(
);

let focal_length = pinhole.focal_length_in_pixels();
let focal_length = 2.0 / (1.0 / focal_length.x + 1.0 / focal_length.y); // harmonic mean (lack of anamorphic support)

let projection_from_view = re_renderer::view_builder::Projection::Perspective {
vertical_fov: pinhole.fov_y(),
near_plane_distance: near_clip_plane * focal_length / 500.0, // TODO(#8373): The need to scale this by 500 is quite hacky.
aspect_ratio: pinhole.aspect_ratio(),
// Check if this is an anamorphic camera (fx ≠ fy)
let is_anamorphic = (focal_length.x - focal_length.y).abs() > f32::EPSILON * 100.0;

let projection_from_view = if is_anamorphic {
// Anamorphic camera: compute separate FOVs for x and y
let fov_x = 2.0 * (0.5 * pinhole.resolution.x / focal_length.x).atan();
let fov_y = 2.0 * (0.5 * pinhole.resolution.y / focal_length.y).atan();

re_renderer::view_builder::Projection::PerspectiveAnamorphic {
horizontal_fov: fov_x,
vertical_fov: fov_y,
near_plane_distance: near_clip_plane * focal_length.y / 500.0, // TODO(#8373): The need to scale this by 500 is quite hacky.
}
} else {
// Symmetric camera: use the standard perspective projection
re_renderer::view_builder::Projection::Perspective {
vertical_fov: pinhole.fov_y(),
near_plane_distance: near_clip_plane * focal_length.y / 500.0, // TODO(#8373): The need to scale this by 500 is quite hacky.
aspect_ratio: pinhole.aspect_ratio(),
}
};

// Position the camera looking straight at the principal point:
// Use the y focal length for the camera distance (consistent with near plane distance)
let view_from_world = macaw::IsoTransform::look_at_rh(
pinhole.principal_point().extend(-focal_length),
pinhole.principal_point().extend(-focal_length.y),
pinhole.principal_point().extend(0.0),
-glam::Vec3::Y,
)
Expand Down
49 changes: 49 additions & 0 deletions examples/python/anamorphic_camera/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
<!--[metadata]
title = "Anamorphic Camera"
tags = ["2D", "3D", "Pinhole camera", "Anamorphic"]
description = "Demonstrates anamorphic pinhole camera support with different focal lengths (fx ≠ fy)."
thumbnail = "https://static.rerun.io/anamorphic_camera/placeholder.png"
thumbnail_dimensions = [480, 480]
channel = "main"
build_args = []
-->

# Anamorphic Pinhole Camera

This example demonstrates Rerun's support for anamorphic pinhole cameras, where the focal lengths in the x and y directions differ (fx ≠ fy).

Anamorphic cameras are used in various applications:
- Non-square pixel sensors
- Anamorphic lenses in cinematography
- Some industrial and scientific imaging systems
- Cameras with intentional optical asymmetry

## What is demonstrated

The example shows:
1. **Symmetric Camera** - Standard pinhole camera with fx = fy
2. **Anamorphic Camera** - Camera with different focal lengths (fx ≠ fy)
3. **Extreme Anamorphic** - Camera with very different focal lengths to show correct handling

Each camera views the same test pattern (checkerboard grid) and 3D reference points. The visualization demonstrates that:
- The projection correctly handles different focal lengths
- The aspect ratio and field of view are properly computed
- 3D points project correctly through anamorphic cameras

## Running

```bash
python examples/python/anamorphic_camera/main.py
```

You can also specify which cameras to show:
```bash
# Show only symmetric camera
python examples/python/anamorphic_camera/main.py --camera-type symmetric

# Show only anamorphic cameras
python examples/python/anamorphic_camera/main.py --camera-type anamorphic

# Show all (default)
python examples/python/anamorphic_camera/main.py --camera-type all
```
Loading
Loading