Commit 6d566b2

Merge pull request #229 from robotpy/examples

Add examples to repo, run tests in CI

2 parents: 4b84125 + e8d6201

File tree: 112 files changed, +7146 −0 lines

Note: this is a large commit, so some file contents and paths are hidden by default.

.github/workflows/dist.yml

Lines changed: 4 additions & 0 deletions

@@ -261,6 +261,10 @@ jobs:
         run: |
           python -m devtools ci install-test-pure-wheels

+      - name: Test examples
+        run: |
+          python -m devtools test-examples
+
       - name: Ensure all headers are accounted for
         run: |
           python -m devtools ci scan-headers

devtools/__main__.py

Lines changed: 2 additions & 0 deletions

@@ -6,6 +6,7 @@

 from .ctx import Context
 from . import ci
+from . import examples
 from . import update_pyproject


@@ -34,6 +35,7 @@ def main(ctx: click.Context, verbose: bool):


 main.add_command(ci.ci)
+main.add_command(examples.test_examples)
 main.add_command(update_pyproject.update_pyproject)

devtools/examples.py

Lines changed: 105 additions & 0 deletions

import dataclasses
import os
import pathlib
import subprocess
import sys
import typing as T

import click
import tomlkit

from .util import parse_input, run_cmd


def _validate_example_list(root: pathlib.Path, expected_dirs: T.Sequence[str]) -> None:
    expected = sorted(f"{name}/robot.py" for name in expected_dirs)
    actual = sorted(p.relative_to(root).as_posix() for p in root.rglob("robot.py"))

    if expected == actual:
        return

    missing = sorted(set(expected) - set(actual))
    extra = sorted(set(actual) - set(expected))
    for path in missing:
        print(f"Missing: {path}")
    for path in extra:
        print(f"Extra: {path}")

    if not os.environ.get("FORCE_ANYWAYS"):
        print("ERROR: Not every robot.py file is in the list of tests!")
        sys.exit(1)


@dataclasses.dataclass
class ExamplesTests:
    base: T.List[str]
    ignored: T.List[str]


@dataclasses.dataclass
class ExamplesConfig:
    tests: ExamplesTests


def _load_tests_config(config_path: pathlib.Path) -> ExamplesConfig:
    try:
        data = tomlkit.parse(config_path.read_text(encoding="utf-8"))
    except FileNotFoundError:
        raise click.ClickException(f"Missing tests config: {config_path}")
    except Exception as exc:
        raise click.ClickException(f"Invalid tests config: {config_path}: {exc}")

    try:
        return parse_input(data, ExamplesConfig, config_path)
    except Exception as exc:
        raise click.ClickException(str(exc))


@click.command(name="test-examples")
@click.argument("test_name", required=False)
@click.option("-x", "--exitfirst", is_flag=True, help="Exit on first failed test.")
def test_examples(test_name: str | None, exitfirst: bool) -> None:
    """Run tests on robot examples."""
    root = pathlib.Path(__file__).parent.parent / "examples" / "robot"
    config_path = root / "examples.toml"

    cfg = _load_tests_config(config_path)
    base_tests = cfg.tests.base
    ignored_tests = cfg.tests.ignored

    every_tests = [*base_tests, *ignored_tests]
    _validate_example_list(root, every_tests)

    tests_to_run = base_tests
    if test_name:
        if test_name not in every_tests:
            raise click.BadParameter(f"unknown example {test_name}")
        tests_to_run = [test_name]

    failed_tests = []

    for example_name in tests_to_run:
        test_dir = root / example_name
        print(test_dir.resolve())
        try:
            run_cmd(
                sys.executable,
                "-m",
                "robotpy",
                "test",
                "--builtin",
                cwd=test_dir,
            )
        except subprocess.CalledProcessError:
            print(f"Test in {test_dir.resolve()} failed")
            failed_tests.append(example_name)
            if exitfirst:
                break

    if failed_tests:
        print("Failed tests:")
        for name in failed_tests:
            print(f"- {name}")
        sys.exit(1)

    print("All tests successful!")

examples/robot/.gitignore

Lines changed: 22 additions & 0 deletions

*.py[ocd]
__pycache__
.coverage
.cache

.vscode/
.deploy_cfg
deploy.json

.project
.pydevproject

pyproject.toml
wpilib_preferences.json

imgui.ini
simgui*.json

opkg_cache
pip_cache

networktables.*

Lines changed: 45 additions & 0 deletions (file path hidden)

#!/usr/bin/env python3
#
# Copyright (c) FIRST and other WPILib contributors.
# Open Source Software; you can modify and/or share it under the terms of
# the WPILib BSD license file in the root directory of this project.
#

import wpilib
import wpimath.units


class MyRobot(wpilib.TimedRobot):
    def __init__(self) -> None:
        super().__init__()

        # SmartIO port 1
        self.led = wpilib.AddressableLED(1)

        # Reuse buffer
        # Default to a length of 60
        self.ledData = [wpilib.AddressableLED.LEDData() for _ in range(60)]
        self.led.setLength(len(self.ledData))

        # Set the data
        self.led.setData(self.ledData)

        # Create an LED pattern that will display a rainbow across
        # all hues at maximum saturation and half brightness
        self.rainbow = wpilib.LEDPattern.rainbow(255, 128)

        # Our LED strip has a density of 120 LEDs per meter
        self.kLedSpacing = 1 / 120.0

        # Create a new pattern that scrolls the rainbow pattern across the LED
        # strip, moving at a speed of 1 meter per second.
        self.scrollingRainbow = self.rainbow.scrollAtAbsoluteSpeed(
            1,
            self.kLedSpacing,
        )

    def robotPeriodic(self) -> None:
        # Update the buffer with the rainbow animation
        self.scrollingRainbow.applyTo(self.ledData)
        # Set the LEDs
        self.led.setData(self.ledData)

Lines changed: 26 additions & 0 deletions (file path hidden)

#!/usr/bin/env python3
#
# Copyright (c) FIRST and other WPILib contributors.
# Open Source Software; you can modify and/or share it under the terms of
# the WPILib BSD license file in the root directory of this project.
#


import wpilib
import wpilib.cameraserver


class MyRobot(wpilib.TimedRobot):
    """
    This is a demo program showing the detection of AprilTags. The image is acquired from the USB
    camera, then any detected AprilTags are marked up on the image and sent to the dashboard.
    Be aware that the performance on this is much worse than a coprocessor solution!
    """

    def __init__(self):
        super().__init__()
        # Your image processing code will be launched via a stub that will set up logging
        # and initialize NetworkTables to talk to your robot code.
        # https://robotpy.readthedocs.io/en/stable/vision/roborio.html#important-notes

        wpilib.CameraServer.launch("vision.py:main")

Lines changed: 152 additions & 0 deletions (file path hidden)

#
# Copyright (c) FIRST and other WPILib contributors.
# Open Source Software; you can modify and/or share it under the terms of
# the WPILib BSD license file in the root directory of this project.
#


import ntcore
import robotpy_apriltag
from cscore import CameraServer

import cv2
import numpy as np


#
# This code will work both on a RoboRIO and on other platforms. The exact mechanism
# to run it differs depending on whether you're on a RoboRIO or a coprocessor
#
# https://robotpy.readthedocs.io/en/stable/vision/code.html


def main():
    detector = robotpy_apriltag.AprilTagDetector()

    # look for tag36h11, correct 1 error bit (hamming distance 1)
    # hamming 1 allocates 781KB, 2 allocates 27.4 MB, 3 allocates 932 MB
    # max of 1 recommended for RoboRIO 1, while hamming 2 is feasible on the RoboRIO 2
    detector.addFamily("tag36h11", 1)

    # Set up Pose Estimator - parameters are for a Microsoft Lifecam HD-3000
    # (https://www.chiefdelphi.com/t/wpilib-apriltagdetector-sample-code/421411/21)
    poseEstConfig = robotpy_apriltag.AprilTagPoseEstimator.Config(
        0.1651,
        699.3778103158814,
        677.7161226393544,
        345.6059345433618,
        207.12741326228522,
    )
    estimator = robotpy_apriltag.AprilTagPoseEstimator(poseEstConfig)

    # Get the UsbCamera from CameraServer
    camera = CameraServer.startAutomaticCapture()

    # Set the resolution
    camera.setResolution(640, 480)

    # Get a CvSink. This will capture Mats from the camera
    cvSink = CameraServer.getVideo()

    # Set up a CvSource. This will send images back to the Dashboard
    outputStream = CameraServer.putVideo("Detected", 640, 480)

    # Mats are very memory expensive. Let's reuse these.
    mat = np.zeros((480, 640, 3), dtype=np.uint8)
    grayMat = np.zeros(shape=(480, 640), dtype=np.uint8)

    # Instantiate once
    tags = []
    outlineColor = (0, 255, 0)
    crossColor = (0, 0, 255)

    # Output the list to Network Tables
    tagsTable = ntcore.NetworkTableInstance.getDefault().getTable("apriltags")
    pubTags = tagsTable.getIntegerArrayTopic("tags").publish()

    try:
        while True:
            # Tell the CvSink to grab a frame from the camera and put it
            # in the source mat. If there is an error notify the output.
            if cvSink.grabFrame(mat) == 0:
                # Send the output the error
                outputStream.notifyError(cvSink.getError())

                # Skip the rest of the current iteration
                continue

            cv2.cvtColor(mat, cv2.COLOR_RGB2GRAY, dst=grayMat)

            detections = detector.detect(grayMat)

            # have not seen any tags yet
            tags.clear()

            for detection in detections:
                # remember we saw this tag
                tags.append(detection.getId())

                # draw lines around the tag
                for i in range(4):
                    j = (i + 1) % 4
                    point1 = (
                        int(detection.getCorner(i).x),
                        int(detection.getCorner(i).y),
                    )
                    point2 = (
                        int(detection.getCorner(j).x),
                        int(detection.getCorner(j).y),
                    )
                    mat = cv2.line(mat, point1, point2, outlineColor, 2)

                # mark the center of the tag
                cx = int(detection.getCenter().x)
                cy = int(detection.getCenter().y)
                ll = 10
                mat = cv2.line(
                    mat,
                    (cx - ll, cy),
                    (cx + ll, cy),
                    crossColor,
                    2,
                )
                mat = cv2.line(
                    mat,
                    (cx, cy - ll),
                    (cx, cy + ll),
                    crossColor,
                    2,
                )

                # identify the tag
                mat = cv2.putText(
                    mat,
                    str(detection.getId()),
                    (cx + ll, cy),
                    cv2.FONT_HERSHEY_SIMPLEX,
                    1,
                    crossColor,
                    3,
                )

                # determine pose
                pose = estimator.estimate(detection)

                # put pose into dashboard
                rot = pose.rotation()
                tagsTable.getEntry(f"pose_{detection.getId()}").setDoubleArray(
                    [pose.X(), pose.Y(), pose.Z(), rot.X(), rot.Y(), rot.Z()]
                )

            # put list of tags onto dashboard
            pubTags.set(tags)

            # Give output stream a new image to display
            outputStream.putFrame(mat)
    finally:
        pubTags.close()
        detector.close()


# The camera code will be killed when the robot.py program exits. If you wish to perform cleanup,
# you should register an atexit handler. The child process will NOT be launched when running the
# robot code in simulation or unit testing mode
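
The closing comment suggests registering an atexit handler for cleanup; a minimal sketch of what that could look like in the vision code, assuming a hypothetical cleanup() helper that is not part of this commit:

import atexit


def cleanup():
    # Hypothetical hook: release whatever resources the vision process holds
    # (e.g. NetworkTables publishers, the AprilTag detector) before exiting.
    pass


# atexit runs registered callables when the interpreter shuts down normally
atexit.register(cleanup)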
