Commit b4909164 authored by NicolaiO 🐼, committed by TIGERs GitLab

Resolve "No bots detected when tSent and tCapture diverge significantly"

Closes #1446

See merge request main/Sumatra!1176

sumatra-commit: e36c254139aa52f3a1ecb385ad782dcb0583d2f9
parent be550596
Pipeline #7125 passed in 1 minute and 36 seconds
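Context for the core change below: the TimeSync is now updated with the detection frame's tCapture instead of tSent, so the local-clock offset is estimated from the same timestamp that is later converted for the trackers. A minimal sketch of the idea, using a hypothetical OffsetTimeSync rather than the project's TimeSync (which presumably smooths the offset over many samples):

// Hypothetical illustration only. If the offset were estimated from tSent but
// applied to tCapture, every converted timestamp would be off by roughly
// (tSent - tCapture); once that divergence grows large, trackers reject the
// detections and no bots are reported.
class OffsetTimeSync
{
	private double offsetNs; // localTime - visionTime, in [ns]

	void update(final double visionTimestampSec)
	{
		offsetNs = System.nanoTime() - (visionTimestampSec * 1e9);
	}

	long visionToLocalNs(final double visionTimestampSec)
	{
		return (long) ((visionTimestampSec * 1e9) + offsetNs);
	}
}

With both update() and the later conversion driven by tCapture, a divergence between tSent and tCapture no longer shifts the converted timestamps.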
@@ -26,41 +26,41 @@ import edu.tigers.sumatra.network.NetworkUtility;
/**
* The most important implementation of the {@link ACam}-type, which is capable of receiving SSL-Vision data on a
* certain port (and multicast-group)
*
* @author Gero
*/
public class SSLVisionCam extends ACam implements Runnable, IReceiverObserver, IConfigObserver
{
private static final Logger log = Logger.getLogger(SSLVisionCam.class.getName());
private static final int BUFFER_SIZE = 10000;
private final byte[] bufferArr = new byte[BUFFER_SIZE];
private Thread cam;
private MulticastUDPReceiver receiver;
private boolean expectIOE = false;
private final SSLVisionCamGeometryTranslator geometryTranslator = new SSLVisionCamGeometryTranslator();
@Configurable(defValue = "10006")
private static int port = 10006;
@Configurable(defValue = "224.5.23.2")
private static String address = "224.5.23.2";
@Configurable(comment = "Enter a network address to limit reception to a certain network interface")
private static String network = "";
private final TimeSync timeSync = new TimeSync();
static
{
ConfigRegistration.registerClass("user", SSLVisionCam.class);
}
@Override
public void startModule()
{
@@ -75,45 +75,45 @@ public class SSLVisionCam extends ACam implements Runnable, IReceiverObserver, I
receiver = new MulticastUDPReceiver(port, address, nif);
}
receiver.addObserver(this);
cam = new Thread(this, "SSLVisionCam");
cam.start();
ConfigRegistration.registerConfigurableCallback("user", this);
}
@Override
public void stopModule()
{
cleanup();
ConfigRegistration.unregisterConfigurableCallback("user", this);
}
@Override
public void run()
{
// Create new buffer
final ByteBuffer buffer = ByteBuffer.wrap(bufferArr);
while (!Thread.currentThread().isInterrupted())
{
try
{
buffer.clear();
// Fetch packet
final DatagramPacket packet = new DatagramPacket(buffer.array(), buffer.capacity());
if (receiver == null)
{
break;
}
receiver.receive(packet);
final ByteArrayInputStream packetIn = new ByteArrayInputStream(packet.getData(), 0, packet.getLength());
// Translate
final SSL_WrapperPacket sslPacket;
try
@@ -124,23 +124,23 @@ public class SSLVisionCam extends ACam implements Runnable, IReceiverObserver, I
log.error("invalid ssl package", err);
continue;
}
// start with sending out the detection, as it is the most time-critical part
if (sslPacket.hasDetection())
{
timeSync.update(sslPacket.getDetection().getTCapture());
notifyNewCameraFrame(sslPacket.getDetection(), timeSync);
}
if (sslPacket.hasGeometry())
{
final CamGeometry geometry = geometryTranslator.translate(sslPacket.getGeometry());
notifyNewCameraCalibration(geometry);
}
notifyNewVisionPacket(sslPacket);
} catch (final IOException err)
{
if (!expectIOE)
@@ -153,19 +153,19 @@ public class SSLVisionCam extends ACam implements Runnable, IReceiverObserver, I
log.error("Error in SSL vision cam", err);
}
}
// Cleanup
expectIOE = true;
}
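For reference, the run() loop above follows the standard Java multicast receive pattern. A self-contained sketch with plain java.net classes, independent of the project's MulticastUDPReceiver (port, group and buffer size mirror the defaults configured above):

import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.MulticastSocket;

public class VisionReceiveSketch
{
	public static void main(final String[] args) throws Exception
	{
		try (MulticastSocket socket = new MulticastSocket(10006))
		{
			socket.joinGroup(InetAddress.getByName("224.5.23.2"));
			final byte[] buffer = new byte[10000];
			while (!Thread.currentThread().isInterrupted())
			{
				final DatagramPacket packet = new DatagramPacket(buffer, buffer.length);
				socket.receive(packet);
				// The first packet.getLength() bytes hold one protobuf-encoded
				// SSL_WrapperPacket, which would be parsed and dispatched here.
			}
		}
	}
}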
@Override
public void onInterfaceTimedOut()
{
notifyVisionLost();
}
private void cleanup()
{
if (cam != null)
@@ -173,7 +173,7 @@ public class SSLVisionCam extends ACam implements Runnable, IReceiverObserver, I
cam.interrupt();
cam = null;
}
if (receiver != null)
{
expectIOE = true;
@@ -181,8 +181,8 @@ public class SSLVisionCam extends ACam implements Runnable, IReceiverObserver, I
receiver = null;
}
}
@Override
public void afterApply(final IConfigClient configClient)
{
@@ -192,8 +192,8 @@ public class SSLVisionCam extends ACam implements Runnable, IReceiverObserver, I
startModule();
}
}
/**
* @return the port
*/
@@ -201,8 +201,8 @@ public class SSLVisionCam extends ACam implements Runnable, IReceiverObserver, I
{
return port;
}
/**
* @return the address
*/
@@ -50,88 +50,84 @@ import edu.tigers.sumatra.vision.tracker.RobotTracker;
/**
* Is responsible for everything that happens on a single camera.
*
* @author AndreR <andre@ryll.cc>
*/
public class CamFilter
{
@SuppressWarnings("unused")
private static final Logger log = Logger.getLogger(CamFilter.class.getName());
private final int camId;
private static final int FRAME_FILTER_NUM_SAMPLES = 100;
private static final int FRAME_FILTER_DIVIDER = 6;
private final FirstOrderMultiSampleEstimator frameIntervalFilter = new FirstOrderMultiSampleEstimator(
FRAME_FILTER_NUM_SAMPLES);
private long lastCamFrameId;
private Optional<CamCalibration> calibration = Optional.empty();
private Optional<IRectangle> fieldRectWithBoundary = Optional.empty();
private Optional<IRectangle> fieldRect = Optional.empty();
private Optional<IRectangle> viewport = Optional.empty();
private IVector2 lastKnownBallPosition = Vector2f.ZERO_VECTOR;
private long lastBallVisibleTimestamp = 0;
private final Map<BotID, RobotTracker> robots = new HashMap<>();
private final List<BallTracker> balls = new ArrayList<>();
private Map<BotID, RobotInfo> robotInfoMap = new HashMap<>();
private CircularFifoQueue<CamBall> ballHistory = new CircularFifoQueue<>(100);
@Configurable(defValue = "0.2", comment = "Time in [s] after an invisible ball is removed")
private static double invisibleLifetimeBall = 0.2;
@Configurable(defValue = "2.0", comment = "Time in [s] after an invisible robot is removed")
private static double invisibleLifetimeRobot = 2.0;
@Configurable(defValue = "0.05", comment = "Time in [s] after which virtual balls are generated from robot info (barrier)")
private static double delayToVirtualBalls = 0.05;
@Configurable(defValue = "10", comment = "Maximum number of ball trackers")
private static int maxBallTrackers = 10;
@Configurable(defValue = "1000.0", comment = "Max. distance to last know ball location to create virtual balls")
private static double maxVirtualBallDistance = 1000.0;
@Configurable(defValue = "true", comment = "Restrict viewport to minimize overlap (from CameraArchitect).")
private static boolean restrictViewport = true;
@Configurable(defValue = "0.6", comment = "Max. velocity loss at ball-bot hull collisions")
private static double maxBallBotHullLoss = 0.6;
@Configurable(defValue = "1.0", comment = "Max. velocity loss at ball-bot front collisions")
private static double maxBallBotFrontLoss = 1.0;
@Configurable(defValue = "130.0", comment = "Max. height for ball-bot collision check")
private static double maxHeightForCollision = 130.0;
static
{
ConfigRegistration.registerClass("vision", CamFilter.class);
}
/**
* Create a new camera filter.
*
* @param camId
*/
public CamFilter(final int camId)
{
this.camId = camId;
}
/**
* Update this camera filter with a new detection frame.
* Merged robots may serve as prior knowledge to initialize new trackers.
*
* @param frame
* @param lastFilteredFrame
* @return adjusted tCapture
@@ -139,29 +135,29 @@ public class CamFilter
public long update(final CamDetectionFrame frame, final FilteredVisionFrame lastFilteredFrame)
{
CamDetectionFrame adjustedFrame = adjustTCapture(frame);
processRobots(adjustedFrame, lastFilteredFrame.getBots());
processBalls(adjustedFrame, lastFilteredFrame.getBall(), lastFilteredFrame.getBots());
return adjustedFrame.gettCapture();
}
/**
* Update camera calibration data.
* Needed for flying ball detection.
*
* @param calib
*/
public void update(final CamCalibration calib)
{
calibration = Optional.of(calib);
}
/**
* Update field size from vision.
*
* @param field
*/
public void update(final CamFieldSize field)
@@ -169,28 +165,28 @@ public class CamFilter
fieldRectWithBoundary = Optional.of(field.getFieldWithBoundary().withMargin(500));
fieldRect = Optional.of(field.getField());
}
/**
* Update viewport for this cam filter.
*
* @param rect
*/
public void updateViewport(final IRectangle rect)
{
viewport = Optional.ofNullable(rect);
}
public void setRobotInfoMap(final Map<BotID, RobotInfo> robotInfos)
{
robotInfoMap = robotInfos;
}
/**
* Set ball info from BallFilter.
*
* @param ballFilterOutput
*/
public void setBallInfo(final BallFilterOutput ballFilterOutput)
@@ -198,38 +194,46 @@ public class CamFilter
lastKnownBallPosition = ballFilterOutput.getLastKnownPosition().getXYVector();
lastBallVisibleTimestamp = ballFilterOutput.getFilteredBall().getLastVisibleTimestamp();
}
private CamDetectionFrame adjustTCapture(final CamDetectionFrame frame)
{
if (frame.getCamFrameNumber() != (lastCamFrameId + 1))
{
if (lastCamFrameId != 0)
{
log.warn("Missing cam frame: " + lastCamFrameId + " -> " + frame.getCamFrameNumber());
}
frameIntervalFilter.reset();
}
lastCamFrameId = frame.getCamFrameNumber();
if ((frame.getCamFrameNumber() % FRAME_FILTER_DIVIDER) == 0)
{
frameIntervalFilter.addSample(frame.getCamFrameNumber(), frame.gettCapture());
}
IVector2 estimate = frameIntervalFilter.getBestEstimate().orElse(Vector2.fromXY(frame.gettCapture(), 0.0));
double tCapture = estimate.x() + (estimate.y() * frame.getCamFrameNumber());
if (tCapture < 0)
{
log.warn("Negative tCapture: " + tCapture + ". Check your vision-setup.");
}
return new CamDetectionFrame(frame, (long) tCapture);
}
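adjustTCapture() above assumes a linear relation tCapture ≈ offset + period · frameNumber and evaluates the fitted line instead of trusting each reported capture time, which smooths out per-frame jitter. A rough standalone sketch of such a first-order fit using ordinary least squares (illustrative helper only; the FirstOrderMultiSampleEstimator itself may differ):

// Least-squares line through (frameId, tCapture) samples, timestamps in [ns]:
// tCapture(frameId) ~= offset + period * frameId
static double[] fitCaptureTimes(final long[] frameIds, final long[] tCaptures)
{
	final int n = frameIds.length;
	double sumX = 0, sumY = 0, sumXX = 0, sumXY = 0;
	for (int i = 0; i < n; i++)
	{
		sumX += frameIds[i];
		sumY += tCaptures[i];
		sumXX += (double) frameIds[i] * frameIds[i];
		sumXY += (double) frameIds[i] * tCaptures[i];
	}
	final double period = ((n * sumXY) - (sumX * sumY)) / ((n * sumXX) - (sumX * sumX));
	final double offset = (sumY - (period * sumX)) / n;
	return new double[] { offset, period };
}

Under that model, estimate.x() is the offset and estimate.y() the per-frame period, which is why getAverageFrameDt() further down simply scales the y component by 1e-9 to report the frame interval in seconds.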
private List<RobotCollisionShape> getRobotCollisionShapes(final List<FilteredVisionBot> mergedRobots)
{
List<RobotCollisionShape> shapes = new ArrayList<>();
for (FilteredVisionBot bot : mergedRobots)
{
RobotInfo robotInfo = robotInfoMap.get(bot.getBotID());
final double center2Drib;
final double botRadius;
if (robotInfo != null)
@@ -241,24 +245,24 @@ public class CamFilter
center2Drib = Geometry.getOpponentCenter2DribblerDist();
botRadius = Geometry.getBotRadius();
}
shapes.add(new RobotCollisionShape(bot.getPos(), bot.getOrientation(), botRadius, center2Drib,
maxBallBotHullLoss, maxBallBotFrontLoss));
}
return shapes;
}
private List<CamBall> getVirtualBalls(final long timestamp, final long frameId)
{
List<CamBall> virtualBalls = new ArrayList<>();
if (((timestamp - lastBallVisibleTimestamp) * 1e-9) < delayToVirtualBalls)
{
return virtualBalls;
}
for (RobotInfo r : robotInfoMap.values())
{
RobotTracker tracker = robots.get(r.getBotId());
@@ -266,11 +270,11 @@ public class CamFilter
{
continue;
}
IVector2 ballAtDribblerPos = tracker.getPosition(timestamp)
.addNew(Vector2.fromAngle(tracker.getOrientation(timestamp))
.scaleTo(r.getCenter2DribblerDist() + Geometry.getBallRadius()));
if ((ballAtDribblerPos.distanceTo(lastKnownBallPosition) < maxVirtualBallDistance) && r.isBarrierInterrupted())
{
CamBall camBall = new CamBall(0, 0, Vector3.from2d(ballAtDribblerPos, 0), Vector2f.ZERO_VECTOR,
@@ -278,52 +282,52 @@ public class CamFilter
virtualBalls.add(camBall);
}
}
return virtualBalls;
}
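The virtual ball above is placed directly in front of the robot's dribbler: the tracker's position plus a unit vector along the robot's orientation, scaled by the centre-to-dribbler distance plus the ball radius. The same geometry in plain Java, without the project's vector classes (hypothetical helper for illustration):

// Returns {x, y} of a ball assumed to sit at the dribbler of a robot at
// (botX, botY) with the given orientation in [rad]; distances in [mm].
static double[] ballAtDribbler(final double botX, final double botY, final double orientation,
		final double center2DribblerDist, final double ballRadius)
{
	final double d = center2DribblerDist + ballRadius;
	return new double[] { botX + (Math.cos(orientation) * d), botY + (Math.sin(orientation) * d) };
}

A virtual ball is only added when that position lies within maxVirtualBallDistance of the last known ball position and the robot reports its barrier as interrupted, as checked above.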
/**
* Get camera position from calibration data.
*
* @return
*/
private Optional<IVector3> getCameraPosition()
{
return calibration.map(CamCalibration::getCameraPosition);
}
/**
* Get average time between two detection frames.
*
* @return
*/
public double getAverageFrameDt()
{
return frameIntervalFilter.getBestEstimate().orElse(Vector2.fromXY(0, 0)).y() * 1e-9;
}
private void processRobots(final CamDetectionFrame frame, final List<FilteredVisionBot> mergedRobots)
{
// remove trackers of bots that have not been visible for some time
robots.entrySet()
.removeIf(
e -> ((frame.gettCapture() - e.getValue().getLastUpdateTimestamp()) * 1e-9) > invisibleLifetimeRobot);
// remove trackers out of field
fieldRectWithBoundary.ifPresent(iRectangle -> robots.entrySet()
.removeIf(
e -> !iRectangle.isPointInShape(e.getValue().getPosition(frame.gettCapture()))));
// do a prediction on all trackers
for (RobotTracker r : robots.values())
{
r.predict(frame.gettCapture(), getAverageFrameDt());
}
for (CamRobot r : frame.getRobots())
{
// ignore robots outside our viewport
@@ -331,13 +335,13 @@ public class CamFilter
{
continue;
}
// check if there are other robots very close by, as this could be a false vision detection
// the robot with this cam bot's id is filtered out first to still allow trackers at the same location
long numCloseTrackers = mergedRobots.stream()
.filter(m -> m.getBotID() != r.getBotId())
.filter(m -> m.getPos().distanceTo(r.getPos()) < (Geometry.getBotRadius() * 1.5)).count();
if (numCloseTrackers > 0)
{
log.debug("[" + r.getCameraId() + "] Ignoring new robot " + r.getBotId());
@@ -355,15 +359,15 @@ public class CamFilter
}
}
}
private void createNewRobotTracker(final CamRobot robot, final List<FilteredVisionBot> mergedRobots)
{
RobotTracker tracker;
Optional<FilteredVisionBot> filteredBot = mergedRobots.stream()
.filter(m -> m.getBotID() == robot.getBotId())
.findFirst();
if (filteredBot.isPresent() && fieldRectWithBoundary.isPresent()
&& fieldRectWithBoundary.get().isPointInShape(filteredBot.get().getPos()))
{
@@ -377,37 +381,36 @@ public class CamFilter
robots.put(robot.getBotId(), tracker);
}