Commit c320c25d authored by AndreR's avatar AndreR Committed by TIGERs GitLab
Browse files

Resolve "Sumatra simulation timing is wrong"

Closes #1584

See merge request main/Sumatra!1330

sumatra-commit: 66e44fa7f3f6431820821a6c88d03d8135c4c699
parent d84420d5
Pipeline #8628 passed with stage
in 3 minutes and 34 seconds
......@@ -24,6 +24,10 @@ public class FpsCounter
public boolean newFrame(final long timestamp)
{
boolean fpsChanged = false;
if (timestamp < lastTime)
{
reset();
}
double timeDiff = (timestamp - lastTime) / 1e9;
if (timeDiff > TIME_WINDOW)
{
......
......@@ -78,6 +78,7 @@ public class UdpTransceiver
} catch (IOException e)
{
log.warn("Failed to setup socket for {}", this, e);
socket = null;
}
}
}
......@@ -125,7 +126,7 @@ public class UdpTransceiver
public void send(byte[] bytes)
{
if (socket == null)
if (socket == null || address == null)
{
return;
}
......
......@@ -18,8 +18,10 @@ import lombok.extern.log4j.Log4j2;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.nio.ByteBuffer;
import java.util.Optional;
/**
......@@ -37,6 +39,7 @@ public class SSLVisionCam extends ACam implements Runnable, IReceiverObserver, I
private boolean expectIOE = false;
private int port;
private String address;
private InetAddress visionAddress;
private final SSLVisionCamGeometryTranslator geometryTranslator = new SSLVisionCamGeometryTranslator();
......@@ -45,7 +48,6 @@ public class SSLVisionCam extends ACam implements Runnable, IReceiverObserver, I
private static String network = "";
static
{
ConfigRegistration.registerClass("user", SSLVisionCam.class);
......@@ -111,6 +113,7 @@ public class SSLVisionCam extends ACam implements Runnable, IReceiverObserver, I
}
receiver.receive(packet);
visionAddress = packet.getAddress();
final ByteArrayInputStream packetIn = new ByteArrayInputStream(packet.getData(), 0, packet.getLength());
// Translate
......@@ -208,4 +211,10 @@ public class SSLVisionCam extends ACam implements Runnable, IReceiverObserver, I
{
return address;
}
/**
 * Returns the address of the sender of the most recently received vision packet.
 * The underlying field is assigned from {@code packet.getAddress()} in the receive loop,
 * so it is empty until the first packet has arrived (and after a reset, if the field is cleared).
 *
 * @return the {@link InetAddress} of the last received SSL-Vision packet,
 *         or {@link Optional#empty()} if no packet has been received yet
 */
public Optional<InetAddress> getVisionAddress()
{
return Optional.ofNullable(visionAddress);
}
}
......@@ -35,16 +35,20 @@ import edu.tigers.sumatra.vision.data.FilteredVisionFrame;
import edu.tigers.sumatra.vision.data.RobotCollisionShape;
import edu.tigers.sumatra.vision.tracker.BallTracker;
import edu.tigers.sumatra.vision.tracker.RobotTracker;
import lombok.Getter;
import org.apache.commons.collections4.QueueUtils;
import org.apache.commons.collections4.queue.CircularFifoQueue;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.awt.Color;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
/**
......@@ -62,6 +66,8 @@ public class CamFilter
private final FirstOrderMultiSampleEstimator frameIntervalFilter = new FirstOrderMultiSampleEstimator(
FRAME_FILTER_NUM_SAMPLES);
private long lastCamFrameId;
@Getter
private long timestamp;
private Optional<CamCalibration> calibration = Optional.empty();
private Optional<IRectangle> fieldRectWithBoundary = Optional.empty();
......@@ -71,13 +77,13 @@ public class CamFilter
private IVector2 lastKnownBallPosition = Vector2f.ZERO_VECTOR;
private long lastBallVisibleTimestamp = 0;
private final Map<BotID, RobotTracker> robots = new HashMap<>();
private final Map<BotID, RobotTracker> robots = new ConcurrentHashMap<>();
private final List<BallTracker> balls = new ArrayList<>();
private final List<BallTracker> balls = Collections.synchronizedList(new ArrayList<>());
private Map<BotID, RobotInfo> robotInfoMap = new HashMap<>();
private Map<BotID, RobotInfo> robotInfoMap = new ConcurrentHashMap<>();
private CircularFifoQueue<CamBall> ballHistory = new CircularFifoQueue<>(100);
private Queue<CamBall> ballHistory = QueueUtils.synchronizedQueue(new CircularFifoQueue<>(100));
@Configurable(defValue = "1.0", comment = "Time in [s] after an invisible ball is removed")
private static double invisibleLifetimeBall = 1.0;
......@@ -132,16 +138,15 @@ public class CamFilter
*
* @param frame
* @param lastFilteredFrame
* @return adjusted tCapture
*/
public long update(final CamDetectionFrame frame, final FilteredVisionFrame lastFilteredFrame)
public void update(final CamDetectionFrame frame, final FilteredVisionFrame lastFilteredFrame)
{
CamDetectionFrame adjustedFrame = adjustTCapture(frame);
processRobots(adjustedFrame, lastFilteredFrame.getBots());
processBalls(adjustedFrame, lastFilteredFrame.getBall(), lastFilteredFrame.getBots());
return adjustedFrame.gettCapture();
timestamp = adjustedFrame.gettCapture();
}
......@@ -204,7 +209,8 @@ public class CamFilter
{
if (lastCamFrameId != 0)
{
log.warn("Non-consecutive cam frame: " + lastCamFrameId + " -> " + frame.getCamFrameNumber());
log.warn("Non-consecutive cam frame for cam {}: {} -> {}", frame.getCameraId(), lastCamFrameId,
frame.getCamFrameNumber());
}
frameIntervalFilter.reset();
}
......
package edu.tigers.sumatra.vision;
/*
* Copyright (c) 2009 - 2021, DHBW Mannheim - TIGERs Mannheim
*/
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
package edu.tigers.sumatra.vision;
import com.github.g3force.configurable.ConfigRegistration;
import com.github.g3force.configurable.Configurable;
import edu.tigers.sumatra.cam.data.CamRobot;
import edu.tigers.sumatra.ids.BotID;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
/**
* Track the overall quality/visibility of each robot by counting the number of detection over a fixed time horizon.
......@@ -42,13 +45,13 @@ public class RobotQualityInspector
}
public void addDetection(CamRobot camRobot)
public synchronized void addDetection(CamRobot camRobot)
{
measurements.get(camRobot.getBotId()).add(camRobot.getTimestamp());
}
public void prune(long currentTimestamp)
public synchronized void prune(long currentTimestamp)
{
long timestamp = currentTimestamp - (long) (trackingTimeHorizon * 1e9);
for (List<Long> timestamps : measurements.values())
......@@ -67,7 +70,7 @@ public class RobotQualityInspector
}
public void updateAverageDt(double averageDt)
public synchronized void updateAverageDt(double averageDt)
{
maxPossibleDetectionsPerCam = trackingTimeHorizon / averageDt;
}
......@@ -79,19 +82,19 @@ public class RobotQualityInspector
}
public long getNumDetections(final BotID botID)
public synchronized long getNumDetections(final BotID botID)
{
return measurements.get(botID).size();
}
public double getPossibleDetections()
public synchronized double getPossibleDetections()
{
return maxPossibleDetectionsPerCam;
}
public boolean passesQualityInspection(final BotID botID)
public synchronized boolean passesQualityInspection(final BotID botID)
{
return getQuality(botID) > robotQualityThreshold;
}
......
/*
* Copyright (c) 2009 - 2017, DHBW Mannheim - TIGERs Mannheim
* Copyright (c) 2009 - 2021, DHBW Mannheim - TIGERs Mannheim
*/
package edu.tigers.sumatra.vision;
import java.awt.Color;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Collectors;
import com.github.g3force.configurable.ConfigRegistration;
import com.github.g3force.configurable.Configurable;
import edu.tigers.sumatra.cam.data.CamCalibration;
import edu.tigers.sumatra.cam.data.CamDetectionFrame;
import edu.tigers.sumatra.cam.data.CamGeometry;
......@@ -30,22 +19,32 @@ import edu.tigers.sumatra.math.vector.IVector2;
import edu.tigers.sumatra.math.vector.Vector2;
import edu.tigers.sumatra.math.vector.Vector2f;
import java.awt.Color;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Collectors;
/**
* The viewport architect inspects all camera geometries and aligns their viewports to a predefined overlap.
*
*
* @author AndreR <andre@ryll.cc>
*/
public class ViewportArchitect
{
private Map<Integer, Viewport> viewports = new HashMap<>();
private Map<Integer, Viewport> viewports = new ConcurrentSkipListMap<>();
private final List<IViewportArchitect> observers = new CopyOnWriteArrayList<>();
private Viewport field;
@Configurable(defValue = "400.0", comment = "Maximum camera overlap. [mm]")
private static double maxViewportOverlap = 400.0;
@Configurable(defValue = "DYNAMICALLY", comment = "Method to be used to construct viewports.")
private static EViewportConstruction viewportConstruction = EViewportConstruction.DYNAMICALLY;
......
......@@ -35,7 +35,6 @@ import edu.tigers.sumatra.vision.data.KickEvent;
import edu.tigers.sumatra.vision.kick.estimators.IBallModelIdentResult;
import edu.tigers.sumatra.vision.tracker.BallTracker;
import edu.tigers.sumatra.vision.tracker.RobotTracker;
import lombok.AllArgsConstructor;
import lombok.extern.log4j.Log4j2;
import java.awt.Color;
......@@ -45,7 +44,6 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
......@@ -59,12 +57,9 @@ import java.util.stream.Collectors;
public class VisionFilterImpl extends AVisionFilter
implements IViewportArchitect, IBallModelIdentificationObserver
{
@Configurable(defValue = "0.0125", comment = "Publish frequency")
@Configurable(defValue = "0.0125", comment = "Publish frequency (requires restart)")
private static double publishDt = 0.0125;
@Configurable(defValue = "false", comment = "When using a publish thread, compensate the frame age with prediction")
private static boolean predictToNow = false;
static
{
ConfigRegistration.registerClass("vision", VisionFilterImpl.class);
......@@ -77,38 +72,22 @@ public class VisionFilterImpl extends AVisionFilter
private final RobotQualityInspector robotQualityInspector = new RobotQualityInspector();
private Map<Integer, CamFilter> cams = new ConcurrentHashMap<>();
private VisionFilterFrame lastFrame = new VisionFilterFrame(System.nanoTime(),
FilteredVisionFrame.createEmptyFrame());
private FilteredVisionFrame lastFrame = FilteredVisionFrame.createEmptyFrame();
private KickEvent lastKickEvent;
private BallFilterOutput lastBallFilterOutput = new BallFilterOutput(
lastFrame.frame.getBall(),
lastFrame.frame.getBall().getPos(),
lastFrame.getBall(),
lastFrame.getBall().getPos(),
new BallFilterPreprocessorOutput(null, null, null)
);
private ExecutorService filterExecutor = null;
private ScheduledExecutorService publisherExecutor = null;
private Publisher publisher = new Publisher();
private ScheduledExecutorService publisherExecutor;
private class Publisher implements Runnable
private void publish()
{
@Override
public void run()
{
var currentFrame = lastFrame;
long frameNow = currentFrame.frame.getTimestamp();
if (predictToNow)
{
var systemNow = System.nanoTime();
var frameAge = Math.max(0, systemNow - currentFrame.systemTimestamp);
frameNow += frameAge;
}
var extrapolatedFrame = extrapolateFilteredFrame(currentFrame.frame, frameNow);
publishFilteredVisionFrame(extrapolatedFrame);
}
lastFrame = constructFilteredVisionFrame(lastFrame);
var extrapolatedFrame = extrapolateFilteredFrame(lastFrame, lastFrame.getTimestamp());
publishFilteredVisionFrame(extrapolatedFrame);
}
......@@ -141,28 +120,20 @@ public class VisionFilterImpl extends AVisionFilter
@Override
protected void updateCamDetectionFrame(final CamDetectionFrame camDetectionFrame)
{
long tReceive = System.nanoTime();
if (camDetectionFrame.gettCapture() <= 0)
{
// skip negative timestamps. They can produce unexpected behavior
return;
}
if (filterExecutor == null)
processCamDetectionFrame(camDetectionFrame);
if (publisherExecutor == null)
{
var newFrame = processDetectionFrame(camDetectionFrame, lastFrame.frame);
var extrapolatedFrame = extrapolateFilteredFrame(newFrame, newFrame.getTimestamp());
publishFilteredVisionFrame(extrapolatedFrame);
lastFrame = new VisionFilterFrame(tReceive, newFrame);
} else
{
filterExecutor.submit(() ->
lastFrame = new VisionFilterFrame(tReceive, processDetectionFrame(camDetectionFrame, lastFrame.frame))
);
publish();
}
}
private FilteredVisionFrame processDetectionFrame(CamDetectionFrame camDetectionFrame, FilteredVisionFrame lastFrame)
private void processCamDetectionFrame(CamDetectionFrame camDetectionFrame)
{
int camId = camDetectionFrame.getCameraId();
......@@ -170,19 +141,28 @@ public class VisionFilterImpl extends AVisionFilter
viewportArchitect.newDetectionFrame(camDetectionFrame);
// add camera if it does not exist yet
cams.computeIfAbsent(camId, CamFilter::new);
var camFilter = cams.computeIfAbsent(camId, CamFilter::new);
// set viewport
cams.get(camId).updateViewport(viewportArchitect.getViewport(camId));
camFilter.updateViewport(viewportArchitect.getViewport(camId));
// update robot infos on all camera filters
cams.get(camId).setRobotInfoMap(getRobotInfoMap());
camFilter.setRobotInfoMap(getRobotInfoMap());
// set latest ball info on all camera filters (to generate virtual balls from barrier info)
cams.get(camId).setBallInfo(lastBallFilterOutput);
camFilter.setBallInfo(lastBallFilterOutput);
// update camera filter with new detection frame
long timestamp = cams.get(camId).update(camDetectionFrame, lastFrame);
camFilter.update(camDetectionFrame, lastFrame);
// update robot quality inspector
camDetectionFrame.getRobots().forEach(robotQualityInspector::addDetection);
}
private FilteredVisionFrame constructFilteredVisionFrame(FilteredVisionFrame lastFrame)
{
long timestamp = cams.values().stream().mapToLong(CamFilter::getTimestamp).max().orElse(lastFrame.getTimestamp());
// use newest timestamp to prevent negative delta time in filtered frames
timestamp = Math.max(lastFrame.getTimestamp(), timestamp);
......@@ -191,7 +171,6 @@ public class VisionFilterImpl extends AVisionFilter
List<FilteredVisionBot> mergedRobots = mergeRobots(cams.values(), timestamp);
// update robot quality inspector
camDetectionFrame.getRobots().forEach(robotQualityInspector::addDetection);
robotQualityInspector.prune(timestamp);
final double averageDt = cams.values().stream().mapToDouble(CamFilter::getAverageFrameDt).max().orElse(0.01);
robotQualityInspector.updateAverageDt(averageDt);
......@@ -301,13 +280,7 @@ public class VisionFilterImpl extends AVisionFilter
@Override
public void onNewCameraGeometry(final CamGeometry geometry)
{
if (filterExecutor == null)
{
processGeometryFrame(geometry);
} else
{
filterExecutor.submit(() -> Safe.run(this::processGeometryFrame, geometry));
}
processGeometryFrame(geometry);
}
......@@ -347,12 +320,10 @@ public class VisionFilterImpl extends AVisionFilter
if (useThreads)
{
filterExecutor = Executors
.newSingleThreadScheduledExecutor(new NamedThreadFactory("VisionFilter Processor"));
publisherExecutor = Executors
.newSingleThreadScheduledExecutor(new NamedThreadFactory("VisionFilter Publisher"));
publisherExecutor
.scheduleAtFixedRate(() -> Safe.run(publisher), 0, (long) (publishDt * 1e9), TimeUnit.NANOSECONDS);
.scheduleAtFixedRate(() -> Safe.run(this::publish), 0, (long) (publishDt * 1e9), TimeUnit.NANOSECONDS);
log.info("Using threaded VisionFilter");
}
}
......@@ -362,11 +333,6 @@ public class VisionFilterImpl extends AVisionFilter
protected void stop()
{
super.stop();
if (filterExecutor != null)
{
filterExecutor.shutdown();
filterExecutor = null;
}
if (publisherExecutor != null)
{
publisherExecutor.shutdown();
......@@ -376,7 +342,7 @@ public class VisionFilterImpl extends AVisionFilter
viewportArchitect.removeObserver(this);
ballFilterPreprocessor.removeObserver(this);
ballFilterPreprocessor.clear();
lastFrame = new VisionFilterFrame(System.nanoTime(), FilteredVisionFrame.createEmptyFrame());
lastFrame = FilteredVisionFrame.createEmptyFrame();
}
......@@ -401,7 +367,7 @@ public class VisionFilterImpl extends AVisionFilter
super.onClearCamFrame();
cams.clear();
ballFilterPreprocessor.clear();
lastFrame = new VisionFilterFrame(System.nanoTime(), FilteredVisionFrame.createEmptyFrame());
lastFrame = FilteredVisionFrame.createEmptyFrame();
}
......@@ -493,12 +459,4 @@ public class VisionFilterImpl extends AVisionFilter
}
return shapes;
}
@AllArgsConstructor
private static class VisionFilterFrame
{
long systemTimestamp;
FilteredVisionFrame frame;
}
}
/*
* Copyright (c) 2009 - 2020, DHBW Mannheim - TIGERs Mannheim
* Copyright (c) 2009 - 2021, DHBW Mannheim - TIGERs Mannheim
*/
package edu.tigers.sumatra.presenter.log;
......@@ -66,7 +66,8 @@ public class LogEventBuffer
public synchronized List<LogEvent> getNewEvents()
{
List<LogEvent> subList = new ArrayList<>(eventStorage.subList((int) (start - offset), (int) (end - offset)));
List<LogEvent> subList = new ArrayList<>(
eventStorage.subList((int) Math.max(0, start - offset), (int) (end - offset)));
start = end;
return subList;
}
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment