OK, it looks like I do not understand Gadgeteer timers. Here is v1 of the code fragment I'm having trouble with:
private GT.Timer _DatasetTitleTimer;

void ProgramStarted()
{
    // RunOnce: a single Tick roughly 3000 ms after Start() is called.
    _DatasetTitleTimer = new GT.Timer(3000, GT.Timer.BehaviorType.RunOnce);
    _DatasetTitleTimer.Tick += _DatasetTitleTimer_Tick;
}

private void Poller_NewDataAvailable(object source, NewDataEventArgs args)
{
    // Skip new data while the disks are calibrating or the title timer is still pending.
    if (!DisksCalibrating() && !_DatasetTitleTimer.IsRunning)
    {
        if (_LatestData == null || args.DataSetId != _LatestData.DataSetId)
        {
            // New dataset: announce it, then position the disks when the timer fires.
            SendKeyOverSerial(args);
            _DatasetTitleTimer.Start();
        }
        else
        {
            if (!_LatestData.DataEquals(args))
            {
                SendDisksToPosition(args.Data);
            }
        }
        _LatestData = args;
    }
}

private void _DatasetTitleTimer_Tick(GT.Timer timer)
{
    SendDisksToPosition(_LatestData.Data);
}
Or this rewrite (v2), which creates a fresh one-shot timer for each new dataset instead:
private void Poller_NewDataAvailable(object source, NewDataEventArgs args)
{
    if (!DisksCalibrating())
    {
        if (_LatestData == null || args.DataSetId != _LatestData.DataSetId)
        {
            SendKeyOverSerial(args);
            // Fresh one-shot timer per new dataset.
            var datasetTitleTimer = new GT.Timer(3000, GT.Timer.BehaviorType.RunOnce);
            datasetTitleTimer.Tick += _DatasetTitleTimer_Tick;
            datasetTitleTimer.Start();
        }
        else
        {
            if (!_LatestData.DataEquals(args))
            {
                SendDisksToPosition(args.Data);
            }
        }
        _LatestData = args;
    }
}

private void _DatasetTitleTimer_Tick(GT.Timer timer)
{
    // Detach the handler so the one-shot timer can be collected.
    timer.Tick -= _DatasetTitleTimer_Tick;
    Debug.Print("_DatasetTitleTimer_Tick at " + _Poller.When());
    SendDisksToPosition(_LatestData.Data);
}
In both cases I would expect the timer to fire after roughly 3 seconds (3,000 milliseconds). I do not think of NETMF as real-time, so I would not expect it to fire after exactly 3 seconds, but what I see in practice is that the first time it fires after about 3 seconds, the second time through it is about 12 seconds, and thereafter it is anything between 8 and 19 seconds. How can a 3,000 millisecond timer routinely take 19 seconds to fire? What have I misunderstood about Gadgeteer timers and their use? How can I put that right?
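For reference, this is the kind of stripped-down test I am planning to run next, to see whether the raw tick interval is itself unreliable or whether something else in my program is holding up the Gadgeteer dispatcher. It is only a sketch: the _IntervalTimer and _LastTick names are invented for this test, the timing uses DateTime.Now rather than _Poller.When(), and it assumes the usual using GT = Gadgeteer; and using Microsoft.SPOT; at the top of the program class.

private GT.Timer _IntervalTimer;
private DateTime _LastTick;

void ProgramStarted()
{
    // A bare 3000 ms repeating timer with nothing else on the dispatcher,
    // so any drift measured here comes from the timer/dispatcher and not
    // from my own handlers.
    _IntervalTimer = new GT.Timer(3000, GT.Timer.BehaviorType.RunContinuously);
    _IntervalTimer.Tick += IntervalTimer_Tick;
    _LastTick = DateTime.Now;
    _IntervalTimer.Start();
}

private void IntervalTimer_Tick(GT.Timer timer)
{
    // On a healthy dispatcher each logged interval should be close to 3000 ms.
    DateTime now = DateTime.Now;
    long elapsedMs = (now - _LastTick).Ticks / TimeSpan.TicksPerMillisecond;
    Debug.Print("Interval ms: " + elapsedMs.ToString());
    _LastTick = now;
}

If this stripped-down timer also drifts, the problem is presumably below my code; if it stays near 3,000 ms, then something my own handlers are doing on the dispatcher thread is delaying the Tick.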