package org.eclipse.linuxtools.tmf.ui.tests.histogram;
+import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
+import org.eclipse.linuxtools.tmf.core.timestamp.ITmfTimestamp;
+import org.eclipse.linuxtools.tmf.core.timestamp.TmfTimeRange;
+import org.eclipse.linuxtools.tmf.core.timestamp.TmfTimestamp;
import org.eclipse.linuxtools.tmf.ui.views.histogram.HistogramDataModel;
import org.eclipse.linuxtools.tmf.ui.views.histogram.HistogramScaledData;
import org.eclipse.linuxtools.tmf.ui.views.histogram.IHistogramModelListener;
@Test
public void testHistogramDataModel() {
HistogramDataModel model = new HistogramDataModel();
- assertTrue(model.getNbBuckets() == HistogramDataModel.DEFAULT_NUMBER_OF_BUCKETS);
- assertTrue(model.getNbEvents() == 0);
- assertTrue(model.getBucketDuration() == 1);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getStartTime() == 0);
- assertTrue(model.getEndTime() == 0);
- assertTrue(model.getTimeLimit() == HistogramDataModel.DEFAULT_NUMBER_OF_BUCKETS);
+ // A freshly constructed model must report its documented initial state.
+ testModelConsistency(model, HistogramDataModel.DEFAULT_NUMBER_OF_BUCKETS, 0, 1, 0, 0, 0, HistogramDataModel.DEFAULT_NUMBER_OF_BUCKETS);
}
/**
public void testHistogramDataModelInt() {
final int nbBuckets = 5 * 1000;
HistogramDataModel model = new HistogramDataModel(nbBuckets);
- assertTrue(model.getNbEvents() == 0);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 1);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getStartTime() == 0);
- assertTrue(model.getEndTime() == 0);
- assertTrue(model.getTimeLimit() == nbBuckets);
+ // A model built with an explicit bucket count starts in the same
+ // initial state as the default model, scaled to nbBuckets.
+ testModelConsistency(model, nbBuckets, 0, 1, 0, 0, 0, nbBuckets);
}
/**
HistogramDataModel model = new HistogramDataModel(nbBuckets);
model.countEvent(0, -1);
- assertTrue(model.getNbEvents() == 0);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 1);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getStartTime() == 0);
- assertTrue(model.getEndTime() == 0);
- assertTrue(model.getTimeLimit() == nbBuckets);
+ testModelConsistency(model, nbBuckets, 0, 1, 0, 0, 0, nbBuckets);
}
/**
HistogramDataModel model = new HistogramDataModel(nbBuckets);
model.countEvent(0, -1);
- assertTrue(model.getNbEvents() == 0);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 1);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getStartTime() == 0);
- assertTrue(model.getEndTime() == 0);
- assertTrue(model.getTimeLimit() == nbBuckets);
+ testModelConsistency(model, nbBuckets, 0, 1, 0, 0, 0, nbBuckets);
}
/**
HistogramDataModel model = new HistogramDataModel(nbBuckets);
HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
+
for (int i = 0; i < result.fData.length; i++) {
- assertTrue(result.fData[i] == 0);
+ assertEquals(0, result.fData[i]);
}
- assertTrue(model.getNbEvents() == 0);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 1);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getStartTime() == 0);
- assertTrue(model.getEndTime() == 0);
- assertTrue(model.getTimeLimit() == nbBuckets);
+ testModelConsistency(model, nbBuckets, 0, 1, 0, 0, 0, nbBuckets);
}
/**
model.countEvent(0, 1);
HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
- assertTrue(result.fData[0] == 1);
- for (int i = 1; i < result.fData.length; i++) {
- assertTrue(result.fData[i] == 0);
- }
+ assertEquals(1, result.fData[0]);
+
+ assertArrayEqualsInt(0, result.fData,1);
- assertTrue(model.getNbEvents() == 1);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 1);
- assertTrue(model.getFirstBucketTime() == 1);
- assertTrue(model.getStartTime() == 1);
- assertTrue(model.getEndTime() == 1);
- assertTrue(model.getTimeLimit() == nbBuckets + 1);
+ testModelConsistency(model, nbBuckets, 1, 1, 1, 1, 1, nbBuckets + 1);
}
/**
final int maxHeight = 10;
HistogramDataModel model = new HistogramDataModel(nbBuckets);
- for (int i = 0; i < nbBuckets; i++) {
- model.countEvent(i, i);
- }
+ countEventsInModel(nbBuckets, model);
HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
- for (int i = 0; i < result.fData.length; i++) {
- assertTrue(result.fData[i] == 1);
- }
- assertTrue(model.getNbEvents() == nbBuckets);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 1);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getStartTime() == 0);
- assertTrue(model.getEndTime() == nbBuckets - 1);
- assertTrue(model.getTimeLimit() == nbBuckets);
+ assertArrayEqualsInt(1, result.fData);
+
+ testModelConsistency(model, nbBuckets, nbBuckets, 1, 0, 0, nbBuckets - 1, nbBuckets);
}
/**
final int maxHeight = 10;
HistogramDataModel model = new HistogramDataModel(nbBuckets);
+ // Too different from the countEventsInModel() helper to reuse it here.
for (int i = 0; i < nbBuckets; i++) {
model.countEvent(i, i);
- model.countEvent(i+1, i);
+ model.countEvent(i + 1, i);
}
HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
- for (int i = 0; i < result.fData.length; i++) {
- assertTrue(result.fData[i] == 2);
- }
- assertTrue(model.getNbEvents() == 2 * nbBuckets);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 1);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getStartTime() == 0);
- assertTrue(model.getEndTime() == nbBuckets - 1);
- assertTrue(model.getTimeLimit() == nbBuckets);
+ assertArrayEqualsInt(2, result.fData);
+
+ testModelConsistency(model, nbBuckets, 2 * nbBuckets, 1, 0, 0, nbBuckets- 1, nbBuckets);
}
+
/**
* Test methods for {@link HistogramDataModel#countEvent(long,long)} and
* {@link HistogramDataModel#scaleTo(int,int,int)}.
}
HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
- for (int i = 0; i < result.fData.length; i++) {
- assertTrue(result.fData[i] == 1);
- }
- assertTrue(model.getNbEvents() == nbBuckets);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 1);
- assertTrue(model.getFirstBucketTime() == startTime);
- assertTrue(model.getStartTime() == startTime);
- assertTrue(model.getEndTime() == startTime + nbBuckets - 1);
- assertTrue(model.getTimeLimit() == startTime + nbBuckets);
+ assertArrayEqualsInt(1, result.fData);
+
+ testModelConsistency(model, nbBuckets, nbBuckets, 1, startTime, startTime, startTime + nbBuckets- 1, startTime + nbBuckets);
}
/**
HistogramDataModel model = new HistogramDataModel(10);
try {
model.scaleTo(10, 0, 1);
- }
- catch (AssertionError e1) {
+ } catch (AssertionError e1) {
try {
model.scaleTo(0, 10, 1);
- }
- catch (AssertionError e2) {
+ } catch (AssertionError e2) {
try {
model.scaleTo(0, 0, 1);
- }
- catch (AssertionError e3) {
+ } catch (AssertionError e3) {
return;
}
}
final int[] expectedResult = new int[] { 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
HistogramDataModel model = new HistogramDataModel(nbBuckets);
- for (int i = 0; i < nbEvents; i++) {
- model.countEvent(i, i);
- }
+ countEventsInModel(nbEvents, model);
HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
- for (int i = 0; i < result.fData.length; i++) {
- assertTrue(result.fData[i] == expectedResult[i]);
- }
- assertTrue(model.getNbEvents() == nbEvents);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 1);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getStartTime() == 0);
- assertTrue(model.getEndTime() == nbEvents - 1);
- assertTrue(model.getTimeLimit() == nbBuckets);
+ assertArrayEquals( expectedResult, result.fData);
+
+ testModelConsistency(model, nbBuckets, nbEvents, 1, 0, 0, nbEvents - 1, nbBuckets);
}
/**
final int[] expectedResult = new int[] { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };
HistogramDataModel model = new HistogramDataModel(nbBuckets);
- for (int i = 0; i < nbEvents; i++) {
- model.countEvent(i, i);
- }
+ countEventsInModel(nbEvents, model);
HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
- for (int i = 0; i < result.fData.length; i++) {
- assertTrue(result.fData[i] == expectedResult[i]);
- }
- assertTrue(model.getNbEvents() == nbEvents);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 1);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getStartTime() == 0);
- assertTrue(model.getEndTime() == nbEvents - 1);
- assertTrue(model.getTimeLimit() == nbBuckets);
+ assertArrayEquals(expectedResult, result.fData);
+
+ testModelConsistency(model, nbBuckets, nbEvents, 1, 0, 0, nbEvents - 1, nbBuckets);
}
/**
final int[] expectedResult = new int[] { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 };
HistogramDataModel model = new HistogramDataModel(nbBuckets);
- for (int i = 0; i < nbEvents; i++) {
- model.countEvent(i, i);
- }
+ countEventsInModel(nbEvents, model);
HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
- for (int i = 0; i < result.fData.length; i++) {
- assertTrue(result.fData[i] == expectedResult[i]);
- }
- assertTrue(model.getNbEvents() == nbEvents);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 2);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getStartTime() == 0);
- assertTrue(model.getEndTime() == nbEvents - 1);
- assertTrue(model.getTimeLimit() == 2 * nbBuckets);
+ assertArrayEquals(expectedResult, result.fData);
+
+ testModelConsistency(model, nbBuckets, nbEvents, 2, 0, 0, nbEvents - 1, 2 * nbBuckets);
}
/**
final int[] expectedResult = new int[] { 4, 4, 4, 4, 4, 4, 4, 2, 0, 0 };
HistogramDataModel model = new HistogramDataModel(nbBuckets);
- for (int i = 0; i < nbEvents; i++) {
- model.countEvent(i, i);
- }
+ countEventsInModel(nbEvents, model);
HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
- for (int i = 0; i < result.fData.length; i++) {
- assertTrue(result.fData[i] == expectedResult[i]);
- }
- assertTrue(model.getNbEvents() == nbEvents);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 4);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getStartTime() == 0);
- assertTrue(model.getEndTime() == nbEvents - 1);
- assertTrue(model.getTimeLimit() == 4 * nbBuckets);
+ assertArrayEquals(expectedResult, result.fData);
+
+ testModelConsistency(model, nbBuckets, nbEvents, 4, 0, 0, nbEvents - 1, 4 * nbBuckets);
}
/**
final int[] expectedResult = new int[] { 20, 20, 20, 20, 20, 20, 20, 20, 20, 20 };
HistogramDataModel model = new HistogramDataModel(nbBuckets);
- for (int i = 0; i < nbEvents; i++) {
- model.countEvent(i, i);
- }
+ countEventsInModel(nbEvents, model);
HistogramScaledData result = model.scaleTo(10, maxHeight, 1);
- for (int i = 0; i < result.fData.length; i++) {
- assertTrue(result.fData[i] == expectedResult[i]);
- }
- assertTrue(model.getNbEvents() == nbEvents);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 2);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getEndTime() == nbEvents - 1);
- assertTrue(model.getTimeLimit() == 2 * nbBuckets);
+ assertArrayEquals(expectedResult, result.fData);
+
+ testModelConsistency(model, nbBuckets, nbEvents, 2, 0, 0, nbEvents - 1, 2 * nbBuckets);
}
/**
final int[] expectedResult = new int[] { 24, 24, 24, 24, 24, 24, 24, 24, 9, 0 };
HistogramDataModel model = new HistogramDataModel(nbBuckets);
- for (int i = 0; i < nbEvents; i++) {
- model.countEvent(i, i);
- }
+ countEventsInModel(nbEvents, model);
HistogramScaledData result = model.scaleTo(10, maxHeight, 1);
- for (int i = 0; i < result.fData.length; i++) {
- assertTrue(result.fData[i] == expectedResult[i]);
- }
- assertTrue(model.getNbEvents() == nbEvents);
- assertTrue(model.getNbBuckets() == nbBuckets);
- assertTrue(model.getBucketDuration() == 4);
- assertTrue(model.getFirstBucketTime() == 0);
- assertTrue(model.getStartTime() == 0);
- assertTrue(model.getEndTime() == nbEvents - 1);
- assertTrue(model.getTimeLimit() == 4 * nbBuckets);
+ assertArrayEquals(expectedResult, result.fData);
+
+ testModelConsistency(model, nbBuckets, nbEvents, 4, 0, 0, nbEvents - 1, 4 * nbBuckets);
}
/**
// (int)(width / barWith) = 2
// -> 2 bars -> expected result needs two buckets (scaled data)
//
- // buckets (in model) per bar = last bucket id / nbBars + 1 (plus 1 to cover all used buckets)
+ // buckets (in model) per bar = last bucket id / nbBars + 1 (plus 1 to
+ // cover all used buckets)
// -> buckets per bar = 50 / 2 + 1 = 26
// -> first entry in expected result is 26 * 4 = 104
// -> second entry in expected result is 22 * 4 + 9 = 97
final int[] expectedResult = new int[] { 104, 97 };
HistogramDataModel model = new HistogramDataModel(nbBuckets);
- for (int i = 0; i < nbEvents; i++) {
- model.countEvent(i, i);
- }
+ countEventsInModel(nbEvents, model);
// verify scaled data
HistogramScaledData result = model.scaleTo(width, maxHeight, barWidth);
assertEquals(0, result.fFirstEventTime);
assertEquals(1, result.fLastBucket);
assertEquals(104, result.fMaxValue);
- assertEquals((double)maxHeight/104, result.fScalingFactor, DELTA);
+ assertEquals((double) maxHeight / 104, result.fScalingFactor, DELTA);
assertEquals(maxHeight, result.fHeight);
assertEquals(width, result.fWidth);
assertEquals(barWidth, result.fBarWidth);
- for (int i = 0; i < result.fData.length; i++) {
- assertEquals(expectedResult[i], result.fData[i]);
- }
+ assertArrayEquals(expectedResult, result.fData);
// verify model
- assertEquals(nbEvents, model.getNbEvents());
- assertEquals(nbBuckets, model.getNbBuckets());
- assertEquals(4, model.getBucketDuration());
- assertEquals(0, model.getFirstBucketTime());
- assertEquals(0, model.getStartTime());
- assertEquals(nbEvents - 1, model.getEndTime());
- assertEquals(4 * nbBuckets, model.getTimeLimit());
+ testModelConsistency(model, nbBuckets, nbEvents, 4, 0, 0, nbEvents - 1, 4 * nbBuckets);
}
/**
// (int)(width / barWith) = 10
// -> 10 bars -> expected result needs 10 buckets (scaled data)
//
- // buckets in (model) per bar = last bucket id / nbBars + 1 (plus 1 to cover all used buckets)
+ // buckets in (model) per bar = last bucket id / nbBars + 1 (plus 1 to
+ // cover all used buckets)
// -> buckets per bar = 50 / 10 + 1 = 6
final int[] expectedResult = new int[] { 21, 24, 24, 24, 24, 24, 24, 24, 12, 0 };
HistogramDataModel model = new HistogramDataModel(nbBuckets);
- for (int i = nbEvents - 1; i >= 0; i--) {
- model.countEvent(i, i);
- }
+ countInvertedEvents(nbEvents, model);
// verify scaled data
HistogramScaledData result = model.scaleTo(width, maxHeight, barWidth);
assertEquals(4 * 6, result.fBucketDuration);
assertEquals(0, result.fSelectionBeginBucket);
assertEquals(0, result.fSelectionEndBucket);
- assertEquals(-3, result.fFirstBucketTime); // negative is correct, can happen when reverse
+ assertEquals(-3, result.fFirstBucketTime); // negative is correct, can
+ // happen when reverse
assertEquals(0, result.fFirstEventTime);
assertEquals(9, result.fLastBucket);
assertEquals(24, result.fMaxValue);
- assertEquals((double)maxHeight/24, result.fScalingFactor, DELTA);
+ assertEquals((double) maxHeight / 24, result.fScalingFactor, DELTA);
assertEquals(maxHeight, result.fHeight);
assertEquals(width, result.fWidth);
assertEquals(barWidth, result.fBarWidth);
- for (int i = 0; i < result.fData.length; i++) {
- assertEquals(expectedResult[i], result.fData[i]);
- }
+ assertArrayEquals(expectedResult, result.fData);
// verify model
- assertEquals(nbEvents, model.getNbEvents());
- assertEquals(nbBuckets, model.getNbBuckets());
- assertEquals(4, model.getBucketDuration());
- assertEquals(-3, model.getFirstBucketTime());
- assertEquals(0, model.getStartTime());
- assertEquals(nbEvents - 1, model.getEndTime());
- assertEquals(-3 + 4 * nbBuckets, model.getTimeLimit());
+ testModelConsistency(model, nbBuckets, nbEvents, 4, -3, 0, nbEvents - 1, -3 + 4 * nbBuckets);
+ }
+
+ // Feeds nbEvents events (event count == timestamp == i) in reverse
+ // chronological order, i.e. timestamps descending from nbEvents - 1 to 0.
+ private static void countInvertedEvents(final int nbEvents, HistogramDataModel model) {
+ for (int i = nbEvents - 1; i >= 0; i--) {
+ model.countEvent(i, i);
+ }
}
/**
final int nbEvents = 2 * nbBuckets;
HistogramDataModel model = new HistogramDataModel(nbBuckets);
- for (int i = 0; i < nbEvents; i++) {
- model.countEvent(i, i);
- }
+ countEventsInModel(nbEvents, model);
HistogramScaledData result = model.scaleTo(width, maxHeight, barWidth);
model.clear();
- for (int i = nbEvents -1; i >= 0; i--) {
- model.countEvent(i, i);
- }
+ countInvertedEvents(nbEvents, model);
HistogramScaledData revResult = model.scaleTo(width, maxHeight, barWidth);
- assertEquals(nbEvents, model.getNbEvents());
- assertEquals(nbBuckets, model.getNbBuckets());
- assertEquals(2, model.getBucketDuration());
- assertEquals(0, model.getFirstBucketTime());
- assertEquals(0, model.getStartTime());
- assertEquals(nbEvents - 1, model.getEndTime());
- assertEquals(2 * nbBuckets, model.getTimeLimit());
+ testModelConsistency(model, nbBuckets, nbEvents, 2, 0, 0, nbEvents - 1, 2 * nbBuckets);
// For the above number of events, result and revResult are exactly the same.
+
assertEquals(result.fBucketDuration, revResult.fBucketDuration);
assertEquals(result.fSelectionBeginBucket, revResult.fSelectionBeginBucket);
assertEquals(result.fSelectionEndBucket, revResult.fSelectionEndBucket);
assertEquals(result.getBucketEndTime(0), revResult.getBucketEndTime(0));
assertEquals(result.getBucketStartTime(0), revResult.getBucketStartTime(0));
- for (int i = 0; i < result.fData.length; i++) {
- assertTrue(result.fData[i] == revResult.fData[i]);
- }
+ assertArrayEquals(revResult.fData, result.fData);
}
/**
final int nbBuckets = 2000;
final int nbEvents = 10 * nbBuckets + 256;
final int[] count = new int[1];
- count [0] = 0;
+ count[0] = 0;
// Test add listener and call of listener
IHistogramModelListener listener = new IHistogramModelListener() {
// Test that the listener interface is called every 16000 events.
HistogramDataModel model = new HistogramDataModel(nbBuckets);
model.addHistogramListener(listener);
- for (int i = 0; i < nbEvents; i++) {
- model.countEvent(i+1, i);
- }
- assertTrue(count[0] == 1);
+ countEventsInModel(nbEvents, model, 1);
+
+ assertEquals(1, count[0]);
// Test that the listener interface is called when complete is called.
model.complete();
- assertTrue(count[0] == 2);
+ assertEquals(2, count[0]);
// Test that clear triggers call of listener interface
model.clear();
- assertTrue(count[0] == 3);
+ assertEquals(3, count[0]);
// Test remove listener
count[0] = 0;
model.removeHistogramListener(listener);
- for (int i = 0; i < nbEvents; i++) {
- model.countEvent(i, i);
- }
+ countEventsInModel(nbEvents, model);
model.complete();
- assertTrue(count[0] == 0);
+ assertEquals(0, count[0]);
+ }
+
+ /**
+ * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}: two
+ * disjoint lost-event ranges; verifies the per-bucket lost-event counts
+ * and the combined maximum value.
+ */
+ @Test
+ public void testLostEventsScaleTo_0() {
+ final int nbBuckets = 10;
+ final int maxHeight = 10;
+ final int nbEvents = 3 * nbBuckets;
+ final int nbLostEvents_0 = 4;
+ final int nbLostEvents_1 = 9;
+ final int nbCombinedEvents = nbEvents + 2;
+ final int[] expectedResult = new int[] { 4, 4, 4, 4, 4, 4, 4, 2, 0, 0 };
+ final int[] expectedLostEventsResult = new int[] { 0, 2, 2, 0, 3, 3, 3, 0, 0, 0 };
+
+ HistogramDataModel model = new HistogramDataModel(nbBuckets);
+ countEventsInModel(nbEvents, model);
+
+ // First lost-event range: [5, 10] ns.
+ final TmfTimeRange timeRange_0 = new TmfTimeRange(
+ new TmfTimestamp(5L, ITmfTimestamp.NANOSECOND_SCALE),
+ new TmfTimestamp(10L, ITmfTimestamp.NANOSECOND_SCALE));
+ model.countLostEvent(timeRange_0, nbLostEvents_0, false);
+
+ // Second, non-overlapping lost-event range: [18, 27] ns.
+ final TmfTimeRange timeRange_1 = new TmfTimeRange(
+ new TmfTimestamp(18L, ITmfTimestamp.NANOSECOND_SCALE),
+ new TmfTimestamp(27L, ITmfTimestamp.NANOSECOND_SCALE));
+ model.countLostEvent(timeRange_1, nbLostEvents_1, false);
+
+ HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
+
+ assertArrayEquals(expectedResult, result.fData);
+
+ assertArrayEquals(expectedLostEventsResult, result.fLostEventsData);
+
+ testModelConsistency(model, nbBuckets, nbCombinedEvents, 4, 0, 0, nbEvents - 1, 4 * nbBuckets);
+ assertEquals(7, result.fMaxCombinedValue);
+ }
+
+ /**
+ * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}: two
+ * adjacent lost-event ranges; verifies the per-bucket lost-event counts
+ * and the combined maximum value.
+ */
+ @Test
+ public void testLostEventsScaleTo_1() {
+ final int nbBuckets = 10;
+ final int maxHeight = 10;
+ final int nbEvents = 3 * nbBuckets;
+ final int nbLostEvents_0 = 4;
+ final int nbLostEvents_1 = 9;
+ final int nbCombinedEvents = nbEvents + 2;
+ final int[] expectedLostEventsResult = new int[] { 0, 2, 5, 3, 3, 0, 0, 0, 0, 0 };
+
+ HistogramDataModel model = new HistogramDataModel(nbBuckets);
+ countEventsInModel(nbEvents, model);
+
+ // First lost-event range: [5, 10] ns.
+ final TmfTimeRange timeRange_0 = new TmfTimeRange(
+ new TmfTimestamp(5L, ITmfTimestamp.NANOSECOND_SCALE),
+ new TmfTimestamp(10L, ITmfTimestamp.NANOSECOND_SCALE));
+ model.countLostEvent(timeRange_0, nbLostEvents_0, false);
+
+ // Second range [11, 18] ns starts right after the first one ends.
+ final TmfTimeRange timeRange_1 = new TmfTimeRange(
+ new TmfTimestamp(11L, ITmfTimestamp.NANOSECOND_SCALE),
+ new TmfTimestamp(18L, ITmfTimestamp.NANOSECOND_SCALE));
+ model.countLostEvent(timeRange_1, nbLostEvents_1, false);
+
+ HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
+
+ assertArrayEquals(expectedLostEventsResult, result.fLostEventsData);
+
+ testModelConsistency(model, nbBuckets, nbCombinedEvents, 4, 0, 0, nbEvents - 1, 4 * nbBuckets);
+ assertEquals(9, result.fMaxCombinedValue);
}
+
+ /**
+ * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}: three
+ * lost-event ranges reported out of chronological order, one overlapping
+ * the two others; verifies the per-bucket lost-event counts and the
+ * combined maximum value.
+ */
+ @Test
+ public void testLostEventsScaleTo_2() {
+ final int nbBuckets = 10;
+ final int maxHeight = 10;
+ final int nbEvents = 3 * nbBuckets;
+ final int nbLostEvents_0 = 5;
+ final int nbLostEvents_1 = 15;
+ final int nbLostEvents_2 = 2;
+ final int nbCombinedEvents = nbEvents + 3;
+ final int[] expectedLostEventsResult = new int[] { 0, 0, 3, 3, 6, 5, 3, 2, 0, 0 };
+
+ HistogramDataModel model = new HistogramDataModel(nbBuckets);
+ countEventsInModel(nbEvents, model);
+
+ // Lost-event range [18, 22] ns.
+ final TmfTimeRange timeRange_0 = new TmfTimeRange(
+ new TmfTimestamp(18L, ITmfTimestamp.NANOSECOND_SCALE),
+ new TmfTimestamp(22L, ITmfTimestamp.NANOSECOND_SCALE));
+ model.countLostEvent(timeRange_0, nbLostEvents_0, false);
+
+ // Lost-event range [28, 29] ns, reported before the earlier range below.
+ final TmfTimeRange timeRange_2 = new TmfTimeRange(
+ new TmfTimestamp(28L, ITmfTimestamp.NANOSECOND_SCALE),
+ new TmfTimestamp(29L, ITmfTimestamp.NANOSECOND_SCALE));
+ model.countLostEvent(timeRange_2, nbLostEvents_2, false);
+
+ // Lost-event range [11, 26] ns, overlapping timeRange_0.
+ final TmfTimeRange timeRange_1 = new TmfTimeRange(
+ new TmfTimestamp(11L, ITmfTimestamp.NANOSECOND_SCALE),
+ new TmfTimestamp(26L, ITmfTimestamp.NANOSECOND_SCALE));
+ model.countLostEvent(timeRange_1, nbLostEvents_1, false);
+
+ HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
+
+ assertArrayEquals(expectedLostEventsResult, result.fLostEventsData);
+
+ testModelConsistency(model, nbBuckets, nbCombinedEvents, 4, 0, 0, nbEvents - 1, 4 * nbBuckets);
+ assertEquals(10, result.fMaxCombinedValue);
+ }
+
+ /**
+ * Test method for {@link HistogramDataModel#scaleTo(int,int,int)}: a
+ * single lost-event range; verifies the per-bucket lost-event counts and
+ * the combined maximum value.
+ */
+ @Test
+ public void testLostEventsScaleTo_3() {
+ final int nbBuckets = 10;
+ final int maxHeight = 10;
+ final int nbEvents = 3 * nbBuckets;
+ final int nbLostEvents_0 = 23;
+ final int nbCombinedEvents = nbEvents + 1;
+ final int[] expectedLostEventsResult = new int[] { 0, 0, 5, 5, 5, 5, 3, 0, 0, 0 };
+
+ HistogramDataModel model = new HistogramDataModel(nbBuckets);
+ countEventsInModel(nbEvents, model);
+
+ // Lost-event range [11, 26] ns.
+ final TmfTimeRange timeRange_0 = new TmfTimeRange(
+ new TmfTimestamp(11L, ITmfTimestamp.NANOSECOND_SCALE),
+ new TmfTimestamp(26L, ITmfTimestamp.NANOSECOND_SCALE));
+ model.countLostEvent(timeRange_0, nbLostEvents_0, false);
+
+ HistogramScaledData result = model.scaleTo(nbBuckets, maxHeight, 1);
+
+ assertArrayEquals(expectedLostEventsResult, result.fLostEventsData);
+
+ testModelConsistency(model, nbBuckets, nbCombinedEvents, 4, 0, 0, nbEvents - 1, 4 * nbBuckets);
+ assertEquals(9, result.fMaxCombinedValue);
+ }
+
+ /*
+ * helpers
+ */
+
+ // Counts nbEvents events with timestamps 0 .. nbEvents - 1.
+ private static void countEventsInModel(final int nbEvents, HistogramDataModel model) {
+ countEventsInModel(nbEvents, model, 0);
+ }
+
+ // Same, but the reported event count is shifted by offset.
+ private static void countEventsInModel(final int nbEvents, HistogramDataModel model, int offset) {
+ countEventsInModel(nbEvents, model, offset, 0);
+ }
+
+ // Counts nbEvents events with timestamps startTime .. startTime + nbEvents - 1;
+ // for timestamp i the reported event count is i + offset.
+ private static void countEventsInModel(final int nbEvents, HistogramDataModel model, int offset, int startTime) {
+ for (int i = startTime; i < nbEvents + startTime; i++) {
+ model.countEvent(i + offset, i);
+ }
+ }
+
+ /**
+ * Verifies the complete externally observable state of the given model
+ * (bucket count, event count, bucket duration and time bounds) in one call.
+ */
+ private static void testModelConsistency(HistogramDataModel model, int numberOfBuckets, int nbEvents, int bucketDuration, int firstBucketTime, int startTime, int endTime, int timeLimit) {
+ assertEquals(numberOfBuckets, model.getNbBuckets());
+ assertEquals(nbEvents, model.getNbEvents());
+ assertEquals(bucketDuration, model.getBucketDuration());
+ assertEquals(firstBucketTime, model.getFirstBucketTime());
+ assertEquals(startTime, model.getStartTime());
+ assertEquals(endTime, model.getEndTime());
+ assertEquals(timeLimit, model.getTimeLimit());
+ }
+
+ /**
+ * Asserts that every element of result equals val.
+ */
+ private static void assertArrayEqualsInt(final int val, int[] result) {
+ assertArrayEqualsInt(val, result, 0);
+ }
+
+ /**
+ * Asserts that every element of result, from index startVal on, equals val.
+ */
+ private static void assertArrayEqualsInt(final int val, int[] result, int startVal) {
+ for (int i = startVal; i < result.length; i++) {
+ assertEquals(val, result[i]);
+ }
+ }
+
}
private final Color fSelectionBackgroundColor = Display.getCurrent().getSystemColor(SWT.COLOR_WIDGET_BACKGROUND);
private final Color fLastEventColor = Display.getCurrent().getSystemColor(SWT.COLOR_DARK_RED);
private final Color fHistoBarColor = new Color(Display.getDefault(), 74, 112, 139);
+ private final Color fLostEventColor = new Color(Display.getCurrent(), 208, 62, 120);
// ------------------------------------------------------------------------
// Attributes
private Text fTimeRangeEndText;
/**
- * Histogram drawing area
+ * Histogram drawing area
*/
protected Canvas fCanvas;
/**
- * The histogram data model.
+ * The histogram data model.
*/
protected final HistogramDataModel fDataModel;
TmfSignalManager.deregister(this);
fHistoBarColor.dispose();
+ fLostEventColor.dispose();
fDataModel.removeHistogramListener(this);
}
return fDataModel;
}
+ /**
+ * Returns the text control showing the maximum number of events in one bar
+ *
+ * @return the text control
+ * @since 2.1
+ */
+ public Text getMaxNbEventsText() {
+ return fMaxNbEventsText;
+ }
+
// ------------------------------------------------------------------------
// Operations
// ------------------------------------------------------------------------
/**
* Increase the histogram bucket corresponding to [timestamp]
*
- * @param eventCount
- * The new event count
- * @param timestamp
- * The latest timestamp
+ * @param eventCount The new event count
+ * @param timestamp The latest timestamp
*/
public void countEvent(final long eventCount, final long timestamp) {
fDataModel.countEvent(eventCount, timestamp);
/**
* Sets the current event time and refresh the display
*
- * @param timestamp
- * The time of the current event
+ * @param timestamp The time of the current event
* @deprecated As of 2.1, use {@link #setSelection(long, long)}
*/
@Deprecated
/**
* Sets the current selection time range and refresh the display
*
- * @param beginTime
- * The begin time of the current selection
- * @param endTime
- * The end time of the current selection
+ * @param beginTime The begin time of the current selection
+ * @param endTime The end time of the current selection
* @since 2.1
*/
public void setSelection(final long beginTime, final long endTime) {
int index;
switch (keyCode) {
- case SWT.HOME:
- index = 0;
- while (index < fScaledData.fLastBucket && fScaledData.fData[index] == 0) {
- index++;
- }
- if (index < fScaledData.fLastBucket) {
- fScaledData.fSelectionBeginBucket = index;
- }
- break;
+ case SWT.HOME:
+ index = 0;
+ while (index < fScaledData.fLastBucket && fScaledData.fData[index] == 0) {
+ index++;
+ }
+ if (index < fScaledData.fLastBucket) {
+ fScaledData.fSelectionBeginBucket = index;
+ }
+ break;
- case SWT.ARROW_RIGHT:
- index = Math.max(0, fScaledData.fSelectionBeginBucket + 1);
- while (index < fScaledData.fWidth && fScaledData.fData[index] == 0) {
- index++;
- }
- if (index < fScaledData.fLastBucket) {
- fScaledData.fSelectionBeginBucket = index;
- }
- break;
+ case SWT.ARROW_RIGHT:
+ index = Math.max(0, fScaledData.fSelectionBeginBucket + 1);
+ while (index < fScaledData.fWidth && fScaledData.fData[index] == 0) {
+ index++;
+ }
+ if (index < fScaledData.fLastBucket) {
+ fScaledData.fSelectionBeginBucket = index;
+ }
+ break;
- case SWT.END:
- index = fScaledData.fLastBucket;
- while (index >= 0 && fScaledData.fData[index] == 0) {
- index--;
- }
- if (index >= 0) {
- fScaledData.fSelectionBeginBucket = index;
- }
- break;
+ case SWT.END:
+ index = fScaledData.fLastBucket;
+ while (index >= 0 && fScaledData.fData[index] == 0) {
+ index--;
+ }
+ if (index >= 0) {
+ fScaledData.fSelectionBeginBucket = index;
+ }
+ break;
- case SWT.ARROW_LEFT:
- index = Math.min(fScaledData.fLastBucket - 1, fScaledData.fSelectionBeginBucket - 1);
- while (index >= 0 && fScaledData.fData[index] == 0) {
- index--;
- }
- if (index >= 0) {
- fScaledData.fSelectionBeginBucket = index;
- }
- break;
+ case SWT.ARROW_LEFT:
+ index = Math.min(fScaledData.fLastBucket - 1, fScaledData.fSelectionBeginBucket - 1);
+ while (index >= 0 && fScaledData.fData[index] == 0) {
+ index--;
+ }
+ if (index >= 0) {
+ fScaledData.fSelectionBeginBucket = index;
+ }
+ break;
- default:
- return;
+ default:
+ return;
}
fScaledData.fSelectionEndBucket = fScaledData.fSelectionBeginBucket;
}
fDataModel.setSelection(fSelectionBegin, fSelectionEnd);
fScaledData = fDataModel.scaleTo(canvasWidth, canvasHeight, 1);
- synchronized(fDataModel) {
+ synchronized (fDataModel) {
if (fScaledData != null) {
fCanvas.redraw();
if (fDataModel.getNbEvents() != 0) {
- // Display histogram and update X-,Y-axis labels
+ // Display histogram and update X-,Y-axis
+ // labels
fTimeRangeStartText.setText(TmfTimestampFormat.getDefaulTimeFormat().format(fDataModel.getFirstBucketTime()));
fTimeRangeEndText.setText(TmfTimestampFormat.getDefaulTimeFormat().format(fDataModel.getEndTime()));
} else {
fTimeRangeStartText.setText(""); //$NON-NLS-1$
fTimeRangeEndText.setText(""); //$NON-NLS-1$
}
- fMaxNbEventsText.setText(Long.toString(fScaledData.fMaxValue));
+ long maxNbEvents = HistogramScaledData.hideLostEvents ? fScaledData.fMaxValue : fScaledData.fMaxCombinedValue;
+ fMaxNbEventsText.setText(Long.toString(maxNbEvents));
// The Y-axis area might need to be re-sized
fMaxNbEventsText.getParent().layout();
}
imageGC.fillRectangle(0, 0, image.getBounds().width + 1, image.getBounds().height + 1);
// Draw the histogram bars
- imageGC.setBackground(fHistoBarColor);
final int limit = width < scaledData.fWidth ? width : scaledData.fWidth;
+ double factor = HistogramScaledData.hideLostEvents ? scaledData.fScalingFactor : scaledData.fScalingFactorCombined;
for (int i = 0; i < limit; i++) {
- final int value = (int) Math.ceil(scaledData.fData[i] * scaledData.fScalingFactor);
- imageGC.fillRectangle(i, height - value, 1, value);
+ imageGC.setForeground(fHistoBarColor);
+ final int value = (int) Math.ceil(scaledData.fData[i] * factor);
+ imageGC.drawLine(i, height - value, i, height);
+
+ if (!HistogramScaledData.hideLostEvents) {
+ imageGC.setForeground(fLostEventColor);
+ final int lostEventValue = (int) Math.ceil(scaledData.fLostEventsData[i] * factor);
+ if (lostEventValue != 0) {
+ if (lostEventValue == 1) {
+ // On Linux, a line from x to x is not drawn; on Windows it is.
+ imageGC.drawPoint(i, height - value - 1);
+ } else {
+ // drawing a line is inclusive, so we need to remove 1 from the destination to have the correct length
+ imageGC.drawLine(i, height - value - lostEventValue, i, height - value - 1);
+ }
+ }
+ }
}
// Add a dashed line as a delimiter (at the right of the last bar)
}
final long endTime = fScaledData.getBucketEndTime(index);
final int nbEvents = (index >= 0) ? fScaledData.fData[index] : 0;
-
+ final String newLine = System.getProperty("line.separator"); //$NON-NLS-1$
final StringBuffer buffer = new StringBuffer();
buffer.append("Range = ["); //$NON-NLS-1$
buffer.append(new TmfTimestamp(startTime, ITmfTimestamp.NANOSECOND_SCALE).toString());
- buffer.append(","); //$NON-NLS-1$
+ buffer.append(',');
buffer.append(new TmfTimestamp(endTime, ITmfTimestamp.NANOSECOND_SCALE).toString());
- buffer.append(")\n"); //$NON-NLS-1$
+ buffer.append(')');
+ buffer.append(newLine);
buffer.append("Event count = "); //$NON-NLS-1$
buffer.append(nbEvents);
+ if (!HistogramScaledData.hideLostEvents) {
+ final int nbLostEvents = (index >= 0) ? fScaledData.fLostEventsData[index] : 0;
+ buffer.append(newLine);
+ buffer.append("Lost events count = "); //$NON-NLS-1$
+ buffer.append(nbLostEvents);
+ }
return buffer.toString();
}
/**
* Format the timestamp and update the display
*
- * @param signal the incoming signal
+ * @param signal
+ * the incoming signal
* @since 2.0
*/
@TmfSignalHandler
* Francois Chouinard - Moved from LTTng to TMF
* Francois Chouinard - Added support for empty initial buckets
* Patrick Tasse - Support selection range
+ * Jean-Christian Kouamé, Simon Delisle - Added support to manage lost events
*******************************************************************************/
package org.eclipse.linuxtools.tmf.ui.views.histogram;
import java.util.Arrays;
import org.eclipse.core.runtime.ListenerList;
+import org.eclipse.linuxtools.tmf.core.timestamp.TmfTimeRange;
/**
* Histogram-independent data model.
* be fed to the model in any order. If an event has a timestamp less than the
* <i>basetime</i>, the buckets will be moved to the right to account for the
* new smaller timestamp. The new <i>basetime</i> is a multiple of the bucket
- * duration smaller then the previous <i>basetime</i>. Note that the <i>basetime</i>
- * might not be anymore a timestamp of an event. If necessary, the buckets will
- * be compacted before moving to the right. This might be necessary to not
- * loose any event counts at the end of the buckets array.
+ * duration smaller than the previous <i>basetime</i>. Note that the
+ * <i>basetime</i> might no longer be the timestamp of an event. If necessary,
+ * the buckets will be compacted before moving to the right. This might be
+ * necessary to not lose any event counts at the end of the buckets array.
* <p>
* The mapping from the model to the UI is performed by the <i>scaleTo()</i>
* method. By keeping the number of buckets <i>n</i> relatively large with
// ------------------------------------------------------------------------
/**
- * The default number of buckets
+ * The default number of buckets
*/
public static final int DEFAULT_NUMBER_OF_BUCKETS = 16 * 1000;
// Bucket management
private final int fNbBuckets;
private final long[] fBuckets;
+ private final long[] fLostEventsBuckets;
private long fBucketDuration;
private long fNbEvents;
private int fLastBucket;
/**
* Default constructor with default number of buckets.
- * @param startTime The histogram start time
+ *
+ * @param startTime
+ * The histogram start time
* @since 2.0
*/
public HistogramDataModel(long startTime) {
/**
* Constructor with non-default number of buckets.
- * @param nbBuckets A number of buckets.
+ *
+ * @param nbBuckets
+ * A number of buckets.
*/
public HistogramDataModel(int nbBuckets) {
this(0, nbBuckets);
/**
* Constructor with non-default number of buckets.
- * @param startTime the histogram start time
- * @param nbBuckets A number of buckets.
+ *
+ * @param startTime
+ * the histogram start time
+ * @param nbBuckets
+ * A number of buckets.
* @since 2.0
*/
public HistogramDataModel(long startTime, int nbBuckets) {
fFirstBucketTime = fFirstEventTime = fLastEventTime = startTime;
fNbBuckets = nbBuckets;
fBuckets = new long[nbBuckets];
+ fLostEventsBuckets = new long[nbBuckets];
fModelListeners = new ListenerList();
clear();
}
/**
* Copy constructor.
- * @param other A model to copy.
+ *
+ * @param other
+ * A model to copy.
*/
public HistogramDataModel(HistogramDataModel other) {
fNbBuckets = other.fNbBuckets;
fBuckets = Arrays.copyOf(other.fBuckets, fNbBuckets);
+ fLostEventsBuckets = Arrays.copyOf(other.fLostEventsBuckets, fNbBuckets);
fBucketDuration = Math.max(other.fBucketDuration, 1);
fNbEvents = other.fNbEvents;
fLastBucket = other.fLastBucket;
/**
* Returns the number of events in the data model.
+ *
* @return number of events.
*/
public long getNbEvents() {
/**
* Returns the number of buckets in the model.
+ *
* @return number of buckets.
*/
public int getNbBuckets() {
return fNbBuckets;
}
- /**
- * Returns the current bucket duration.
- * @return bucket duration
- */
+ /**
+ * Returns the current bucket duration.
+ *
+ * @return bucket duration
+ */
public long getBucketDuration() {
return fBucketDuration;
}
/**
* Returns the time value of the first bucket in the model.
+ *
* @return time of first bucket.
*/
public long getFirstBucketTime() {
/**
* Returns the time of the first event in the model.
+ *
* @return time of first event.
*/
public long getStartTime() {
/**
* Sets the model start time
- * @param startTime the histogram range start time
- * @param endTime the histogram range end time
+ *
+ * @param startTime
+ * the histogram range start time
+ * @param endTime
+ * the histogram range end time
* @since 2.0
*/
public void setTimeRange(long startTime, long endTime) {
/**
* Returns the time of the last event in the model.
+ *
* @return the time of last event.
*/
public long getEndTime() {
/**
* Returns the time of the current event in the model.
+ *
* @return the time of the current event.
- * @deprecated As of 2.1, use {@link #getSelectionBegin()} and {@link #getSelectionEnd()}
+ * @deprecated As of 2.1, use {@link #getSelectionBegin()} and
+ * {@link #getSelectionEnd()}
*/
@Deprecated
public long getCurrentEventTime() {
/**
* Returns the begin time of the current selection in the model.
+ *
* @return the begin time of the current selection.
* @since 2.1
*/
/**
* Returns the end time of the current selection in the model.
+ *
* @return the end time of the current selection.
* @since 2.1
*/
/**
* Returns the time limit with is: start time + nbBuckets * bucketDuration
+ *
* @return the time limit.
*/
public long getTimeLimit() {
/**
* Add a listener to the model to be informed about model changes.
- * @param listener A listener to add.
+ *
+ * @param listener
+ * A listener to add.
*/
public void addHistogramListener(IHistogramModelListener listener) {
fModelListeners.add(listener);
/**
* Remove a given model listener.
- * @param listener A listener to remove.
+ *
+ * @param listener
+ * A listener to remove.
*/
public void removeHistogramListener(IHistogramModelListener listener) {
fModelListeners.remove(listener);
/**
* Clear the histogram model.
+ *
* @see org.eclipse.linuxtools.tmf.ui.views.distribution.model.IBaseDistributionModel#clear()
*/
@Override
public void clear() {
Arrays.fill(fBuckets, 0);
+ Arrays.fill(fLostEventsBuckets, 0);
fNbEvents = 0;
fFirstBucketTime = 0;
fLastEventTime = 0;
/**
* Sets the current event time (no notification of listeners)
*
- * @param timestamp A time stamp to set.
+ * @param timestamp
+ * A time stamp to set.
* @deprecated As of 2.1, use {@link #setSelection(long, long)}
*/
@Deprecated
/**
* Sets the current event time with notification of listeners
*
- * @param timestamp A time stamp to set.
- * @deprecated As of 2.1, use {@link #setSelectionNotifyListeners(long, long)}
+ * @param timestamp
+ * A time stamp to set.
+ * @deprecated As of 2.1, use
+ * {@link #setSelectionNotifyListeners(long, long)}
*/
@Deprecated
public void setCurrentEventNotifyListeners(long timestamp) {
/**
* Sets the current selection time range (no notification of listeners)
*
- * @param beginTime The selection begin time.
- * @param endTime The selection end time.
+ * @param beginTime
+ * The selection begin time.
+ * @param endTime
+ * The selection end time.
* @since 2.1
*/
public void setSelection(long beginTime, long endTime) {
/**
* Sets the current selection time range with notification of listeners
*
- * @param beginTime The selection begin time.
- * @param endTime The selection end time.
+ * @param beginTime
+ * The selection begin time.
+ * @param endTime
+ * The selection end time.
* @since 2.1
*/
public void setSelectionNotifyListeners(long beginTime, long endTime) {
/**
* Add event to the correct bucket, compacting the if needed.
*
- * @param eventCount The current event Count (for notification purposes)
- * @param timestamp The timestamp of the event to count
+ * @param eventCount
+ * The current event Count (for notification purposes)
+ * @param timestamp
+ * The timestamp of the event to count
*
*/
@Override
int offset = getOffset(timestamp);
// Compact as needed
- while((fLastBucket + offset) >= fNbBuckets) {
+ while ((fLastBucket + offset) >= fNbBuckets) {
mergeBuckets();
offset = getOffset(timestamp);
}
fLastBucket = fLastBucket + offset;
- fFirstBucketTime = fFirstBucketTime - (offset*fBucketDuration);
+ fFirstBucketTime = fFirstBucketTime - (offset * fBucketDuration);
updateEndTime();
}
fireModelUpdateNotification(eventCount);
}
+ /**
+ * Add lost events to the correct buckets, compacting the buckets if needed.
+ *
+ * @param timeRange
+ * time range of a lost event
+ * @param nbLostEvents
+ * the number of lost events
+ * @param fullRange
+ * Full range or time range for histogram request
+ * @since 2.1
+ */
+ public void countLostEvent(TmfTimeRange timeRange, long nbLostEvents, boolean fullRange) {
+
+ // Validate
+ if (timeRange.getStartTime().getValue() < 0 || timeRange.getEndTime().getValue() < 0) {
+ return;
+ }
+
+ // Compact as needed
+ if (fullRange) {
+ while (timeRange.getEndTime().getValue() >= fTimeLimit) {
+ mergeBuckets();
+ }
+ }
+
+ int indexStart = (int) ((timeRange.getStartTime().getValue() - fFirstBucketTime) / fBucketDuration);
+ int indexEnd = (int) ((timeRange.getEndTime().getValue() - fFirstBucketTime) / fBucketDuration);
+ int nbBucketRange = (indexEnd - indexStart) + 1;
+
+ int lostEventPerBucket = (int) Math.ceil((double) nbLostEvents / nbBucketRange);
+ long lastLostCol = Math.max(1, nbLostEvents - lostEventPerBucket * (nbBucketRange - 1));
+
+ // Increment the right buckets; bear in mind that ranges make it almost certain that some lost events are out of range
+ for (int index = indexStart; index <= indexEnd && index < fLostEventsBuckets.length; index++) {
+ if (index == (indexStart + nbBucketRange - 1)) {
+ fLostEventsBuckets[index] += lastLostCol;
+ } else {
+ fLostEventsBuckets[index] += lostEventPerBucket;
+ }
+ }
+
+ fNbEvents++;
+
+ fireModelUpdateNotification(nbLostEvents);
+ }
+
/**
* Scale the model data to the width, height and bar width requested.
*
- * @param width A width of the histogram canvas
- * @param height A height of the histogram canvas
- * @param barWidth A width (in pixel) of a histogram bar
- * @return the result array of size [width] and where the highest value doesn't exceed [height]
+ * @param width
+ * A width of the histogram canvas
+ * @param height
+ * A height of the histogram canvas
+ * @param barWidth
+ * A width (in pixel) of a histogram bar
+ * @return the result array of size [width] and where the highest value
+ * doesn't exceed [height]
*
- * @see org.eclipse.linuxtools.tmf.ui.views.histogram.IHistogramDataModel#scaleTo(int, int, int)
+ * @see org.eclipse.linuxtools.tmf.ui.views.histogram.IHistogramDataModel#scaleTo(int,
+ * int, int)
*/
@Override
public HistogramScaledData scaleTo(int width, int height, int barWidth) {
// Basic validation
- if ((width <= 0) || (height <= 0) || (barWidth <= 0))
- {
+ if ((width <= 0) || (height <= 0) || (barWidth <= 0))
+ {
throw new AssertionError("Invalid histogram dimensions (" + width + "x" + height + ", barWidth=" + barWidth + ")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
}
int nbBars = width / barWidth;
int bucketsPerBar = (fLastBucket / nbBars) + 1;
- result.fBucketDuration = Math.max(bucketsPerBar * fBucketDuration,1);
+ result.fBucketDuration = Math.max(bucketsPerBar * fBucketDuration, 1);
for (int i = 0; i < nbBars; i++) {
int count = 0;
+ int countLostEvent = 0;
for (int j = i * bucketsPerBar; j < ((i + 1) * bucketsPerBar); j++) {
if (fNbBuckets <= j) {
break;
}
count += fBuckets[j];
+ countLostEvent += fLostEventsBuckets[j];
}
result.fData[i] = count;
+ result.fLostEventsData[i] = countLostEvent;
result.fLastBucket = i;
if (result.fMaxValue < count) {
result.fMaxValue = count;
}
+ if (result.fMaxCombinedValue < count + countLostEvent) {
+ result.fMaxCombinedValue = count + countLostEvent;
+ }
}
// Scale vertically
if (result.fMaxValue > 0) {
result.fScalingFactor = (double) height / result.fMaxValue;
}
+ if (result.fMaxCombinedValue > 0) {
+ result.fScalingFactorCombined = (double) height / result.fMaxCombinedValue;
+ }
fBucketDuration = Math.max(fBucketDuration, 1);
// Set selection begin and end index in the scaled histogram
private void mergeBuckets() {
for (int i = 0; i < (fNbBuckets / 2); i++) {
fBuckets[i] = fBuckets[2 * i] + fBuckets[(2 * i) + 1];
+ fLostEventsBuckets[i] = fLostEventsBuckets[2 * i] + fLostEventsBuckets[(2 * i) + 1];
}
Arrays.fill(fBuckets, fNbBuckets / 2, fNbBuckets, 0);
+ Arrays.fill(fLostEventsBuckets, fNbBuckets / 2, fNbBuckets, 0);
fBucketDuration *= 2;
updateEndTime();
fLastBucket = (fNbBuckets / 2) - 1;
}
private void moveBuckets(int offset) {
- for(int i = fNbBuckets - 1; i >= offset; i--) {
- fBuckets[i] = fBuckets[i-offset];
+ for (int i = fNbBuckets - 1; i >= offset; i--) {
+ fBuckets[i] = fBuckets[i - offset];
+ fLostEventsBuckets[i] = fLostEventsBuckets[i - offset];
}
for (int i = 0; i < offset; i++) {
fBuckets[i] = 0;
+ fLostEventsBuckets[i] = 0;
}
}
}
return offset;
}
-
}