org.eclipse.tracecompass.internal.datastore.core.historytree;x-internal:=true,
org.eclipse.tracecompass.internal.datastore.core.serialization;x-internal:=true,
org.eclipse.tracecompass.internal.provisional.datastore.core.condition;x-internal:=true,
- org.eclipse.tracecompass.internal.provisional.datastore.core.historytree;x-internal:=true
+ org.eclipse.tracecompass.internal.provisional.datastore.core.historytree;x-internal:=true,
+ org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic;x-internal:=true
Import-Package: com.google.common.collect
import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HTNode;
import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HistoryTreeStub;
import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.IHTNode.NodeType;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic.ClassicHistoryTreeStub;
import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTInterval;
import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTIntervalReader;
import org.junit.Test;
HtTestUtils.READ_FACTORY,
HTNodeTest.BASE_OBJ_FACTORY
},
+ { "Classic core node",
+ HTNode.COMMON_HEADER_SIZE + Integer.BYTES + Integer.BYTES * NB_CHILDREN + Long.BYTES * NB_CHILDREN,
+ ClassicHistoryTreeStub.CLASSIC_NODE_FACTORY,
+ HtTestUtils.READ_FACTORY,
+ HTNodeTest.BASE_OBJ_FACTORY },
+
});
}
* @throws IOException
* Any exception occurring with the file
*/
- public HTCoreNodeTest(String name, int headerSize, IHTNodeFactory<E, N> factory,
- IHTIntervalReader<E> readFactory, ObjectFactory<E> objFactory) throws IOException {
+ public HTCoreNodeTest(String name,
+ int headerSize,
+ IHTNodeFactory<E, N> factory,
+ IHTIntervalReader<E> readFactory,
+ ObjectFactory<E> objFactory) throws IOException {
+
super(name, headerSize, NodeType.CORE, factory, readFactory, objFactory);
}
import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HistoryTreeStub;
import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.IHTNode;
import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.IHTNode.NodeType;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic.ClassicHistoryTreeStub;
import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.HTInterval;
import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTInterval;
import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTIntervalReader;
HistoryTreeStub.NODE_FACTORY,
HtTestUtils.READ_FACTORY, BASE_OBJ_FACTORY
},
+ { "Classic leaf node",
+ HTNode.COMMON_HEADER_SIZE,
+ ClassicHistoryTreeStub.CLASSIC_NODE_FACTORY,
+ HtTestUtils.READ_FACTORY,
+ BASE_OBJ_FACTORY },
});
}
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.AbstractHistoryTree;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.AbstractHistoryTreeTestBase;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic.ClassicHistoryTreeStub;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic.ClassicNode;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.HTInterval;
+
+/**
+ * Test the classic history tree
+ *
+ * @author Geneviève Bastien
+ */
+public class ClassicHistoryTreeTest
+ extends AbstractHistoryTreeTestBase<HTInterval, ClassicNode<HTInterval>> {
+
+ private static final HTInterval DEFAULT_OBJECT = new HTInterval(0, 0);
+
+
+ @Override
+ protected ClassicHistoryTreeStub createHistoryTree(
+ File stateHistoryFile,
+ int blockSize,
+ int maxChildren,
+ int providerVersion,
+ long treeStart) throws IOException {
+
+ return new ClassicHistoryTreeStub(stateHistoryFile,
+ blockSize,
+ maxChildren,
+ providerVersion,
+ treeStart);
+ }
+
+ @Override
+ protected ClassicHistoryTreeStub createHistoryTree(
+ File existingStateFile, int expectedProviderVersion) throws IOException {
+ return new ClassicHistoryTreeStub(existingStateFile, expectedProviderVersion);
+ }
+
+ @Override
+ protected HTInterval createInterval(long start, long end) {
+ return new HTInterval(start, end);
+ }
+
+    @Override
+    protected long fillValues(AbstractHistoryTree<HTInterval, ClassicNode<HTInterval>> ht,
+            int fillSize, long start) {
+
+        /* Number of intervals that fit in 'fillSize' bytes on disk */
+        int nbValues = fillSize / DEFAULT_OBJECT.getSizeOnDisk();
+        for (int i = 0; i < nbValues; i++) {
+            /* Insert sequential unit-length intervals beginning at 'start' */
+            ht.insert(new HTInterval(start + i, start + i + 1));
+        }
+        /* Return the end time of the last inserted interval */
+        return start + nbValues;
+    }
+
+}
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic.ClassicHistoryTree;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic.ClassicNode;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.HTInterval;
+
+/**
+ * A stub for the classic history tree. Limits type "E" to {@link HTInterval}.
+ *
+ * The advantage is that the node factory can now be declared statically, and
+ * accessed directly by test methods that need it.
+ *
+ * @author Geneviève Bastien
+ */
+public class ClassicHistoryTreeStub extends ClassicHistoryTree<HTInterval> {
+
+ /**
+ * The magic number for this file format.
+ */
+ private static final int CLASSIC_HISTORY_STUB_FILE_MAGIC_NUMBER = 0x07E57A91;
+
+ /** File format version. Increment when breaking compatibility. */
+ private static final int FILE_VERSION = 1;
+
+ /**
+ * A factory to create leaf and core nodes based on the BaseHtObject object
+ */
+ public static final IHTNodeFactory<HTInterval, ClassicNode<HTInterval>> CLASSIC_NODE_FACTORY =
+ (t, b, m, seq, p, start) -> new ClassicNode<>(t, b, m, seq, p, start);
+
+ /**
+ * Create a new Classic History Tree test stub from scratch, specifying all
+ * configuration parameters.
+ *
+ * @param stateHistoryFile
+ * The name of the history file
+ * @param blockSize
+ * The size of each "block" on disk in bytes. One node will
+ * always fit in one block. It should be at least 4096.
+ * @param maxChildren
+ * The maximum number of children allowed per core (non-leaf)
+ * node.
+ * @param providerVersion
+ * The version of the state provider. If a file already exists,
+ * and their versions match, the history file will not be rebuilt
+ * uselessly.
+ * @param treeStart
+ * The start time of the history
+ * @throws IOException
+ * If an error happens trying to open/write to the file
+ * specified in the config
+ */
+ public ClassicHistoryTreeStub(File stateHistoryFile,
+ int blockSize,
+ int maxChildren,
+ int providerVersion,
+ long treeStart) throws IOException {
+
+ super(stateHistoryFile,
+ blockSize,
+ maxChildren,
+ providerVersion,
+ treeStart,
+ HTInterval.INTERVAL_READER);
+ }
+
+ /**
+ * "Reader" constructor : instantiate a SHTree from an existing tree file on
+ * disk
+ *
+ * @param existingStateFile
+ * Path/filename of the history-file we are to open
+ * @param expProviderVersion
+ * The expected version of the state provider
+ * @throws IOException
+ * If an error happens reading the file
+ */
+ public ClassicHistoryTreeStub(File existingStateFile, int expProviderVersion) throws IOException {
+ super(existingStateFile, expProviderVersion, HTInterval.INTERVAL_READER);
+ }
+
+
+ @Override
+ protected IHTNodeFactory<HTInterval, ClassicNode<HTInterval>> getNodeFactory() {
+ return CLASSIC_NODE_FACTORY;
+ }
+
+ @Override
+ protected int getMagicNumber() {
+ return CLASSIC_HISTORY_STUB_FILE_MAGIC_NUMBER;
+ }
+
+ @Override
+ protected int getFileVersion() {
+ return FILE_VERSION;
+ }
+
+}
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are made
+ * available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+@org.eclipse.jdt.annotation.NonNullByDefault
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic;
org.eclipse.tracecompass.internal.provisional.datastore.core.condition;x-internal:=true,
org.eclipse.tracecompass.internal.provisional.datastore.core.exceptions,
org.eclipse.tracecompass.internal.provisional.datastore.core.historytree;x-internal:=true,
+ org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic;x-internal:=true,
org.eclipse.tracecompass.internal.provisional.datastore.core.interval;x-internal:=true,
org.eclipse.tracecompass.internal.provisional.datastore.core.serialization;x-friends:="org.eclipse.tracecompass.statesystem.core,org.eclipse.tracecompass.statesystem.core.tests"
Import-Package: com.google.common.annotations,
fChildren = new int[fNode.fMaxChildren];
}
+ /**
+ * Return this core data's full node. To be used by subclasses.
+ *
+ * @return The node
+ */
+ protected HTNode<?> getNode() {
+ return fNode;
+ }
+
/**
* Read the specific header for this extra node data
*
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2010, 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Collection;
+
+import org.eclipse.tracecompass.internal.provisional.datastore.core.condition.RangeCondition;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.AbstractHistoryTree;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTInterval;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTIntervalReader;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Classic history tree, where children nodes do not overlap and are sequential,
+ * ie the start of node(i+1) is equal to the end of node(i) + 1
+ *
+ * @author Alexandre Montplaisir
+ * @param <E>
+ * The type of objects that will be saved in the tree
+ */
+public class ClassicHistoryTree<E extends IHTInterval>
+ extends AbstractHistoryTree<E, ClassicNode<E>> {
+
+ /** The magic number for this file format. */
+ public static final int HISTORY_FILE_MAGIC_NUMBER = 0x05FFA900;
+
+ /** File format version. Increment when breaking compatibility. */
+ private static final int FILE_VERSION = 8;
+
+
+ // ------------------------------------------------------------------------
+ // Constructors/"Destructors"
+ // ------------------------------------------------------------------------
+
+ /**
+ * Create a new Classic (aka Sequential) History Tree from scratch,
+ * specifying all configuration parameters.
+ *
+ * @param stateHistoryFile
+ * The name of the history file
+ * @param blockSize
+ * The size of each "block" on disk in bytes. One node will
+ * always fit in one block. It should be at least 4096.
+ * @param maxChildren
+ * The maximum number of children allowed per core (non-leaf)
+ * node.
+ * @param providerVersion
+ * The version of the state provider. If a file already exists,
+ * and their versions match, the history file will not be rebuilt
+ * uselessly.
+ * @param treeStart
+ * The start time of the history
+ * @param intervalReader
+ * The factory to create new tree data elements when reading from
+ * the disk
+ * @throws IOException
+ * If an error happens trying to open/write to the file
+ * specified in the config
+ */
+ public ClassicHistoryTree(File stateHistoryFile,
+ int blockSize,
+ int maxChildren,
+ int providerVersion,
+ long treeStart,
+ IHTIntervalReader<E> intervalReader) throws IOException {
+
+ super(stateHistoryFile,
+ blockSize,
+ maxChildren,
+ providerVersion,
+ treeStart,
+ intervalReader);
+ }
+
+ /**
+ * "Reader" constructor : instantiate a Classic History Tree from an
+ * existing tree file on disk
+ *
+ * @param existingStateFile
+ * Path/filename of the history-file we are to open
+ * @param expProviderVersion
+ * The expected version of the state provider
+ * @param intervalReader
+ * The factory used to read segments from the history tree
+ * @throws IOException
+ * If an error happens reading the file
+ */
+ public ClassicHistoryTree(File existingStateFile,
+ int expProviderVersion,
+ IHTIntervalReader<E> intervalReader) throws IOException {
+ super(existingStateFile, expProviderVersion, intervalReader);
+ }
+
+ @Override
+ protected int getMagicNumber() {
+ return HISTORY_FILE_MAGIC_NUMBER;
+ }
+
+ @Override
+ protected int getFileVersion() {
+ return FILE_VERSION;
+ }
+
+ @Override
+ protected IHTNodeFactory<E, ClassicNode<E>> getNodeFactory() {
+ return (t, b, m, seq, p, start) -> new ClassicNode<>(t, b, m, seq, p, start);
+ }
+
+ @Override
+ protected long getNewBranchStart(int depth, E interval) {
+ // The new branch starts at the end of the tree + 1, because the last
+ // branch closed at tree end and they must be sequential
+ return getTreeEnd() + 1;
+ }
+
+ // ------------------------------------------------------------------------
+ // Test-specific methods
+ // ------------------------------------------------------------------------
+
+ @Override
+ @VisibleForTesting
+ protected boolean verifyChildrenSpecific(ClassicNode<E> parent,
+ int index, ClassicNode<E> child) {
+ return (parent.getChildStart(index) == child.getNodeStart());
+ }
+
+ @Override
+ @VisibleForTesting
+ protected boolean verifyIntersectingChildren(ClassicNode<E> parent, ClassicNode<E> child) {
+ int childSequence = child.getSequenceNumber();
+ for (long t = parent.getNodeStart(); t < parent.getNodeEnd(); t++) {
+ RangeCondition<Long> timeCondition = RangeCondition.singleton(t);
+ boolean shouldBeInCollection = timeCondition.intersects(child.getNodeStart(), child.getNodeEnd());
+ Collection<Integer> nextChildren = parent.selectNextChildren(timeCondition);
+ /* There should be only one intersecting child */
+ if (nextChildren.size() != 1
+ || shouldBeInCollection != nextChildren.contains(childSequence)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+}
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are
+ * made available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic;
+
+import static org.eclipse.tracecompass.common.core.NonNullUtils.checkNotNull;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Objects;
+
+import org.eclipse.jdt.annotation.NonNull;
+import org.eclipse.jdt.annotation.Nullable;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.condition.RangeCondition;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.exceptions.RangeException;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.HTNode;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.IHTNode;
+import org.eclipse.tracecompass.internal.provisional.datastore.core.interval.IHTInterval;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * The type of node used for classic history tree
+ *
+ * @author Geneviève Bastien
+ *
+ * @param <E>
+ * The type of objects that will be saved in the tree
+ */
+public class ClassicNode<E extends IHTInterval> extends HTNode<E> {
+
+ /**
+ * Adds the data concerning the classic nodes, the start of each child node
+ */
+ protected static class ClassicCoreNodeData extends CoreNodeData {
+
+ /** Start times of each of the children (size = MAX_NB_CHILDREN) */
+ private final long[] fChildStart;
+
+        /**
+         * Classic history tree node data constructor
+         *
+         * @param node
+         *            The node containing this extra data.
+         */
+        public ClassicCoreNodeData(ClassicNode<?> node) {
+            super(node);
+
+            int size = node.getMaxChildren();
+            /*
+             * We instantiate the following array at full size right away,
+             * since we want to reserve that space in the node's header.
+             * "this.nbChildren" will tell us how many relevant entries there
+             * are in this table.
+             */
+            fChildStart = new long[size];
+        }
+
+ @Override
+ protected ClassicNode<?> getNode() {
+ /* Type enforced by constructor */
+ return (ClassicNode<?>) super.getNode();
+ }
+
+
+        @Override
+        public void readSpecificHeader(@NonNull ByteBuffer buffer) {
+            super.readSpecificHeader(buffer);
+
+            int size = getNode().getMaxChildren();
+
+            /* Read the start times of the children that are actually filled */
+            for (int i = 0; i < getNbChildren(); i++) {
+                fChildStart[i] = buffer.getLong();
+            }
+            /*
+             * Skip the remaining slots: the on-disk header always reserves
+             * space for the maximum number of children.
+             */
+            for (int i = getNbChildren(); i < size; i++) {
+                buffer.getLong();
+            }
+        }
+
+ @Override
+ protected void writeSpecificHeader(@NonNull ByteBuffer buffer) {
+ super.writeSpecificHeader(buffer);
+
+ int size = getNode().getMaxChildren();
+
+ /* Write the "children's start times" array */
+ for (int i = 0; i < getNbChildren(); i++) {
+ buffer.putLong(fChildStart[i]);
+ }
+ for (int i = getNbChildren(); i < size; i++) {
+ buffer.putLong(0);
+ }
+ }
+
+ @Override
+ protected int getSpecificHeaderSize() {
+ int maxChildren = getNode().getMaxChildren();
+ int specificSize = super.getSpecificHeaderSize();
+ /* MAX_NB * Timevalue for start time */
+ specificSize += Long.BYTES * maxChildren;
+
+ return specificSize;
+ }
+
+ @Override
+ public void linkNewChild(IHTNode<?> childNode) {
+ getNode().takeWriteLock();
+ try {
+ super.linkNewChild(childNode);
+ int nbChildren = getNbChildren();
+
+ fChildStart[nbChildren - 1] = childNode.getNodeStart();
+
+ } finally {
+ getNode().releaseWriteLock();
+ }
+ }
+
+        /**
+         * Select the indices of the children that intersect the given time
+         * condition. Children are sequential: child i spans from
+         * fChildStart[i] to fChildStart[i + 1] - 1, and the last child ends
+         * at this node's end time.
+         */
+        @Override
+        protected Collection<Integer> selectNextIndices(RangeCondition<Long> rc) {
+            ClassicNode<?> node = getNode();
+
+            if (rc.min() < node.getNodeStart()
+                    || (node.isOnDisk() && rc.max() > node.getNodeEnd())) {
+                throw new RangeException("Requesting children outside the node's range: " + rc.toString()); //$NON-NLS-1$
+            }
+
+            node.takeReadLock();
+            try {
+                int nbChildren = getNbChildren();
+                if (nbChildren == 0) {
+                    /* Type-safe empty list instead of the raw EMPTY_LIST field */
+                    return Collections.emptyList();
+                }
+
+                List<Integer> matchingChildren = new LinkedList<>();
+                /* Check all children except the last one */
+                for (int i = 0; i < nbChildren - 1; i++) {
+                    long childStart = fChildStart[i];
+                    /* Nodes are sequential */
+                    long childEnd = fChildStart[i + 1] - 1;
+
+                    if (rc.intersects(childStart, childEnd)) {
+                        matchingChildren.add(i);
+                    }
+                }
+
+                /* Check the last child, which ends at the node's end time */
+                {
+                    int i = nbChildren - 1;
+                    long childStart = fChildStart[i];
+                    long childEnd = node.getNodeEnd();
+                    if (rc.intersects(childStart, childEnd)) {
+                        matchingChildren.add(i);
+                    }
+                }
+
+                return matchingChildren;
+
+            } finally {
+                node.releaseReadLock();
+            }
+        }
+
+ /**
+ * Get the start value of a child
+ *
+ * @param index
+ * The child index
+ * @return The start value
+ */
+ public long getChildStart(int index) {
+ getNode().takeReadLock();
+ try {
+ if (index >= getNbChildren()) {
+ throw new IndexOutOfBoundsException("The child at index " + index + " does not exist"); //$NON-NLS-1$ //$NON-NLS-2$
+ }
+ return fChildStart[index];
+ } finally {
+ getNode().releaseReadLock();
+ }
+ }
+
+        @Override
+        public int hashCode() {
+            /*
+             * Hash the array CONTENTS with Arrays.hashCode, not the array
+             * reference: equals() compares the arrays with Arrays.equals, so
+             * passing the array itself to Objects.hash would give equal
+             * objects different hashes, breaking the hashCode contract.
+             */
+            return Objects.hash(super.hashCode(), Arrays.hashCode(fChildStart));
+        }
+
+ @Override
+ public boolean equals(@Nullable Object obj) {
+ if (!super.equals(obj)) {
+ return false;
+ }
+ /* super.equals already checks for null / same class */
+ ClassicCoreNodeData other = (ClassicCoreNodeData) checkNotNull(obj);
+ return (Arrays.equals(fChildStart, other.fChildStart));
+ }
+
+ }
+
+ /**
+ * Constructor
+ *
+ * @param type
+ * The type of this node
+ * @param blockSize
+ * The size (in bytes) of a serialized node on disk
+ * @param maxChildren
+ * The maximum allowed number of children per node
+ * @param seqNumber
+ * The (unique) sequence number assigned to this particular node
+ * @param parentSeqNumber
+ * The sequence number of this node's parent node
+ * @param start
+ * The earliest timestamp stored in this node
+ */
+ public ClassicNode(NodeType type, int blockSize, int maxChildren,
+ int seqNumber, int parentSeqNumber, long start) {
+ super(type, blockSize, maxChildren, seqNumber, parentSeqNumber, start);
+ }
+
+ @Override
+ protected @Nullable ClassicCoreNodeData createNodeExtraData(final NodeType type) {
+ if (type == NodeType.CORE) {
+ return new ClassicCoreNodeData(this);
+ }
+ return null;
+ }
+
+ @Override
+ protected @Nullable ClassicCoreNodeData getCoreNodeData() {
+ return (ClassicCoreNodeData) super.getCoreNodeData();
+ }
+
+ /**
+ * Get the start value of a child of this node
+ *
+ * @param index The index of the node to get the child start
+ * @return The child start value
+ */
+ @VisibleForTesting
+ long getChildStart(int index) {
+ ClassicCoreNodeData extraData = getCoreNodeData();
+ if (extraData != null) {
+ return extraData.getChildStart(index);
+ }
+ throw new UnsupportedOperationException("A leaf node does not have children"); //$NON-NLS-1$
+ }
+
+}
--- /dev/null
+/*******************************************************************************
+ * Copyright (c) 2017 École Polytechnique de Montréal
+ *
+ * All rights reserved. This program and the accompanying materials are made
+ * available under the terms of the Eclipse Public License v1.0 which
+ * accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *******************************************************************************/
+
+@org.eclipse.jdt.annotation.NonNullByDefault
+package org.eclipse.tracecompass.internal.provisional.datastore.core.historytree.classic;