<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="output" path="bin"/>
org.eclipse.jdt.core.compiler.annotation.nullable=org.eclipse.jdt.annotation.Nullable
org.eclipse.jdt.core.compiler.annotation.nullanalysis=enabled
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
-org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.problem.discouragedReference=error
org.eclipse.jdt.core.compiler.problem.emptyStatement=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
-org.eclipse.jdt.core.compiler.problem.explicitlyClosedAutoCloseable=warning
+org.eclipse.jdt.core.compiler.problem.explicitlyClosedAutoCloseable=error
org.eclipse.jdt.core.compiler.problem.fallthroughCase=error
org.eclipse.jdt.core.compiler.problem.fatalOptionalError=disabled
org.eclipse.jdt.core.compiler.problem.fieldHiding=error
org.eclipse.jdt.core.compiler.problem.rawTypeReference=warning
org.eclipse.jdt.core.compiler.problem.redundantNullAnnotation=warning
org.eclipse.jdt.core.compiler.problem.redundantNullCheck=error
-org.eclipse.jdt.core.compiler.problem.redundantSpecificationOfTypeArguments=warning
+org.eclipse.jdt.core.compiler.problem.redundantSpecificationOfTypeArguments=error
org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=error
org.eclipse.jdt.core.compiler.problem.reportMethodCanBePotentiallyStatic=ignore
org.eclipse.jdt.core.compiler.problem.reportMethodCanBeStatic=error
org.eclipse.jdt.core.compiler.problem.unusedTypeParameter=ignore
org.eclipse.jdt.core.compiler.problem.unusedWarningToken=error
org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=error
-org.eclipse.jdt.core.compiler.source=1.6
+org.eclipse.jdt.core.compiler.source=1.7
org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation=0
Bundle-SymbolicName: org.eclipse.linuxtools.tmf.core;singleton:=true
Bundle-Activator: org.eclipse.linuxtools.internal.tmf.core.Activator
Bundle-ActivationPolicy: lazy
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6
+Bundle-RequiredExecutionEnvironment: JavaSE-1.7
Require-Bundle: org.eclipse.core.runtime,
org.eclipse.core.resources,
org.eclipse.linuxtools.ctf.core;bundle-version="3.0.0"
* The mapping of available trace type IDs to their corresponding
* configuration element
*/
- private final Map<String, IAnalysisModuleHelper> fAnalysisTypeAttributes = new HashMap<String, IAnalysisModuleHelper>();
+ private final Map<String, IAnalysisModuleHelper> fAnalysisTypeAttributes = new HashMap<>();
private static TmfAnalysisType fInstance = null;
public static IConfigurationElement[] getTypeElements() {
IConfigurationElement[] elements =
Platform.getExtensionRegistry().getConfigurationElementsFor(TMF_ANALYSIS_TYPE_ID);
- List<IConfigurationElement> typeElements = new LinkedList<IConfigurationElement>();
+ List<IConfigurationElement> typeElements = new LinkedList<>();
for (IConfigurationElement element : elements) {
if (element.getName().equals(MODULE_ELEM)) {
typeElements.add(element);
// ------------------------------------------------------------------------
private static Map<Class<? extends ITmfEvent>, List<TmfEventProvider>> fProviders =
- new HashMap<Class<? extends ITmfEvent>, List<TmfEventProvider>>();
+ new HashMap<>();
/**
* Registers [provider] as a provider of [eventType]
public static TmfEventProvider[] getProviders(Class<? extends ITmfEvent> eventType) {
List<TmfEventProvider> list = fProviders.get(eventType);
if (list == null) {
- list = new ArrayList<TmfEventProvider>();
+ list = new ArrayList<>();
}
TmfEventProvider[] result = new TmfEventProvider[list.size()];
return list.toArray(result);
return getProviders(eventType);
}
TmfEventProvider[] list = getProviders(eventType);
- List<TmfEventProvider> result = new ArrayList<TmfEventProvider>();
+ List<TmfEventProvider> result = new ArrayList<>();
if (list != null) {
for (TmfEventProvider provider : list) {
if (provider.getClass() == providerType) {
// ------------------------------------------------------------------------
/** The list of coalesced requests */
- private final List<ITmfEventRequest> fRequests = new ArrayList<ITmfEventRequest>();
+ private final List<ITmfEventRequest> fRequests = new ArrayList<>();
/**
* We do not use super.fRange, because in the case of coalesced requests,
private final String fExecutorName;
// The request queues
- private final Queue<TmfEventThread> fForegroundTasks = new ArrayBlockingQueue<TmfEventThread>(100);
- private final Queue<TmfEventThread> fBackgroundTasks = new ArrayBlockingQueue<TmfEventThread>(100);
+ private final Queue<TmfEventThread> fForegroundTasks = new ArrayBlockingQueue<>(100);
+ private final Queue<TmfEventThread> fBackgroundTasks = new ArrayBlockingQueue<>(100);
// The tasks
private TmfEventThread fActiveTask;
this.parent = parent;
this.quark = quark;
this.name = name;
- this.subAttributes = new ArrayList<Attribute>();
+ this.subAttributes = new ArrayList<>();
}
/**
* @return
*/
String[] getFullAttribute() {
- LinkedList<String> list = new LinkedList<String>();
+ LinkedList<String> list = new LinkedList<>();
Attribute curNode = this;
/* Add recursive parents to the list, but stop at the root node */
AlphaNumAttribute(Attribute parent, String name, int quark) {
super(parent, name, quark);
- this.subAttributesMap = new HashMap<String, Integer>();
+ this.subAttributesMap = new HashMap<>();
}
@Override
/* Message for exceptions, shouldn't be externalized */
final String errorMessage = "The attribute tree file section is either invalid or corrupted."; //$NON-NLS-1$
- ArrayList<String[]> list = new ArrayList<String[]>();
+ ArrayList<String[]> list = new ArrayList<>();
byte[] curByteArray;
String curFullString;
String[] curStringArray;
* @return The total number of bytes written.
*/
int writeSelf(File file, long pos) {
- RandomAccessFile raf = null;
int total = 0;
byte[] curByteArray;
- try {
- raf = new RandomAccessFile(file, "rw"); //$NON-NLS-1$
+ try (RandomAccessFile raf = new RandomAccessFile(file, "rw");) { //$NON-NLS-1$
raf.seek(pos);
/* Write the almost-magic number */
} catch (IOException e) {
e.printStackTrace();
- } finally {
- if (raf != null) {
- try {
- raf.close();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
}
return total;
}
*/
List<Integer> getSubAttributes(int attributeQuark, boolean recursive)
throws AttributeNotFoundException {
- List<Integer> listOfChildren = new ArrayList<Integer>();
+ List<Integer> listOfChildren = new ArrayList<>();
Attribute startingAttribute;
/* Check if the quark is valid */
@Override
public List<Integer> getQuarks(String... pattern) {
- List<Integer> quarks = new LinkedList<Integer>();
- List<String> prefix = new LinkedList<String>();
- List<String> suffix = new LinkedList<String>();
+ List<Integer> quarks = new LinkedList<>();
+ List<String> prefix = new LinkedList<>();
+ List<String> suffix = new LinkedList<>();
boolean split = false;
String[] prefixStr;
String[] suffixStr;
throw new StateSystemDisposedException();
}
- List<ITmfStateInterval> stateInfo = new ArrayList<ITmfStateInterval>(getNbAttributes());
+ List<ITmfStateInterval> stateInfo = new ArrayList<>(getNbAttributes());
/* Bring the size of the array to the current number of attributes */
for (int i = 0; i < getNbAttributes(); i++) {
}
/* Get the initial state at time T1 */
- intervals = new ArrayList<ITmfStateInterval>();
+ intervals = new ArrayList<>();
currentInterval = querySingleState(t1, attributeQuark);
intervals.add(currentInterval);
}
/* Get the initial state at time T1 */
- intervals = new ArrayList<ITmfStateInterval>();
+ intervals = new ArrayList<>();
currentInterval = querySingleState(t1, attributeQuark);
intervals.add(currentInterval);
TransientState(IStateHistoryBackend backend) {
this.backend = backend;
isActive = true;
- ongoingStateInfo = new ArrayList<ITmfStateValue>();
- ongoingStateStartTimes = new ArrayList<Long>();
- stateValueTypes = new ArrayList<Type>();
+ ongoingStateInfo = new ArrayList<>();
+ ongoingStateStartTimes = new ArrayList<>();
+ stateValueTypes = new ArrayList<>();
if (backend != null) {
latestTime = backend.getStartTime();
*/
synchronized void replaceOngoingState(List<ITmfStateInterval> newStateIntervals) {
int size = newStateIntervals.size();
- ongoingStateInfo = new ArrayList<ITmfStateValue>(size);
- ongoingStateStartTimes = new ArrayList<Long>(size);
- stateValueTypes = new ArrayList<Type>(size);
+ ongoingStateInfo = new ArrayList<>(size);
+ ongoingStateStartTimes = new ArrayList<>(size);
+ stateValueTypes = new ArrayList<>(size);
for (ITmfStateInterval interval : newStateIntervals) {
ongoingStateInfo.add(interval.getStateValue());
public InMemoryBackend(long startTime) {
this.startTime = startTime;
this.latestTime = startTime;
- this.intervals = new TreeSet<ITmfStateInterval>(END_COMPARATOR);
+ this.intervals = new TreeSet<>(END_COMPARATOR);
}
@Override
this.stringSectionOffset = config.getBlockSize();
this.isDone = false;
- this.intervals = new ArrayList<HTInterval>();
+ this.intervals = new ArrayList<>();
}
/**
config = conf;
treeEnd = conf.getTreeStart();
nodeCount = 0;
- latestBranch = new ArrayList<CoreNode>();
+ latestBranch = new ArrayList<>();
/* Prepare the IO object */
treeIO = new HT_IO(config, true);
throw new IOException("Empty target file"); //$NON-NLS-1$
}
- FileInputStream fis = new FileInputStream(existingStateFile);
- ByteBuffer buffer = ByteBuffer.allocate(TREE_HEADER_SIZE);
- FileChannel fc = fis.getChannel();
- buffer.order(ByteOrder.LITTLE_ENDIAN);
- buffer.clear();
- fc.read(buffer);
- buffer.flip();
+ try (FileInputStream fis = new FileInputStream(existingStateFile);
+ FileChannel fc = fis.getChannel();) {
- /*
- * Check the magic number to make sure we're opening the right type of
- * file
- */
- res = buffer.getInt();
- if (res != HISTORY_FILE_MAGIC_NUMBER) {
- fc.close();
- fis.close();
- throw new IOException("Wrong magic number"); //$NON-NLS-1$
- }
+ ByteBuffer buffer = ByteBuffer.allocate(TREE_HEADER_SIZE);
- res = buffer.getInt(); /* File format version number */
- if (res != FILE_VERSION) {
- fc.close();
- fis.close();
- throw new IOException("Mismatching History Tree file format versions"); //$NON-NLS-1$
- }
+ buffer.order(ByteOrder.LITTLE_ENDIAN);
+ buffer.clear();
+ fc.read(buffer);
+ buffer.flip();
- res = buffer.getInt(); /* Event handler's version number */
- if (res != expProviderVersion &&
- expProviderVersion != ITmfStateProvider.IGNORE_PROVIDER_VERSION) {
/*
- * The existing history was built using an event handler that doesn't
- * match the current one in the framework.
- *
- * Information could be all wrong. Instead of keeping an incorrect
- * history file, a rebuild is done.
+ * Check the magic number to make sure we're opening the right type
+ * of file
*/
- fc.close();
- fis.close();
- throw new IOException("Mismatching event handler versions"); //$NON-NLS-1$
- }
+ res = buffer.getInt();
+ if (res != HISTORY_FILE_MAGIC_NUMBER) {
+ throw new IOException("Wrong magic number"); //$NON-NLS-1$
+ }
- bs = buffer.getInt(); /* Block Size */
- maxc = buffer.getInt(); /* Max nb of children per node */
+ res = buffer.getInt(); /* File format version number */
+ if (res != FILE_VERSION) {
+ throw new IOException("Mismatching History Tree file format versions"); //$NON-NLS-1$
+ }
- this.nodeCount = buffer.getInt();
- rootNodeSeqNb = buffer.getInt();
- startTime = buffer.getLong();
+ res = buffer.getInt(); /* Event handler's version number */
+ if (res != expProviderVersion &&
+ expProviderVersion != ITmfStateProvider.IGNORE_PROVIDER_VERSION) {
+ /*
+ * The existing history was built using an event handler that
+ * doesn't match the current one in the framework.
+ *
+ * Information could be all wrong. Instead of keeping an
+ * incorrect history file, a rebuild is done.
+ */
+ throw new IOException("Mismatching event handler versions"); //$NON-NLS-1$
+ }
+
+ bs = buffer.getInt(); /* Block Size */
+ maxc = buffer.getInt(); /* Max nb of children per node */
+
+ this.nodeCount = buffer.getInt();
+ rootNodeSeqNb = buffer.getInt();
+ startTime = buffer.getLong();
+
+ this.config = new HTConfig(existingStateFile, bs, maxc, expProviderVersion, startTime);
+ }
- this.config = new HTConfig(existingStateFile, bs, maxc, expProviderVersion, startTime);
- fc.close();
- fis.close();
/*
* FIXME We close fis here and the TreeIO will then reopen the same
* file, not extremely elegant. But how to pass the information here to
* with what is actually in the root node.
*/
if (startTime != latestBranch.get(0).getNodeStart()) {
- fc.close();
- fis.close();
throw new IOException("Inconsistent start times in the" + //$NON-NLS-1$
"history file, it might be corrupted."); //$NON-NLS-1$
}
* The greatest timestamp present in the history tree
*/
void closeTree(long requestedEndTime) {
- FileChannel fc;
ByteBuffer buffer;
int i, res;
treeIO.writeNode(latestBranch.get(i));
}
- fc = treeIO.getFcOut();
- buffer = ByteBuffer.allocate(TREE_HEADER_SIZE);
- buffer.order(ByteOrder.LITTLE_ENDIAN);
- buffer.clear();
+ try (FileChannel fc = treeIO.getFcOut();) {
+ buffer = ByteBuffer.allocate(TREE_HEADER_SIZE);
+ buffer.order(ByteOrder.LITTLE_ENDIAN);
+ buffer.clear();
- /* Save the config of the tree to the header of the file */
- try {
+ /* Save the config of the tree to the header of the file */
fc.position(0);
buffer.putInt(HISTORY_FILE_MAGIC_NUMBER);
/* done writing the file header */
} catch (IOException e) {
- /* We should not have any problems at this point... */
- } finally {
- try {
- fc.close();
- } catch (IOException e) {
- }
+ /*
+ * If we were able to write so far, there should not be any problem
+ * at this point...
+ */
+ // FIXME still, the IOException should be propagated upwards
+ throw new RuntimeException();
}
return;
}
private void rebuildLatestBranch(int rootNodeSeqNb) throws ClosedChannelException {
HTNode nextChildNode;
- this.latestBranch = new ArrayList<CoreNode>();
+ this.latestBranch = new ArrayList<>();
nextChildNode = treeIO.readNode(rootNodeSeqNb);
latestBranch.add((CoreNode) nextChildNode);
/* Rebuild a new latestBranch */
depth = latestBranch.size();
- latestBranch = new ArrayList<CoreNode>();
+ latestBranch = new ArrayList<>();
latestBranch.add(newRootNode);
for (i = 1; i < depth + 1; i++) {
prevNode = latestBranch.get(i - 1);
throws IOException {
super(newStateFile, blockSize, maxChildren, providerVersion, startTime);
- intervalQueue = new ArrayBlockingQueue<HTInterval>(queueSize);
+ intervalQueue = new ArrayBlockingQueue<>(queueSize);
shtThread = new Thread(this, "History Tree Thread"); //$NON-NLS-1$
shtThread.start();
}
int providerVersion, int queueSize) throws IOException {
super(newStateFile, providerVersion, startTime);
- intervalQueue = new ArrayBlockingQueue<HTInterval>(queueSize);
+ intervalQueue = new ArrayBlockingQueue<>(queueSize);
shtThread = new Thread(this, "History Tree Thread"); //$NON-NLS-1$
shtThread.start();
}
private final IStateHistoryBackend innerHistory;
/** Checkpoints map, <Timestamp, Rank in the trace> */
- private final TreeMap<Long, Long> checkpoints =
- new TreeMap<Long, Long>();
+ private final TreeMap<Long, Long> checkpoints = new TreeMap<>();
/** Latch tracking if the initial checkpoint registration is done */
private final CountDownLatch checkpointsReady = new CountDownLatch(1);
/**
* Map of mipmap features per attribute. The map's key is the base attribute quark.
*/
- private Map<Integer, Set<ITmfMipmapFeature>> featureMap = new HashMap<Integer, Set<ITmfMipmapFeature>>();
+ private Map<Integer, Set<ITmfMipmapFeature>> featureMap = new HashMap<>();
// ------------------------------------------------------------------------
// Constructor
if (features != null) {
return features;
}
- features = new LinkedHashSet<ITmfMipmapFeature>();
+ features = new LinkedHashSet<>();
if (value.isNull()) {
return features;
}
/** The current start time for the state value */
protected long currentStartTime;
/** The list of ongoing state intervals per mipmap level */
- protected List<List<ITmfStateInterval>> intervals = new ArrayList<List<ITmfStateInterval>>();
+ protected List<List<ITmfStateInterval>> intervals = new ArrayList<>();
/** The state system used to store the mipmap attributes */
protected ITmfStateSystemBuilder ss;
private int mipmapResolution;
private int mipmapQuark;
- private List<Integer> levelQuarks = new ArrayList<Integer>();
+ private List<Integer> levelQuarks = new ArrayList<>();
/**
* Constructor
* kept at the front of the double-ended queue and the least recently used
* node is kept at the back.
*/
- private final Deque<BTreeNode> fCachedNodes = new ArrayDeque<BTreeNode>(CACHE_SIZE);
+ private final Deque<BTreeNode> fCachedNodes = new ArrayDeque<>(CACHE_SIZE);
private int fCcheMisses = 0;
* @param trace the trace
*/
public TmfMemoryIndex(ITmfTrace trace) {
- fCheckpoints = new ArrayList<ITmfCheckpoint>();
+ fCheckpoints = new ArrayList<>();
}
@Override
private String fName, fId;
private boolean fAutomatic = false, fStarted = false;
private ITmfTrace fTrace;
- private final Map<String, Object> fParameters = new HashMap<String, Object>();
- private final List<String> fParameterNames = new ArrayList<String>();
- private final List<IAnalysisOutput> fOutputs = new ArrayList<IAnalysisOutput>();
- private List<IAnalysisParameterProvider> fParameterProviders = new ArrayList<IAnalysisParameterProvider>();
+ private final Map<String, Object> fParameters = new HashMap<>();
+ private final List<String> fParameterNames = new ArrayList<>();
+ private final List<IAnalysisOutput> fOutputs = new ArrayList<>();
+ private List<IAnalysisParameterProvider> fParameterProviders = new ArrayList<>();
private Job fJob = null;
private final Object syncObj = new Object();
*/
public class TmfAnalysisManager {
- private static final Map<String, IAnalysisModuleHelper> fAnalysisModules = new HashMap<String, IAnalysisModuleHelper>();
- private static final Map<String, List<Class<? extends IAnalysisParameterProvider>>> fParameterProviders = new HashMap<String, List<Class<? extends IAnalysisParameterProvider>>>();
- private static final Map<Class<? extends IAnalysisParameterProvider>, IAnalysisParameterProvider> fParamProviderInstances = new HashMap<Class<? extends IAnalysisParameterProvider>, IAnalysisParameterProvider>();
+ private static final Map<String, IAnalysisModuleHelper> fAnalysisModules = new HashMap<>();
+ private static final Map<String, List<Class<? extends IAnalysisParameterProvider>>> fParameterProviders = new HashMap<>();
+ private static final Map<Class<? extends IAnalysisParameterProvider>, IAnalysisParameterProvider> fParamProviderInstances = new HashMap<>();
/**
* Gets all available analysis module helpers
*/
public static Map<String, IAnalysisModuleHelper> getAnalysisModules(Class<? extends ITmfTrace> traceclass) {
Map<String, IAnalysisModuleHelper> allModules = getAnalysisModules();
- Map<String, IAnalysisModuleHelper> map = new HashMap<String, IAnalysisModuleHelper>();
+ Map<String, IAnalysisModuleHelper> map = new HashMap<>();
for (IAnalysisModuleHelper module : allModules.values()) {
if (module.appliesToTraceType(traceclass)) {
map.put(module.getId(), module);
* @return A parameter provider if one applies to the trace, null otherwise
*/
public static List<IAnalysisParameterProvider> getParameterProviders(IAnalysisModule module, ITmfTrace trace) {
- List<IAnalysisParameterProvider> providerList = new ArrayList<IAnalysisParameterProvider>();
+ List<IAnalysisParameterProvider> providerList = new ArrayList<>();
synchronized (fParameterProviders) {
if (!fParameterProviders.containsKey(module.getId())) {
return providerList;
/** List of coalesced requests
* @since 3.0*/
- protected final List<TmfCoalescedEventRequest> fPendingCoalescedRequests =
- new ArrayList<TmfCoalescedEventRequest>();
+ protected final List<TmfCoalescedEventRequest> fPendingCoalescedRequests = new ArrayList<>();
/** The type of event handled by this provider
* @since 3.0*/
public TmfEventProvider() {
super();
fQueueSize = DEFAULT_QUEUE_SIZE;
- fDataQueue = new LinkedBlockingQueue<ITmfEvent>(fQueueSize);
+ fDataQueue = new LinkedBlockingQueue<>(fQueueSize);
fExecutor = new TmfRequestExecutor();
}
/*
* The map of traces to trace managers.
*/
- private static HashMap<CtfTmfTrace, CtfTraceManager> map = new HashMap<CtfTmfTrace, CtfTraceManager>();
+ private static HashMap<CtfTmfTrace, CtfTraceManager> map = new HashMap<>();
/**
* Registers a trace to the iterator manager, the trace can now get
private final Random fRnd;
public CtfTraceManager(CtfTmfTrace trace) {
- fMap = new HashMap<CtfTmfContext, CtfIterator>();
- fRandomAccess = new ArrayList<CtfTmfContext>();
+ fMap = new HashMap<>();
+ fRandomAccess = new ArrayList<>();
fRnd = new Random(System.nanoTime());
fTrace = trace;
}
@Override
public Set<String> listCustomAttributes() {
if (fDeclaration == null) {
- return new HashSet<String>();
+ return new HashSet<>();
}
return fDeclaration.getCustomAttributes();
}
* mess, and put them into something ITmfEventField can cope with.
*/
private static CtfTmfEventField[] parseFields(EventDefinition eventDef) {
- List<CtfTmfEventField> fields = new ArrayList<CtfTmfEventField>();
+ List<CtfTmfEventField> fields = new ArrayList<>();
StructDefinition structFields = eventDef.getFields();
for (Map.Entry<String, Definition> entry : structFields.getDefinitions().entrySet()) {
String curFieldName = null;
Definition curFieldDef;
CtfTmfEventField curField;
- List<ITmfEventField> list = new ArrayList<ITmfEventField>();
+ List<ITmfEventField> list = new ArrayList<>();
/* Recursively parse the fields */
for (Entry<String, Definition> entry : strDef.getDefinitions().entrySet()) {
curFieldName = entry.getKey();
@Override
public synchronized String getFormattedValue() {
if (formattedValue == null) {
- List<String> strings = new ArrayList<String>();
+ List<String> strings = new ArrayList<>();
for (long value : getValue()) {
strings.add(IntegerDefinition.formatNumber(value, base, signed));
}
@Override
public synchronized String getFormattedValue() {
if (formattedValue == null) {
- List<String> strings = new ArrayList<String>();
+ List<String> strings = new ArrayList<>();
for (CtfTmfEventField element : getValue()) {
strings.add(element.getFormattedValue());
}
/* Fill the fFieldNames and fNameMapping structures */
final int nbFields = (fFields != null) ? fFields.length : 0;
fFieldNames = new String[nbFields];
- fNameMapping = new HashMap<String, ITmfEventField>();
+ fNameMapping = new HashMap<>();
for (int i = 0; i < nbFields; i++) {
final String curName = fFields[i].getName();
* The singleton constructor
*/
private TmfEventTypeManager() {
- fEventTypes = new HashMap<String, HashMap<String, ITmfEventType>>();
+ fEventTypes = new HashMap<>();
}
/**
public synchronized void add(final String context, final ITmfEventType type) {
HashMap<String, ITmfEventType> types = fEventTypes.get(context);
if (types == null) {
- types = new HashMap<String, ITmfEventType>();
+ types = new HashMap<>();
}
types.put(type.getName(), type);
fEventTypes.put(context, types);
* Constructor
*/
public TmfEventMatches() {
- fMatches = new ArrayList<TmfEventDependency>();
+ fMatches = new ArrayList<>();
}
/**
*/
private final IMatchProcessingUnit fMatches;
- private static final Map<MatchingType, List<ITmfMatchEventDefinition>> fMatchDefinitions = new HashMap<MatchingType, List<ITmfMatchEventDefinition>>();
+ private static final Map<MatchingType, List<ITmfMatchEventDefinition>> fMatchDefinitions = new HashMap<>();
- private final Map<ITmfTrace, ITmfMatchEventDefinition> fMatchMap = new HashMap<ITmfTrace, ITmfMatchEventDefinition>();
+ private final Map<ITmfTrace, ITmfMatchEventDefinition> fMatchMap = new HashMap<>();
/**
* Constructor with multiple traces and a match processing object
/**
* Hashtables for unmatched incoming events
*/
- private final List<Map<List<Object>, ITmfEvent>> fUnmatchedIn = new ArrayList<Map<List<Object>, ITmfEvent>>();
+ private final List<Map<List<Object>, ITmfEvent>> fUnmatchedIn = new ArrayList<>();
/**
* Hashtables for unmatched outgoing events
*/
- private final List<Map<List<Object>, ITmfEvent>> fUnmatchedOut = new ArrayList<Map<List<Object>, ITmfEvent>>();
+ private final List<Map<List<Object>, ITmfEvent>> fUnmatchedOut = new ArrayList<>();
/**
* Enum for in and out types
@Override
public List<String> getValidChildren() {
- return new ArrayList<String>(0);
+ return new ArrayList<>(0);
}
@Override
@Override
public List<String> getValidChildren() {
- return new ArrayList<String>(0);
+ return new ArrayList<>(0);
}
@Override
@Override
public List<String> getValidChildren() {
- return new ArrayList<String>(0);
+ return new ArrayList<>(0);
}
@Override
if (getChildrenCount() == 0) {
return super.getValidChildren();
}
- return new ArrayList<String>(0); // only one child allowed
+ return new ArrayList<>(0); // only one child allowed
}
@Override
@Override
public List<String> getValidChildren() {
- return new ArrayList<String>(0);
+ return new ArrayList<>(0);
}
@Override
if (getChildrenCount() == 0) {
return super.getValidChildren();
}
- return new ArrayList<String>(0); // only one child allowed
+ return new ArrayList<>(0); // only one child allowed
}
@Override
};
private ITmfFilterTreeNode parent = null;
- private ArrayList<ITmfFilterTreeNode> children = new ArrayList<ITmfFilterTreeNode>();
+ private ArrayList<ITmfFilterTreeNode> children = new ArrayList<>();
/**
* @param parent
try {
TmfFilterTreeNode clone = (TmfFilterTreeNode) super.clone();
clone.parent = null;
- clone.children = new ArrayList<ITmfFilterTreeNode>(children.size());
+ clone.children = new ArrayList<>(children.size());
for (ITmfFilterTreeNode child : getChildren()) {
clone.addChild(child.clone());
}
*/
public TmfFilterContentHandler() {
super();
- fFilterTreeStack = new Stack<ITmfFilterTreeNode>();
+ fFilterTreeStack = new Stack<>();
}
/**
// Note: listeners could be restricted to ITmfComponents but there is no
// harm in letting anyone use this since it is not tied to anything but
// the signal data type.
- private static Map<Object, Method[]> fListeners = new HashMap<Object, Method[]>();
- private static Map<Object, Method[]> fVIPListeners = new HashMap<Object, Method[]>();
+ private static Map<Object, Method[]> fListeners = new HashMap<>();
+ private static Map<Object, Method[]> fVIPListeners = new HashMap<>();
// If requested, add universal signal tracer
// TODO: Temporary solution: should be enabled/disabled dynamically
* @return
*/
private static Method[] getSignalHandlerMethods(Object listener) {
- List<Method> handlers = new ArrayList<Method>();
+ List<Method> handlers = new ArrayList<>();
Method[] methods = listener.getClass().getMethods();
for (Method method : methods) {
if (method.isAnnotationPresent(TmfSignalHandler.class)) {
// Build the list of listener methods that are registered for this signal
Class<?> signalClass = signal.getClass();
- Map<Object, List<Method>> targets = new HashMap<Object, List<Method>>();
+ Map<Object, List<Method>> targets = new HashMap<>();
targets.clear();
for (Map.Entry<Object, Method[]> entry : listeners.entrySet()) {
- List<Method> matchingMethods = new ArrayList<Method>();
+ List<Method> matchingMethods = new ArrayList<>();
for (Method method : entry.getValue()) {
if (method.getParameterTypes()[0].isAssignableFrom(signalClass)) {
matchingMethods.add(method);
Class<? extends ITmfEvent> eventType, String id) {
this.trace = trace;
this.eventType = eventType;
- eventsQueue = new ArrayBlockingQueue<ITmfEvent>(DEFAULT_EVENTS_QUEUE_SIZE);
+ eventsQueue = new ArrayBlockingQueue<>(DEFAULT_EVENTS_QUEUE_SIZE);
ssAssigned = false;
String id2 = (id == null ? "Unamed" : id); //$NON-NLS-1$
@Override
public Map<String, ITmfStateSystem> getStateSystems() {
- Map<String, ITmfStateSystem> map = new HashMap<String, ITmfStateSystem>();
+ Map<String, ITmfStateSystem> map = new HashMap<>();
map.put(getId(), fStateSystem);
return map;
}
private static List<ITmfStateInterval> queryAttributeRange(ITmfStateSystem ss,
long t1, long t2, int baseQuark, String featureString)
throws AttributeNotFoundException, TimeRangeException, StateValueTypeException {
- Pair<Long, Long> timeRange = new Pair<Long, Long>(t1, t2);
+ Pair<Long, Long> timeRange = new Pair<>(t1, t2);
int mipmapQuark = -1;
- List<ITmfStateInterval> intervals = new ArrayList<ITmfStateInterval>();
+ List<ITmfStateInterval> intervals = new ArrayList<>();
try {
try {
mipmapQuark = ss.getQuarkRelative(baseQuark, featureString);
}
long startTime = Math.max(timeRange.getFirst(),
Math.min(currentLevelInterval.getEndTime() + 1, timeRange.getSecond()));
- return new Pair<Long, Long>(startTime, timeRange.getSecond());
+ return new Pair<>(startTime, timeRange.getSecond());
}
private static boolean isFullyOverlapped(Pair<Long, Long> range,
HistogramQueryRequest req = new HistogramQueryRequest(borders, end);
sendAndWait(req);
- List<Long> results = new LinkedList<Long>(req.getResults());
+ List<Long> results = new LinkedList<>(req.getResults());
return results;
}
public StatsPerTypeRequest(ITmfTrace trace, TmfTimeRange range) {
super(trace.getEventType(), range, 0, ITmfEventRequest.ALL_DATA,
ITmfEventRequest.ExecutionType.BACKGROUND);
- this.stats = new HashMap<String, Long>();
+ this.stats = new HashMap<>();
}
public Map<String, Long> getResults() {
ITmfEventRequest.ExecutionType.BACKGROUND);
/* Prepare the results map, with all counts at 0 */
- results = new TreeMap<Long, Long>();
+ results = new TreeMap<>();
for (long border : borders) {
results.put(border, 0L);
}
@Override
public List<Long> histogramQuery(final long start, final long end, final int nb) {
- final List<Long> list = new LinkedList<Long>();
+ final List<Long> list = new LinkedList<>();
final long increment = (end - start) / nb;
if (!totalsStats.waitUntilBuilt()) {
/* We need the complete state history to be built to answer this. */
typesStats.waitUntilBuilt();
- Map<String, Long> map = new HashMap<String, Long>();
+ Map<String, Long> map = new HashMap<>();
long endTime = typesStats.getCurrentEndTime();
try {
// end time, and answer as soon as possible...
typesStats.waitUntilBuilt();
- Map<String, Long> map = new HashMap<String, Long>();
+ Map<String, Long> map = new HashMap<>();
/* Make sure the start/end times are within the state history, so we
* don't get TimeRange exceptions.
* Initialization of the attributes
*/
public SyncAlgorithmFullyIncremental() {
- fSyncs = new LinkedList<ConvexHull>();
+ fSyncs = new LinkedList<>();
}
/**
@Override
public Map<String, Map<String, Object>> getStats() {
- Map<String, Map<String, Object>> statmap = new LinkedHashMap<String, Map<String, Object>>();
+ Map<String, Map<String, Object>> statmap = new LinkedHashMap<>();
for (ConvexHull traceSync : fSyncs) {
statmap.put(traceSync.getReferenceTrace() + " <==> " + traceSync.getOtherTrace(), traceSync.getStats()); //$NON-NLS-1$
}
* The list of meaningful points on the upper hull (received by the
* reference trace, below in a graph)
*/
- private final LinkedList<SyncPoint> fUpperBoundList = new LinkedList<SyncPoint>();
+ private final LinkedList<SyncPoint> fUpperBoundList = new LinkedList<>();
/**
* The list of meaningful points on the lower hull (sent by the reference
* trace, above in a graph)
*/
- private final LinkedList<SyncPoint> fLowerBoundList = new LinkedList<SyncPoint>();
+ private final LinkedList<SyncPoint> fLowerBoundList = new LinkedList<>();
/** Points forming the line with maximum slope */
private final SyncPoint[] fLmax;
private String fReferenceTrace = "", fOtherTrace = ""; //$NON-NLS-1$//$NON-NLS-2$
private SyncQuality fQuality;
- private Map<String, Object> fStats = new LinkedHashMap<String, Object>();
+ private Map<String, Object> fStats = new LinkedHashMap<>();
/**
* Initialization of the attributes
return;
}
- FileInputStream fis = new FileInputStream(syncFile);
- ByteBuffer buffer = ByteBuffer.allocate(HEADER_SIZE);
- FileChannel fc = fis.getChannel();
- buffer.clear();
- fc.read(buffer);
- buffer.flip();
-
- /*
- * Check the magic number,to make sure we're opening the right type of
- * file
- */
- res = buffer.getInt();
- if (res != SYNC_FILE_MAGIC_NUMBER) {
- fc.close();
- fis.close();
- throw new IOException("Selected file does not" + //$NON-NLS-1$
- "look like a synchronization file"); //$NON-NLS-1$
- }
+ try (FileInputStream fis = new FileInputStream(syncFile);
+ FileChannel fc = fis.getChannel();) {
+ ByteBuffer buffer = ByteBuffer.allocate(HEADER_SIZE);
+ buffer.clear();
+ fc.read(buffer);
+ buffer.flip();
- res = buffer.getInt(); /* Major version number */
- if (res != FILE_VERSION) {
- fc.close();
- fis.close();
- throw new IOException("Select synchronization file is of an older " //$NON-NLS-1$
- + "format. Synchronization will have to be computed again."); //$NON-NLS-1$
- }
+ /*
+ * Check the magic number, to make sure we're opening the right type
+ * of file
+ */
+ res = buffer.getInt();
+ if (res != SYNC_FILE_MAGIC_NUMBER) {
+ throw new IOException("Selected file does not " + //$NON-NLS-1$
+ "look like a synchronization file"); //$NON-NLS-1$
+ }
- res = buffer.getInt(); /* Minor version number */
+ res = buffer.getInt(); /* Major version number */
+ if (res != FILE_VERSION) {
+ throw new IOException("Select synchronization file is of an older " //$NON-NLS-1$
+ + "format. Synchronization will have to be computed again."); //$NON-NLS-1$
+ }
- fc.close();
- fis.close();
+ res = buffer.getInt(); /* Minor version number */
+ }
}
/**
return null;
}
- /* Set the position after the header */
- FileInputStream fis = new FileInputStream(fSyncFile);
- FileChannel fc = fis.getChannel().position(HEADER_SIZE);
+ try (/* Set the position after the header */
+ FileInputStream fis = new FileInputStream(fSyncFile);
+ FileChannel fc = fis.getChannel().position(HEADER_SIZE);
+ /* Read the input stream */
+ ObjectInputStream ois = new ObjectInputStream(fis);) {
- /* Read the input stream */
- ObjectInputStream ois = new ObjectInputStream(fis);
- SyncAlgorithmFullyIncremental syncAlgo = null;
- try {
- syncAlgo = (SyncAlgorithmFullyIncremental) ois.readObject();
+ return (SynchronizationAlgorithm) ois.readObject();
} catch (ClassNotFoundException e) {
-
+ return null;
}
- ois.close();
- fc.close();
- fis.close();
- return syncAlgo;
}
/**
return;
}
- FileChannel fc;
- FileOutputStream fos;
- ObjectOutputStream oos;
- ByteBuffer buffer;
- int res;
-
- fos = new FileOutputStream(fSyncFile, false);
- fc = fos.getChannel();
+ /* Save the header of the file */
+ try (FileOutputStream fos = new FileOutputStream(fSyncFile, false);
+ FileChannel fc = fos.getChannel();) {
- buffer = ByteBuffer.allocate(HEADER_SIZE);
- buffer.clear();
+ ByteBuffer buffer = ByteBuffer.allocate(HEADER_SIZE);
+ buffer.clear();
- /* Save the header of the file */
- try {
fc.position(0);
buffer.putInt(SYNC_FILE_MAGIC_NUMBER);
buffer.putInt(FILE_VERSION);
buffer.flip();
- res = fc.write(buffer);
+ int res = fc.write(buffer);
assert (res <= HEADER_SIZE);
/* done writing the file header */
fc.position(HEADER_SIZE);
- oos = new ObjectOutputStream(fos);
- oos.writeObject(syncAlgo);
- oos.close();
+ try (ObjectOutputStream oos = new ObjectOutputStream(fos);) {
+ oos.writeObject(syncAlgo);
+ }
+ } catch (FileNotFoundException e) {
+ /* Send this upwards */
+ throw e;
} catch (IOException e) {
- /* We should not have any problems at this point... */
+ /* Handle other cases of IOExceptions */
Activator.logError("Error saving trace synchronization data", e); //$NON-NLS-1$
- } finally {
- try {
- fc.close();
- fos.close();
- } catch (IOException e) {
- Activator.logError("Error closing synchronization file", e); //$NON-NLS-1$
- }
}
return;
}
private static Map<String, String> getPreferenceMap(boolean defaultValues) {
- Map<String, String> prefsMap = new HashMap<String, String>();
+ Map<String, String> prefsMap = new HashMap<>();
IEclipsePreferences prefs = defaultValues ? DefaultScope.INSTANCE.getNode(Activator.PLUGIN_ID) : InstanceScope.INSTANCE.getNode(Activator.PLUGIN_ID);
prefToMap(prefs, prefsMap, ITmfTimePreferencesConstants.SUBSEC, SUBSEC_DEFAULT);
prefToMap(prefs, prefsMap, ITmfTimePreferencesConstants.TIME_DELIMITER, TIME_DELIMITER_DEFAULT);
private String fPattern;
// The timestamp pattern
- private List<String> fSupplPatterns = new ArrayList<String>();
+ private List<String> fSupplPatterns = new ArrayList<>();
/**
* The supplementary pattern letters. Can be redefined by sub-classes
import java.io.File;
import java.io.FileInputStream;
-import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
* @since 2.0
*/
protected final Map<String, ITmfStateSystem> fStateSystems =
- new LinkedHashMap<String, ITmfStateSystem>();
+ new LinkedHashMap<>();
private ITmfTimestampTransform fTsTransform;
private final Map<String, IAnalysisModule> fAnalysisModules =
- new LinkedHashMap<String, IAnalysisModule>();
+ new LinkedHashMap<>();
private static final String SYNCHRONIZATION_FORMULA_FILE = "sync_formula"; //$NON-NLS-1$
*/
@Override
public <T> Map<String, T> getAnalysisModules(Class<T> moduleclass) {
- Map<String, T> modules = new HashMap<String, T>();
+ Map<String, T> modules = new HashMap<>();
for (Entry<String, IAnalysisModule> entry : fAnalysisModules.entrySet()) {
if (moduleclass.isAssignableFrom(entry.getValue().getClass())) {
modules.put(entry.getKey(), moduleclass.cast(entry.getValue()));
File sync_file = getSyncFormulaFile();
if (sync_file != null && sync_file.exists()) {
- try {
- FileInputStream fis = new FileInputStream(sync_file);
- ObjectInputStream ois = new ObjectInputStream(fis);
+ try (FileInputStream fis = new FileInputStream(sync_file);
+ ObjectInputStream ois = new ObjectInputStream(fis);) {
+
fTsTransform = (ITmfTimestampTransform) ois.readObject();
- ois.close();
- fis.close();
- } catch (ClassNotFoundException e1) {
- fTsTransform = TmfTimestampTransform.IDENTITY;
- } catch (FileNotFoundException e1) {
- fTsTransform = TmfTimestampTransform.IDENTITY;
- } catch (IOException e1) {
+ } catch (ClassNotFoundException | IOException e) {
fTsTransform = TmfTimestampTransform.IDENTITY;
}
} else {
// ------------------------------------------------------------------------
private TmfTraceManager() {
- fTraces = new LinkedHashMap<ITmfTrace, TmfTraceContext>();
+ fTraces = new LinkedHashMap<>();
TmfSignalManager.registerVIP(this);
}