Fixed all the documentation issues found by generating the javadoc
PhRX committed Feb 9, 2017
1 parent 5d1ece9 commit 6d4a909
Showing 41 changed files with 181 additions and 106 deletions.
@@ -30,7 +30,7 @@ public interface Block
{
/**
* @return false iff you're ready to stop, true otherwise
* @throws Exception
* @throws Exception when things go bump
*/
boolean run() throws Exception;
}
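
A minimal usage sketch for the Block interface documented above (not taken from this commit; it assumes the Block type shown here is imported): the caller keeps invoking run() until it returns false.

    Block countdown = new Block()
    {
        private int remaining = 3;

        @Override
        public boolean run() throws Exception
        {
            System.out.println("remaining: " + remaining);
            return --remaining > 0; // false iff we're ready to stop
        }
    };

    while (countdown.run())
    {
        // keep invoking the block until it signals it is ready to stop
    }
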
@@ -112,6 +112,8 @@ public Tree getTree(CombinedGrouping grouping)
*
* The {@link LeanThreadNode}s, which are the root {@link Node}s in the {@link LeanProfile}, are themselves already
* aggregations of all their descendant Frame {@link LeanNode}s, so we don't need to descend any further.
*
* @return the {@link NumericInfo} containing the aggregated data
*/
private NumericInfo aggregateGlobal()
{
@@ -21,7 +21,7 @@ public final class ReferenceUtil
* @param tree the {@link Tree} whose references are to be changed
* @param mode the strategy for setting the references
*/
public static <K> void switchReference(Tree tree, ReferenceMode mode)
public static void switchReference(Tree tree, ReferenceMode mode)
{
switch (mode)
{
@@ -52,7 +52,7 @@ public static <K> void switchReference(Tree tree, ReferenceMode mode)
* @param treeDiff the {@link TreeDiff} whose references are to be changed
* @param mode the strategy for setting the references
*/
public static <K> void switchReference(TreeDiff treeDiff, ReferenceMode mode)
public static void switchReference(TreeDiff treeDiff, ReferenceMode mode)
{
switchReference(treeDiff.getBaseAggregation(), mode);
switchReference(treeDiff.getNewAggregation(), mode);
@@ -68,7 +68,7 @@ public static <K> void switchReference(TreeDiff treeDiff, ReferenceMode mode)
* @param parent the parent {@link Node}
* @param child the child {@link Node}
*/
private static <K> void setReferenceToParent(Node parent, Node child)
private static void setReferenceToParent(Node parent, Node child)
{

if (parent == null)
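
A usage sketch for the two public switchReference overloads above (assuming the relevant types are imported; the GLOBAL constant is an assumed example value of ReferenceMode, not confirmed by this diff):

    // Apply one reference strategy to a single Tree...
    ReferenceUtil.switchReference(tree, ReferenceMode.GLOBAL);
    // ...and the same strategy to both the Base and New aggregations of a TreeDiff.
    ReferenceUtil.switchReference(treeDiff, ReferenceMode.GLOBAL);
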
@@ -11,7 +11,6 @@
import com.insightfullogic.honest_profiler.core.aggregation.result.straight.Flat;
import com.insightfullogic.honest_profiler.core.aggregation.result.straight.Node;
import com.insightfullogic.honest_profiler.core.aggregation.result.straight.Tree;
import com.insightfullogic.honest_profiler.core.profiles.lean.LeanNode;

/**
* Aggregator which takes a {@link Node}, and aggregates the values of the {@link Node} and its descendants into a
@@ -23,7 +22,7 @@ public class DescendantFlatAggregator implements SubAggregator<Node, Entry>
* This method aggregates all descendants of a {@link Node} into a {@link Flat}, using the original
* {@link CombinedGrouping} from the {@link Tree} the {@link Node} belongs to.
*
* @see SubAggregator#aggregate(Object, LeanNode)
* @see SubAggregator#aggregate(Object)
*/
@Override
public Flat aggregate(Node parent)
@@ -7,7 +7,7 @@

/**
* Generic interface for aggregation functions which operate on the entire {@link AggregationProfile}. An Aggregator
* aggregates an input {@link AggregationProfile} into an {@link Aggregation} containing results of type <T>, which are
* aggregates an input {@link AggregationProfile} into an {@link Aggregation} containing results of type T, which are
* keyed by a String.
*
* @param <T> the type of the content items in the resulting {@link Aggregation}
@@ -18,6 +18,7 @@ public interface ProfileAggregator<T extends Keyed<String>>
* Aggregate the provided {@link AggregationProfile}.
*
* @param input the {@link AggregationProfile} to be aggregated
* @param grouping the {@link CombinedGrouping} to be used when aggregating
* @return the resulting {@link Aggregation}
*/
Aggregation<T> aggregate(AggregationProfile input, CombinedGrouping grouping);
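
A hypothetical skeleton illustrating the ProfileAggregator contract described above (the class name, the choice of Entry as result type, and the empty body are placeholders, not code from this project):

    // Sketch: a ProfileAggregator turns the whole profile into an Aggregation of
    // String-keyed items, using the supplied grouping to derive the keys.
    public class SketchAggregator implements ProfileAggregator<Entry>
    {
        @Override
        public Aggregation<Entry> aggregate(AggregationProfile input, CombinedGrouping grouping)
        {
            // A real implementation walks the profile's thread nodes and accumulates
            // data per grouping key; the body is omitted in this sketch.
            return null;
        }
    }
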
@@ -5,10 +5,10 @@

/**
* Generic interface for aggregation functions which operate on already aggregated items. An Aggregator aggregates input
* of type <I> into an {@link Aggregation} containing results of type <T>, which are keyed by a key of type String.
* of type I into an {@link Aggregation} containing results of type T, which are keyed by a key of type String.
*
* @param <I> the type of the input being aggregated
* @param <T> the type of the content items in the resulting {@link Aggregation}
* @param I the type of the input being aggregated
* @param T the type of the content items in the resulting {@link Aggregation}
*/
public interface SubAggregator<I, T extends Keyed<String>>
{
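
For comparison, a usage sketch of a concrete SubAggregator, using the DescendantFlatAggregator from the hunk above (the selected Node is assumed to come from an existing Tree):

    // Flatten everything below one tree node into a Flat aggregation.
    DescendantFlatAggregator flattener = new DescendantFlatAggregator();
    Flat descendantFlat = flattener.aggregate(selectedNode);
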
@@ -12,7 +12,7 @@
* whether the item fulfills the condition.
*
* It would have been nice to be able to parametrize the Enum, it might have been possible to make the internal factory
* methods lighter. May be revisited if {@link http://openjdk.java.net/jeps/301} gets implemented.
* methods lighter. May be revisited if <a href="http://openjdk.java.net/jeps/301">JEP 301</a> gets implemented.
*/
public enum Comparison
{
@@ -194,6 +194,7 @@ private Comparison(String name)
/**
* Returns a {@link Predicate} which will evaluate this comparison against the provided value.
*
* @param <T> the type of the {@link Object} tested by the {@link Predicate}
* @param value the value the {@link Predicate} will compare against
* @return a {@link Predicate} which compares input to the provided value
*/
@@ -74,8 +74,8 @@ public U getValue()
// Filter Creation

/**
* Creates the {@link FilterPredicate} which can filter items of type <T> by extracting the value specified by
* {@link Target} from them. The {@link ItemType} explicitly describes the type <T>, and is needed for implementation
* Creates the {@link FilterPredicate} which can filter items of type T by extracting the value specified by
* {@link Target} from them. The {@link ItemType} explicitly describes the type T, and is needed for implementation
* reasons.
*
* @param type the type of the item the {@link FilterPredicate} can be used on
@@ -52,6 +52,8 @@ public FilterSpecification(ItemType type)
* specified {@link ItemType}.
*
* @param type the type of items the filter can filter
* @param hideErrors a boolean specifying if error frames should be filtered out
* @param filters a {@link List} of the contained {@link FilterItem}s
*/
public FilterSpecification(ItemType type, boolean hideErrors, List<FilterItem<T, ?>> filters)
{
@@ -91,7 +93,7 @@ public boolean isFiltering()
/**
* Sets the quickfilter String, which when not empty will generate an extra {@link Predicate} for filtering the key.
*
* @param value
* @param value the value used for filtering
*/
public void setQuickFilter(String value)
{
@@ -101,7 +103,7 @@ public void setQuickFilter(String value)
// Filter Construction Methods

/**
* Generates a {@link Predicate} which accepts items of type <T> if they are accepted by all of the filters from all
* Generates a {@link Predicate} which accepts items of type T if they are accepted by all of the filters from all
* contained {@link FilterItem}s, and optionally if they do not contain errors and/or if the key contains the String
* specified by the quickfilter String.
*
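
A hypothetical construction sketch for the three-argument constructor and setQuickFilter(String) documented above (ItemType.ENTRY, the empty filter list and the java.util.ArrayList import are illustrative assumptions):

    // Filter Entry items: hide error frames, start with no explicit FilterItems,
    // and quick-filter keys on a substring.
    FilterSpecification<Entry> spec =
        new FilterSpecification<>(ItemType.ENTRY, true, new ArrayList<FilterItem<Entry, ?>>());
    spec.setQuickFilter("java.util");
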
@@ -150,6 +150,8 @@ public ValueType getType()
* Returns the extractor {@link Function} which can extract the Target value from the aggregation items with the
* specified {@link ValueType}.
*
* @param <T> the type of the input of the extractor {@link Function}
* @param <U> the type of the result of the extractor {@link Function}
* @param type the {@link ValueType} of the aggregation items the extractor will accept
* @return the extractor {@link Function} for extracting the Target value
*/
@@ -38,7 +38,8 @@ public enum ValueType
* Returns a validator {@link Predicate} which tests the String by trying to apply the corresponding convertor. If
* that operation throws an Exception, the String cannot be converted.
*
* @param convertor the convertor {@link Function} which converts a String to a value of the specified type <T>
* @param <T> the type of the result of the convertor {@link Function}
* @param convertor the convertor {@link Function} which converts a String to a value of the specified type T
* @return a validator {@link Predicate}
*/
private static final <T> Predicate<String> validatorFor(Function<String, T> convertor)
@@ -133,6 +134,7 @@ public Predicate<String> getValidator()
/**
* Returns a {@link Function} for converting a String to a value of this type.
*
* @param <T> the type of the result of the conversion {@link Function}
* @return a {@link Function} for converting a String to a value of this type
*/
@SuppressWarnings("unchecked")
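
The validate-by-converting pattern described above can be sketched as follows (a standalone restatement, not the project's actual implementation; java.util.function.Function and Predicate are assumed to be imported): the String is accepted exactly when the convertor parses it without throwing.

    static <T> Predicate<String> validatorFor(Function<String, T> convertor)
    {
        return input ->
        {
            try
            {
                convertor.apply(input); // attempt the conversion
                return true;            // no exception: the String is valid
            }
            catch (Exception e)
            {
                return false;           // conversion failed: reject the String
            }
        };
    }

    // e.g. validatorFor(Integer::parseInt).test("42") -> true, .test("4x2") -> false
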
@@ -48,7 +48,7 @@ private CombinedGrouping(ThreadGrouping threadGrouping, FrameGrouping frameGroup
*
* @param profile the input {@link AggregationProfile}
* @param node the {@link LeanNode} for which the key will be calculated
* @return
* @return the calculated key
*/
@Override
public String apply(AggregationProfile profile, LeanNode node)
@@ -18,17 +18,17 @@
public enum FrameGrouping implements BiFunction<AggregationProfile, LeanNode, String>
{
/**
* Group frames by Fully Qualified Method Name. The constructed key is "<Fully Qualified ClassName>.<Method Name>".
* Group frames by Fully Qualified Method Name. The constructed key is "[Fully Qualified ClassName].[Method Name]".
*/
BY_FQMN("By FQMN", (profile, node) -> profile.getSource().getMethodInfoMap().get(node.getFrame().getMethodId()).getFqmn()),
/**
* Group frames by Fully Qualified Method Name and line number. The constructed key is "<Fully Qualified
* ClassName>.<Method Name>:<Line Number>".
* Group frames by Fully Qualified Method Name and line number. The constructed key is "[Fully Qualified
* ClassName].[Method Name]:[Line Number]".
*/
BY_FQMN_LINENR("By FQMN + Line Nr", (profile, node) -> profile.getSource().getFqmnPlusLineNr(node)),
/**
* Group frames by Fully Qualified Method Name and BCI (byte code index). The constructed key is "<Fully Qualified
* ClassName>.<Method Name>:<BCI>".
* Group frames by Fully Qualified Method Name and BCI (byte code index). The constructed key is "[Fully Qualified
* ClassName].[Method Name]:[BCI]".
*/
BY_BCI("By FQMN + BCI", (profile, node) -> profile.getSource().getBciKey(node));

@@ -32,6 +32,7 @@ public abstract class AbstractDiff<T extends Keyed<String>, U extends Keyed<Stri
* Sets the {@link Aggregation}s which will be compared.
*
* @param baseAggregation the Base Aggregation
* @param newAggregation the New Aggregation
*/
protected void setAggregations(V baseAggregation, V newAggregation)
{
@@ -33,7 +33,7 @@ public DiffEntry(Entry baseEntry, Entry newEntry)
* Sets the Base {@link Entry}.
*
* The return value is provided as a convenience for
* {@link FlatDiff#setBase(com.insightfullogic.honest_profiler.core.aggregation.result.straight.Flat)}.
* {@link FlatDiff#set(com.insightfullogic.honest_profiler.core.aggregation.result.straight.Flat, com.insightfullogic.honest_profiler.core.aggregation.result.straight.Flat)}.
*
* @param entry the Base {@link Entry}
* @return this {@link DiffEntry}
@@ -48,7 +48,7 @@ public DiffEntry setBase(Entry entry)
* Sets the New {@link Entry}.
*
* The return value is provided as a convenience for
* {@link FlatDiff#setNew(com.insightfullogic.honest_profiler.core.aggregation.result.straight.Flat)}.
* {@link FlatDiff#set(com.insightfullogic.honest_profiler.core.aggregation.result.straight.Flat, com.insightfullogic.honest_profiler.core.aggregation.result.straight.Flat)}.
*
* @param entry the New {@link Entry}
* @return this {@link DiffEntry}
@@ -87,6 +87,8 @@ public String getKey()

/**
* @see Entry#getSelfTime()
*
* @return the Base aggregated self time in nanoseconds
*/
public long getBaseSelfTime()
{
@@ -95,6 +97,8 @@ public long getBaseSelfTime()

/**
* @see Entry#getTotalTime()
*
* @return the Base aggregated total time in nanoseconds
*/
public long getBaseTotalTime()
{
@@ -103,6 +107,8 @@ public long getBaseTotalTime()

/**
* @see Entry#getSelfCnt()
*
* @return the Base aggregated self sample count
*/
public int getBaseSelfCnt()
{
@@ -111,6 +117,8 @@ public int getBaseSelfCnt()

/**
* @see Entry#getTotalCnt()
*
* @return the Base aggregated total sample count
*/
public int getBaseTotalCnt()
{
@@ -119,6 +127,8 @@ public int getBaseTotalCnt()

/**
* @see Entry#getSelfTimePct()
*
* @return the Base self time divided by the Base reference total time
*/
public double getBaseSelfTimePct()
{
@@ -127,6 +137,8 @@ public double getBaseSelfTimePct()

/**
* @see Entry#getTotalTimePct()
*
* @return the Base total time divided by the Base reference total time
*/
public double getBaseTotalTimePct()
{
@@ -135,6 +147,8 @@ public double getBaseTotalTimePct()

/**
* @see Entry#getSelfCntPct()
*
* @return the Base self sample count divided by the Base reference total sample count
*/
public double getBaseSelfCntPct()
{
@@ -143,14 +157,18 @@ public double getBaseSelfCntPct()

/**
* @see Entry#getTotalCntPct()
*
* @return the Base total sample count divided by the Base reference total sample count
*/
public double getBaseTotalCntPct()
{
return baseEntry.getTotalCntPct();
}

/**
* @see Entry#getSelfTime()()
* @see Entry#getSelfTime()
*
* @return the New aggregated self time in nanoseconds
*/
public long getNewSelfTime()
{
@@ -159,6 +177,8 @@ public long getNewSelfTime()

/**
* @see Entry#getTotalTime()
*
* @return the New aggregated total time in nanoseconds
*/
public long getNewTotalTime()
{
@@ -167,6 +187,8 @@ public long getNewTotalTime()

/**
* @see Entry#getSelfCnt()
*
* @return the New aggregated self sample count
*/
public int getNewSelfCnt()
{
@@ -175,6 +197,8 @@ public int getNewSelfCnt()

/**
* @see Entry#getTotalCnt()
*
* @return the New aggregated total sample count
*/
public int getNewTotalCnt()
{
@@ -183,6 +207,8 @@ public int getNewTotalCnt()

/**
* @see Entry#getSelfTimePct()
*
* @return the New self time divided by the New reference total time
*/
public double getNewSelfTimePct()
{
@@ -191,6 +217,8 @@ public double getNewSelfTimePct()

/**
* @see Entry#getTotalTimePct()
*
* @return the New total time divided by the New reference total time
*/
public double getNewTotalTimePct()
{
@@ -199,6 +227,8 @@ public double getNewTotalTimePct()

/**
* @see Entry#getSelfCntPct()
*
* @return the New self sample count divided by the New reference total sample count
*/
public double getNewSelfCntPct()
{
@@ -207,6 +237,8 @@ public double getNewSelfCntPct()

/**
* @see Entry#getTotalCntPct()
*
* @return the New total sample count divided by the New reference total sample count
*/
public double getNewTotalCntPct()
{
@@ -215,6 +247,8 @@ public double getNewTotalCntPct()

/**
* @see Entry#getRefCnt()
*
* @return the Base reference total sample count
*/
public int getBaseRefCnt()
{
@@ -223,6 +257,8 @@ public int getBaseRefCnt()

/**
* @see Entry#getRefCnt()
*
* @return the New reference total sample count
*/
public int getNewRefCnt()
{
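
A small usage sketch pulling together the Base/New getters documented above (the helper method itself is hypothetical; getKey(), the self time getters and the self count percentage getters all appear in the hunks):

    // Report how the self time and self sample share changed between Base and New.
    static void printSelfDelta(DiffEntry entry)
    {
        long selfTimeDeltaNs = entry.getNewSelfTime() - entry.getBaseSelfTime();
        double selfCntPctDelta = entry.getNewSelfCntPct() - entry.getBaseSelfCntPct();
        System.out.printf("%s: self time %+d ns, self sample share %+.2f%%%n",
            entry.getKey(), selfTimeDeltaNs, selfCntPctDelta * 100);
    }
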
