module metrics_base
import model_utils
+import modelbuilder
import csv
import counter
import console
var opt_generate_hyperdoc = new OptionBool("Generate Hyperdoc", "--generate_hyperdoc")
# --poset
var opt_poset = new OptionBool("Complete metrics on posets", "--poset")
-
# --no-colors
var opt_nocolors = new OptionBool("Disable colors in console outputs", "--no-colors")
-
-
+ # --dir
var opt_dir = new OptionString("Directory where some statistics files are generated", "-d", "--dir")
+
+ # Output directory for metrics files.
var output_dir: String = "."
redef init
self.option_context.add_option(opt_nocolors)
end
- redef fun process_options
+ redef fun process_options(args)
do
super
var val = self.opt_dir.value
end
end
- # colorize heading 1 for console output
+ # Format and colorize a string heading of level 1 for console output.
+ #
+ # Default style is yellow and bold.
# Format and colorize a string heading of level 1 for console output.
#
# Default style is yellow and bold; plain text when `--no-colors` is set.
fun format_h1(str: String): String do
	if not opt_nocolors.value then return str.yellow.bold
	return str
end
+ # Format and colorize a string heading of level 2 for console output.
+ #
+ # Default style is white and bold.
# Format and colorize a string heading of level 2 for console output.
#
# Default style is white and bold; plain text when `--no-colors` is set.
fun format_h2(str: String): String do
	if not opt_nocolors.value then return str.bold
	return str
end
+ # Format and colorize a string heading of level 3 for console output.
+ #
+ # Default style is white and nobold.
# Format and colorize a string heading of level 3 for console output.
#
# Default style is white and nobold: level-3 headings carry no styling,
# so the input is returned unchanged whether colors are enabled or not.
fun format_h3(str: String): String do
	return str
end
+ # Format and colorize a string heading of level 4 for console output.
+ #
+ # Default style is green.
# Format and colorize a string heading of level 4 for console output.
#
# Default style is green; plain text when `--no-colors` is set.
fun format_h4(str: String): String do
	if not opt_nocolors.value then return str.green
	return str
end
+ # Format and colorize a string heading of level 5 for console output.
+ #
+ # Default style is light gray.
fun format_p(str: String): String do
if opt_nocolors.value then return str
return str.light_gray
#
# The concept is reified here for a better organization and documentation
interface Metric
+
+ # Type of elements measured by this metric.
type ELM: Object
+
+ # Type of values used to measure elements.
type VAL: Object
+
+ # Type of data representation used to associate elements and values.
type RES: Map[ELM, VAL]
+ # The name of this metric (generally an acronym about the metric).
fun name: String is abstract
+
+ # A long and understandable description about what is measured by this metric.
fun desc: String is abstract
# Clear all results for this metric
# Pretty print the metric results in console
fun to_console(indent: Int, colors: Bool) do
+ if values.is_empty then
+ if colors then
+ print "{"\t" * indent}{name}: {desc} -- nothing".green
+ else
+ print "{"\t" * indent}{name}: {desc} -- nothing"
+ end
+ return
+ end
+
var max = self.max
var min = self.min
if colors then
end
end
+ # The sum of all the values.
+ fun sum: VAL is abstract
+
	# The standard deviation of the values
fun std_dev: Float is abstract
# The set of element above the threshold
fun above_threshold: Set[ELM] is abstract
+
+ # Sort the metric keys by values
+ fun sort: Array[ELM] do
+ return values.keys_sorted_by_values(default_reverse_comparator)
+ end
end
# A Metric that collects integer data
redef type VAL: Int
redef type RES: Counter[ELM]
+ # `IntMetric` uses a Counter to store values in intern.
protected var values_cache = new Counter[ELM]
+
redef fun values do return values_cache
redef fun clear do values_cache.clear
- fun sum: Int do return values_cache.sum
+ redef fun sum do return values_cache.sum
redef fun max do
assert not values_cache.is_empty
end
# Values average
- redef fun avg: Float do return values_cache.avg
+ redef fun avg do return values_cache.avg
- redef fun std_dev: Float do return values_cache.std_dev
+ redef fun std_dev do return values_cache.std_dev
redef fun above_threshold do
var above = new HashSet[ELM]
end
return above
end
+
+ redef fun to_console(indent, colors) do
+ super
+ if colors then
+ print "{"\t" * indent} sum: {sum}".light_gray
+ else
+ print "{"\t" * indent} sum: {sum}"
+ end
+ end
end
# A Metric that collects float data
redef type VAL: Float
+ # `FloatMetric` uses a Map to store values in intern.
protected var values_cache = new HashMap[ELM, VAL]
+
redef fun values do return values_cache
redef fun clear do values_cache.clear
- fun sum: Float do
+
+ redef fun sum do
var sum = 0.0
for v in values.values do sum += v
return sum
return sum / values.length.to_f
end
- redef fun std_dev: Float do
+ redef fun std_dev do
var sum = 0.0
for value in values.values do
sum += (value - avg).pow(2.to_f)
end
return above
end
+
+ redef fun to_console(indent, colors) do
+ super
+ if colors then
+ print "{"\t" * indent} sum: {sum}".light_gray
+ else
+ print "{"\t" * indent} sum: {sum}"
+ end
+ end
end
# A MetricSet is a metric holder
#
# Its purpose is to be extended with a metric collection service
class MetricSet
+
+ # Type of element measured by this `MetricSet`.
type ELM: Object
# Metrics to compute
end
# Export the metric set in CSV format
- fun to_csv: CSVDocument do
- var csv = new CSVDocument
+ fun to_csv: CsvDocument do
+ var csv = new CsvDocument
+
+ csv.format = new CsvFormat('"', ';', "\n")
# set csv headers
csv.header.add("entry")
line.add("n/a")
end
end
- csv.lines.add(line)
+ csv.records.add(line)
end
return csv
end