Unescaping names in dot files generated by nitdoc seems to cause the recent failures on jenkins.
The bug was present since the new nitdoc but expressed by #883 that exposes the name of the project in the dot-file:
* On Jenkins, multiple workspaces are identified with a @, e.g. `workspace@2`
* The name of a Nit project is the name of the directory
* Therefore the `dot` command aborted when an identifier with @ was found, thus did not produce a map file.
* Therefore nitdoc aborted on the opening of the map file (to include in the HTML)
Related to #891
Pull-Request: #894
Reviewed-by: Alexis Laferrière <alexis.laf@xymus.net>
var done_tag: Tag = 5.tag
# Number of tasks within each task assignation with `task_tag`
- var tasks_per_packet = 4
+ var tasks_per_packet = 1
# Run the main logic of this node
fun run is abstract
# Gather and register all tasks
fun create_tasks
do
- var c = 0
- for engine in engines do for prog in test_programs do
+ for prog in test_programs do for engine in engines do
tasks.add new Task(engine, prog)
- c += 1
end
end
end
if res == 5 then result.fail = true
if res == 6 then result.soso = true
if res == 7 then result.skip = true
+ if res == 8 then result.todo = true
+ if res == 9 then result.skip_exec = true
if res == 0 then result.unknown = true
results.add result
print "* {results.fixmes.length} fixmes"
print "* {results.sosos.length} sosos"
print "* {results.skips.length} skips"
+ print "* {results.todos.length} todos"
+ print "* {results.skip_execs.length} skip execs"
print "* {results.unknowns.length} unknowns (bug in tests.sh or nitester)"
end
# Output file directory
var out_dir = "/dev/shm/nit_out{rank}" is lazy
+ # Directory to store the xml files produced for Jenkins
+ var xml_dir = "~/jenkins_xml/"
+
# Output file of the `tests.sh` script
var tests_sh_out = "/dev/shm/nit_local_out{rank}" is lazy
- # Path to the local copy of the Nit repository
- var nit_copy_dir = "/dev/shm/nit{rank}/" is lazy
-
# Source Nit repository, must be already updated and `make` before execution
var nit_source_dir = "~/nit"
fun setup
do
if verbose > 0 then sys.system "hostname"
- sys.system "git clone {nit_source_dir} {nit_copy_dir}"
end
# Clean up the testing environment
do
if comp_dir.file_exists then comp_dir.rmdir
if out_dir.file_exists then out_dir.rmdir
- if nit_copy_dir.file_exists then nit_copy_dir.rmdir
if tests_sh_out.file_exists then tests_sh_out.file_delete
end
# Receive tasks to execute
mpi.recv_into(task_buffer, 0, 1, status.source, status.tag, comm_world)
var first_id = task_buffer[0]
- for task_id in [first_id .. first_id + tasks_per_packet] do
+ for task_id in [first_id .. first_id + tasks_per_packet[ do
# If id is over all known tasks, stop right here
if task_id >= tasks.length then break
var task = tasks[task_id]
# Command line to execute test
- var cmd = "XMLDIR={out_dir} ERRLIST={out_dir}/errlist TMPDIR={out_dir} " +
+ var cmd = "XMLDIR={xml_dir} ERRLIST={out_dir}/errlist TMPDIR={out_dir} " +
"CCACHE_DIR={ccache_dir} CCACHE_TEMPDIR={ccache_dir} CCACHE_BASEDIR={comp_dir} " +
- "./tests.sh --compdir {comp_dir} --outdir {out_dir} -o \"--make-flags '-j1'\"" +
- " --node --engine {task.engine} {nit_copy_dir / "tests" / task.test_program} > {tests_sh_out}"
+ "./tests.sh --compdir {comp_dir} --outdir {out_dir} " +
+ " --node --engine {task.engine} {task.test_program} > {tests_sh_out}"
# Execute test
sys.system cmd
if line.has("[======= fail") then res = 5
if line.has("[======= soso") then res = 6
if line.has("[skip]") then res = 7
+ if line.has("[todo]") then res = 8
+ if line.has("[skip exec]") then res = 9
if res == null then
res = 0
# Is `self` result a _soso_?
var soso = false
- # Is `self` skipped test?
+ # Has `self` been skipped?
var skip = false
+ # Is `self` TODO?
+ var todo = false
+
+ # Has the execution of `self` been skipped?
+ var skip_exec = false
+
# Is `self` an unknown result, probably an error
var unknown = false
if ok_empty then err = "0k"
if fixme then err = "fixme"
if fail then err = "fail"
+ if soso then err = "soso"
+ if skip then err = "skip"
+ if todo then err = "todo"
+ if skip_exec then err = "skip_exec"
return "{task} arg{arg} alt{alt} => {err}"
end
var fails = new HashSet[Result]
var sosos = new HashSet[Result]
var skips = new HashSet[Result]
+ var todos = new HashSet[Result]
+ var skip_execs = new HashSet[Result]
var unknowns = new HashSet[Result]
# TODO remove
if result.fail then fails.add result
if result.soso then sosos.add result
if result.skip then skips.add result
+ if result.todo then todos.add result
+ if result.skip_exec then skip_execs.add result
if result.unknown then unknowns.add result
super
bin/pep8analysis:
mkdir -p bin
- ../../bin/nitg --global -o bin/pep8analysis src/pep8analysis.nit
+ ../../bin/nitg -o bin/pep8analysis src/pep8analysis.nit
doc/index.html:
../../bin/nitdoc src/pep8analysis.nit
default:
- ../../../bin/nitg --global converter.nit
+ ../../../bin/nitg --semi-global converter.nit
HTTPD_PY := python $(NACL_SDK_ROOT)/tools/httpd.py
serve:
2. Declare the environment variable NACL_SDK_ROOT as the root of the target platform within the SDK (ex: ~/nacl_sdk/pepper_34/) :
$ export NACL_SDK_ROOT=/path/to/nacl_sdk/pepper_[your_version]
-3. Compile the Nit code with: `nitg --global converter.nit` or `make`.
+3. Compile the Nit code with: `nitg --semi-global converter.nit` or `make`.
-You must use the '--global' option. Some features in the standard library are not supported by the NaCL platform, the global compiler do not try to compile them.
+You must use the '--semi-global' (or `--global`) option. Some features in the standard library are not supported by the NaCL platform; the global compilers do not try to compile them.
4. Start a local server using: `make serve`.
break
end
- print "Solved, after looking at {r.steps} positions during {c.lapse}"
+ print "Solved, after looking at {r.steps} positions"
pb.print_plan(r.plan)
end
--- /dev/null
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# This file is free software, which comes along with NIT. This software is
+# distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE. You can modify it is you want, provided this header
+# is kept unaltered, and a notification of the changes is added.
+# You are allowed to redistribute it and sell it, alone or is a part of
+# another product.
+
+# Cartesian products on heterogeneous collections.
+#
+# This module is a proof-of-concept to propose memory-efficient views on collections.
+#
+# This is a specific alternative to `combinations`, that focuses only on highly efficient
+# Cartesian products between collections of different types.
+#
+# Collection[Int] X Collection[String] -> Collection[(Int,String)]
+#
+# However, in Nit, there is no native *tuple* type.
+# So we need a first building block, a pair.
+
+# A simple read-only pair of two elements `e` and `f`.
+class Pair[E, F]
+ # The first element of the pair
+ var e: E
+
+ # The second element of the pair
+ var f: F
+
+ # The parenthesized notation.
+ #
+ # ~~~
+ # var p = new Pair[Int, String](1, "hello")
+ # assert p.to_s == "(1,hello)"
+ # ~~~
+ redef fun to_s
+ do
+ var es = e or else ""
+ var fs = f or else ""
+ return "({es},{fs})"
+ end
+
+ # Untyped pair equality.
+ #
+ # ~~~
+ # var p1 = new Pair[Object, Object](1, 2)
+ # var p2 = new Pair[Int, Int](1, 2)
+ # var p3 = new Pair[Int, Int](1, 3)
+ #
+ # assert p1 == p2
+ # assert p2 != p3
+ # ~~~
+ #
+ # Untyped because we want that `p1 == p2` above.
+ # So the method just ignores the real types of `E` and `F`.
+ redef fun ==(o) do return o isa Pair[nullable Object, nullable Object] and e == o.e and f == o.f
+
+ redef fun hash do return e.hash * 13 + f.hash * 27 # Magic numbers are magic!
+end
+
+# A view of a Cartesian-product collection over two collections.
+#
+# A Cartesian product over two collections is just a collection of pairs.
+# Therefore, this view *contains* all the pairs of elements constructed by associating each
+# element of the first collection to each element of the second collection.
+#
+# However the view is memory-efficient and the pairs are created only when needed.
+#
+# A simple Cartesian product
+# ~~~~
+# var c1 = [1,2]
+# var c2 = ["a","b","c"]
+# var c12 = new Cartesian[Int,String](c1, c2)
+# assert c12.length == 6
+# assert c12.join(";") == "(1,a);(1,b);(1,c);(2,a);(2,b);(2,c)" # All the 6 pairs
+# ~~~~
+#
+# Note: because it is a view, changes on the base collections are reflected on the view.
+#
+# E.g. c12 is a view on c1 and c2, so if c1 changes, then c12 "changes".
+# ~~~~
+# assert c2.pop == "c"
+# assert c12.length == 4
+# assert c12.join(";") == "(1,a);(1,b);(2,a);(2,b)" # All the 4 remaining pairs
+# ~~~~
+#
+# Cartesian objects are collections, so can be used to build another Cartesian object.
+# ~~~~
+# var c3 = [1000..2000[
+# var c123 = new Cartesian[Pair[Int,String],Int](c12, c3)
+# assert c123.length == 4000
+# ~~~~
+#
+# All methods of Collection are inherited, it is so great!
+#
+# E.g. search elements?
+# ~~~~
+# var p12 = new Pair[Int,String](2,"b")
+# assert c12.has(p12) == true
+# var p123 = new Pair[Pair[Int, String], Int](p12, 1500)
+# var p123bis = new Pair[Pair[Int, String], Int](p12, 0)
+# assert c123.has(p123) == true
+# assert c123.has(p123bis) == false
+# ~~~~
+class Cartesian[E, F]
+ super Collection[Pair[E,F]]
+
+ # The first collection
+ var ce: Collection[E]
+
+ # The second collection
+ var cf: Collection[F]
+
+ redef fun length do return ce.length * cf.length # optional, but so efficient...
+
+ redef fun iterator do return new CartesianIterator[E,F](self)
+
+ # Returns a new Cartesian where the first collection is the second.
+ # Because the full collection is virtual, the operation is cheap!
+ fun swap: Cartesian[F, E] do return new Cartesian[F, E](cf, ce)
+end
+
+# An iterator over a `Cartesian`-product collection.
+class CartesianIterator[E,F]
+ super Iterator[Pair[E,F]]
+
+ # The associated Cartesian-product collection.
+ var collection: Cartesian[E,F]
+
+ # The iterator over the first collection of the Cartesian product.
+ # Will be used only once.
+ private var ice: Iterator[E] is noinit
+
+ # The iterator over the second collection of the Cartesian product.
+ # Will be used once for each element of the first collection.
+ private var icf: Iterator[F] is noinit
+
+ init do
+ # Initialize each iterator
+ ice = collection.ce.iterator
+ icf = collection.cf.iterator
+ end
+
+ redef fun is_ok do return ice.is_ok and icf.is_ok
+
+ redef fun item do
+ # We lazily create the pair here
+ var res = item_cache
+ if res == null then
+ res = new Pair[E,F](ice.item, icf.item)
+ item_cache = res
+ end
+ return res
+ end
+
+ # Cached pair created by `item` and cleared by `next`.
+ private var item_cache: nullable Pair[E,F] = null
+
+ redef fun next do
+ # Next item in the second iterator
+ icf.next
+ if not icf.is_ok then
+ # If it is over, then reset it and advance the first iterator
+ icf = collection.cf.iterator
+ ice.next
+ end
+ # Reset the cache
+ item_cache = null
+ end
+
+ # First member of `item`.
+ #
+	# This method shortcuts the allocation of a `Pair`, thus should be more time and memory efficient.
+ fun item_e: E do return ice.item
+
+ # Second member of `item`.
+ #
+	# This method shortcuts the allocation of a `Pair`, thus should be more time and memory efficient.
+	fun item_f: F do return icf.item
+end
--- /dev/null
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# This file is free software, which comes along with NIT. This software is
+# distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE. You can modify it is you want, provided this header
+# is kept unaltered, and a notification of the changes is added.
+# You are allowed to redistribute it and sell it, alone or is a part of
+# another product.
+
+# Cartesian products, combinations and permutation on collections.
+#
+# This module offers memory-efficient views on combinatoric collections.
+# Methods of the views create objects only when needed.
+# Moreover, produced objects during iterations are free to be collected and
+# their memory reused.
+#
+# This enables these views and methods to work with very large combinatoric collections.
+#
+# When small combinatoric views need to be kept in memory (for fast access, for example),
+# the `Collection::to_a` method and other related factories can be used to transform
+# the combinatoric views into extensive collections.
+module combinations
+
+redef class Collection[E]
+ # Cartesian product, over `r` times `self`.
+ #
+ # See `CartesianCollection` for details.
+ #
+ # FIXME: Cannot be used if RTA is enabled. So `niti` or `--erasure` only.
+ fun product(r: Int): Collection[SequenceRead[E]]
+ do
+ return new CartesianCollection[E]([self]*r)
+ end
+
+ # All `r`-length permutations on self (all possible ordering) without repeated elements.
+ #
+ # See `CartesianCollection` for details.
+ #
+ # FIXME: Cannot be used if RTA is enabled. So `niti` or `--erasure` only.
+ fun permutations(r: Int): Collection[SequenceRead[E]]
+ do
+ var res = new CombinationCollection[E](self, r)
+ res.are_sorted = false
+ res.are_unique = true
+ return res
+ end
+
+ # All `r`-length combinations on self (in same order) without repeated elements.
+ #
+ # See `CartesianCollection` for details.
+ #
+ # FIXME: Cannot be used if RTA is enabled. So `niti` or `--erasure` only.
+ fun combinations(r: Int): Collection[SequenceRead[E]]
+ do
+ var res = new CombinationCollection[E](self, r)
+ res.are_sorted = true
+ res.are_unique = true
+ return res
+ end
+
+ # All `r`-length combination on self (in same order) with repeated elements.
+ #
+ # See `CartesianCollection` for details.
+ #
+ # FIXME: Cannot be used if RTA is enabled. So `niti` or `--erasure` only.
+ fun combinations_with_replacement(r: Int): Collection[SequenceRead[E]]
+ do
+ var res = new CombinationCollection[E](self, r)
+ res.are_sorted = true
+ res.are_unique = false
+ return res
+ end
+end
+
+# A view of a Cartesian-product collection over homogeneous collections.
+#
+# Therefore, this view *generates* all the sequences of elements constructed by associating
+# an element from each one of the original collections.
+#
+# It is equivalent to doing nesting `for` for each collection.
+#
+# ~~~~
+# var xs = [1, 2, 3]
+# var ys = [8, 9]
+# var xys = new CartesianCollection[Int]([xs, ys])
+# assert xys.length == 6
+# assert xys.to_a == [[1,8], [1,9], [2,8], [2,9], [3,8], [3,9]]
+# ~~~~
+#
+# The pattern of the generated sequences produces a lexicographical order.
+#
+# Because it is a generator, it is memory-efficient and the sequences are created only when needed.
+#
+# Note: because it is a view, changes on the base collections are reflected on the view.
+#
+# ~~~~
+# assert xs.pop == 3
+# assert ys.pop == 9
+# assert xys.to_a == [[1,8], [2,8]]
+# ~~~~
+class CartesianCollection[E]
+ super Collection[SequenceRead[E]]
+
+ # The base collections used to generate the sequences.
+ var collections: SequenceRead[Collection[E]]
+
+ redef fun length
+ do
+ var res = 1
+ for c in collections do res = res * c.length
+ return res
+ end
+
+ redef fun iterator do return new CartesianIterator[E](self)
+end
+
+private class CartesianIterator[E]
+ super Iterator[SequenceRead[E]]
+ var collection: CartesianCollection[E]
+
+ # The array of iterations that will be increased in the lexicographic order.
+ private var iterators = new Array[Iterator[E]]
+
+ init
+ do
+ for c in collection.collections do
+ var it = c.iterator
+ iterators.add it
+ if not it.is_ok then is_ok = false
+ end
+ end
+
+ redef var is_ok = true
+
+ redef fun item
+ do
+ var len = iterators.length
+ var res = new Array[E].with_capacity(len)
+ for i in [0..len[ do
+ var it = iterators[i]
+ res.add(it.item)
+ end
+ return res
+ end
+
+ redef fun next
+ do
+ var rank = iterators.length - 1
+
+ # Odometer-like increment starting from the last iterator
+ loop
+ var it = iterators[rank]
+ it.next
+ if it.is_ok then return
+
+			# The iterator is over
+			if rank == 0 then
+				# If it is the first, then the whole thing is over
+ is_ok = false
+ return
+ end
+
+ # If not, restart the iterator and increment the previous one
+ # (like a carry)
+ iterators[rank] = collection.collections[rank].iterator
+ rank -= 1
+ end
+ end
+end
+
+# A view of some combinations over a base collections.
+#
+# This view *generates* some combinations and permutations on a collection.
+#
+# By default, the generated sequences are combinations:
+#
+# * each sequence has a length of `repeat`
+# * elements are in sorted order (see `are_sorted` for details)
+# * no repeated element (see `are_unique` for details)
+#
+# ~~~~
+# var xs = [1, 2, 3]
+# var cxs = new CombinationCollection[Int](xs, 2)
+# assert cxs.length == 3
+# assert cxs.to_a == [[1,2], [1,3], [2,3]]
+# ~~~~
+#
+# Other kind of combinations can be generated by tweaking the attributes `are_sorted` and `are_unique`.
+#
+# * for permutation:
+#
+# ~~~~
+# cxs.are_sorted = false
+# cxs.are_unique = true
+# assert cxs.length == 6
+# assert cxs.to_a == [[1,2], [1,3], [2,1], [2,3], [3,1], [3,2]]
+# ~~~~
+#
+# * for combinations with replacement:
+#
+# ~~~~
+# cxs.are_sorted = true
+# cxs.are_unique = false
+# assert cxs.length == 6
+# assert cxs.to_a == [[1,1], [1,2], [1,3], [2,2], [2,3], [3,3]]
+# ~~~~
+#
+# * for product:
+#
+# ~~~~
+# cxs.are_sorted = false
+# cxs.are_unique = false
+# assert cxs.length == 9
+# assert cxs.to_a == [[1,1], [1,2], [1,3], [2,1], [2,2], [2,3], [3,1], [3,2], [3,3]]
+# ~~~~
+#
+# However, in the last case, a faster alternative is to use `CartesianCollection`:
+#
+# ~~~~
+# var cp = new CartesianCollection[Int]([xs] * 2)
+# assert cp.to_a == cxs.to_a
+# ~~~~
+#
+# As seen in the examples, the patterns of the generated sequences produce a lexicographical order.
+#
+# Because it is a generator, it is memory-efficient and the sequences are created only when needed.
+#
+# Note: because it is a view, changes on the base collection are reflected on the view.
+#
+# ~~~~
+# assert xs.pop == 3
+# cxs.are_sorted = true
+# cxs.are_unique = true
+# assert cxs.to_a == [[1,2]]
+# ~~~~
+class CombinationCollection[E]
+ super Collection[SequenceRead[E]]
+
+ # The base collection used to generate the sequences.
+ var collection: Collection[E]
+
+ # The maximum length of each generated sequence.
+ var repeat: Int
+
+ init
+ do
+ assert repeat >= 0
+ end
+
+ # Are the elements in the generated sequences sorted?
+ # Default `true`.
+ #
+ # When `true`, the original order is preserved.
+ #
+ # Elements are compared by their order in the base collection,
+ # not by their intrinsic value or comparability.
+ #
+ # ~~~~
+ # var xs = [1, 1, 2]
+ # var cxs = new CombinationCollection[Int](xs, 2)
+ # cxs.are_sorted = true
+ # assert cxs.to_a == [[1,1], [1,2], [1, 2]]
+ # cxs.are_sorted = false
+ # assert cxs.to_a == [[1,1], [1,2], [1, 1], [1, 2], [2, 1], [2, 1]]
+ # ~~~~
+ var are_sorted = true is writable
+
+	# Are the elements in the generated sequences unique?
+ # Default `true`.
+ #
+ # When `true`, an element cannot be reused in the same sequence (no replacement).
+ #
+ # Elements are distinguished by their order in the base collection,
+ # not by their intrinsic value or equality.
+ #
+ # ~~~~
+ # var xs = [1, 1, 2]
+ # var cxs = new CombinationCollection[Int](xs, 2)
+ # cxs.are_unique = true
+ # assert cxs.to_a == [[1,1], [1,2], [1, 2]]
+ # cxs.are_unique = false
+ # assert cxs.to_a == [[1,1], [1,1], [1,2], [1,1], [1,2], [2,2]]
+ # ~~~~
+ var are_unique = true is writable
+
+ redef fun length
+ do
+ var n = collection.length
+ if are_unique then
+ if repeat > n then
+ return 0
+ end
+ if are_sorted then
+ return n.factorial / repeat.factorial
+ else
+ return n.factorial / (n-repeat).factorial
+ end
+ else
+ if are_sorted then
+ return (n+repeat-1).factorial / repeat.factorial / (n-1).factorial
+ else
+ return n ** repeat
+ end
+ end
+ end
+
+ redef fun iterator do
+ return new CombinationIterator[E](self)
+ end
+end
+
+private class CombinationIterator[E]
+ super Iterator[SequenceRead[E]]
+ var product: CombinationCollection[E]
+
+ private var iterators = new Array[Iterator[E]]
+ private var indices = new Array[Int]
+
+ var are_sorted: Bool is noinit
+ var are_unique: Bool is noinit
+
+ init
+ do
+ are_sorted = product.are_sorted
+ are_unique = product.are_unique
+
+ for rank in [0..product.repeat[ do
+ reset_iterator(rank)
+ end
+ end
+
+ redef var is_ok = true
+
+ redef fun item
+ do
+ var len = product.repeat
+ var res = new Array[E].with_capacity(len)
+ for i in [0..len[ do
+ var it = iterators[i]
+ res.add(it.item)
+ end
+ return res
+ end
+
+ redef fun next
+ do
+ var rank = product.repeat - 1
+
+ loop
+ var it = iterators[rank]
+
+ if are_unique and not are_sorted then
+ var idx = indices[rank] + 1
+ it.next
+ var adv = next_free(rank, idx)
+ for i in [idx..adv[ do it.next
+ indices[rank] = adv
+ else
+ it.next
+ indices[rank] += 1
+ end
+
+ if it.is_ok then break
+ if rank == 0 then
+ is_ok = false
+ return
+ end
+ rank -= 1
+ end
+
+ for r in [rank+1..product.repeat[ do
+ reset_iterator(r)
+ end
+ end
+
+ private fun next_free(rank: Int, start: Int): Int
+ do
+ loop
+ for i in [0..rank[ do
+ if indices[i] == start then
+ start += 1
+ continue label
+ end
+ end
+ break label
+ end label
+ return start
+ end
+
+ private fun reset_iterator(rank: Int): Iterator[E]
+ do
+ var it = product.collection.iterator
+ iterators[rank] = it
+ var skip = 0
+
+ if (not are_sorted and not are_unique) or rank == 0 then
+ # DO NOTHING
+ else if are_sorted and are_unique then
+ skip = indices[rank-1] + 1
+ else if are_sorted then
+ skip = indices[rank-1]
+ else
+ skip = next_free(rank, 0)
+ end
+
+ for i in [0..skip[ do it.next
+ indices[rank] = skip
+ if not it.is_ok then is_ok = false
+ return it
+ end
+
+ fun need_skip: Bool
+ do
+ if not are_sorted and not are_unique then
+ return false
+ else if are_sorted and are_unique then
+ var max = -1
+ for i in indices do
+ if i <= max then return true
+ max = i
+ end
+ return false
+ else if are_sorted then
+ var max = -1
+ for i in indices do
+ if i < max then return true
+ max = i
+ end
+ return false
+ else
+ # are_unique
+ for i in indices do
+ if indices.count(i) > 1 then return true
+ end
+ return false
+ end
+ end
+end
--- /dev/null
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# This file is free software, which comes along with NIT. This software is
+# distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
+# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE. You can modify it is you want, provided this header
+# is kept unaltered, and a notification of the changes is added.
+# You are allowed to redistribute it and sell it, alone or is a part of
+# another product.
+
+# Uses JSON as a storage medium for a Neo4j subgraph.
+module neo4j::json_store
+
+import neo4j
+private import template
+
+# A Neo4j graph that uses JSON as a storage medium.
+#
+# The graph is stored as a JSON object with the following properties:
+#
+# * `"nodes"`: An array with all nodes. Each node is an object with the
+# following properties:
+# * `"id"`: The ID (`Int`) that uniquely identifies the node in the current
+# graph.
+# * `"labels"`: An array of all applied labels.
+# * `"properties"`: An object mapping each defined property to its value.
+# * `"links"`: An array with all relationships. Each relationship is an object
+# with the following properties:
+# * `"type"`: The type (`String`) of the relationship.
+# * `"properties"`: An object mapping each defined property to its value.
+# * `"from"`: The ID (`Int`) of the source node.
+# * `"to"`: The ID (`Int`) of the destination node.
+#
+# TODO Refine the graph API instead when it will be available.
+class JsonGraph
+ super Jsonable
+
+ # All nodes in the graph.
+ var nodes: SimpleCollection[NeoNode] = new Array[NeoNode]
+
+ # All relationships in the graph.
+ var links: SimpleCollection[NeoEdge] = new Array[NeoEdge]
+
+ # Create an empty graph.
+ init do end
+
+ # Retrieve the graph from the specified JSON value.
+ #
+ # var graph = new JsonGraph
+ # var a = new NeoNode
+ # a.labels.add "Foo"
+ # a["answer"] = 42
+ # a["Ultimate question of"] = new JsonArray.from(["life",
+ # "the Universe", "and Everything."])
+ # graph.nodes.add a
+ # var b = new NeoNode
+ # b.labels.add "Foo"
+ # b.labels.add "Bar"
+ # graph.nodes.add b
+ # graph.links.add new NeoEdge(a, "BAZ", b)
+ # #
+ # graph = new JsonGraph.from_json(graph.to_json)
+ # assert 1 == graph.links.length
+ # for link in graph.links do
+ # assert "BAZ" == link.rel_type
+ # assert a.labels == link.from.labels
+ # for k, v in a.properties do assert v == link.from.properties[k]
+ # assert b.labels == link.to.labels
+ # for k, v in b.properties do assert v == link.to.properties[k]
+ # end
+ # assert 2 == graph.nodes.length
+ init from_json(t: Text) do
+ from_json_object(t.to_jsonable.as(JsonObject))
+ end
+
+ # Retrieve the graph from the specified JSON object.
+ init from_json_object(o: JsonObject) do
+ var node_by_id = new HashMap[Int, NeoNode]
+ var nodes = o["nodes"].as(JsonArray)
+ for json_node in nodes do
+ assert json_node isa JsonObject
+ var node = new NeoNode.from_json_object(json_node)
+ node_by_id[json_node["id"].as(Int)] = node
+ self.nodes.add node
+ end
+ var links = o["links"].as(JsonArray)
+ for json_link in links do
+ assert json_link isa JsonObject
+ var from = node_by_id[json_link["from"].as(Int)]
+ var to = node_by_id[json_link["to"].as(Int)]
+ var rel_type = json_link["type"].as(String)
+ var json_properties = json_link["properties"].as(JsonObject)
+ var link = new NeoEdge(from, rel_type, to)
+ link.properties.recover_with(json_properties)
+ self.links.add link
+ end
+ end
+
+ redef fun to_json do
+ var t = new Template
+ t.add "\{\"nodes\":["
+ var i = 0
+ for n in nodes do
+ if i > 0 then t.add ","
+ t.add n.to_json
+ i += 1
+ end
+ t.add "],\"links\":["
+ i = 0
+ for link in links do
+ if i > 0 then t.add ","
+ t.add link.to_json
+ i += 1
+ end
+ t.add "]\}"
+ return t.write_to_string
+ end
+end
+
+# Make `NeoNode` `Jsonable`.
+redef class NeoNode
+ super Jsonable
+
+ # Retrieve the node from the specified JSON value.
+ #
+ # Note: Here, the `"id"` is optional and ignored.
+ #
+ # SEE: `JsonGraph`
+ #
+ # var node = new NeoNode.from_json("""
+ # {
+ # "labels": ["foo", "Bar"],
+ # "properties": {
+ # "baz": 42
+ # }
+ # }
+ # """)
+ # assert ["foo", "Bar"] == node.labels
+ # assert 42 == node["baz"]
+ init from_json(t: Text) do
+ from_json_object(t.to_jsonable.as(JsonObject))
+ end
+
+ # Retrieve the node from the specified JSON value.
+ #
+ # Note: Here, the `"id"` is optional and ignored.
+ #
+ # SEE: `JsonGraph`
+ init from_json_object(o: JsonObject) do
+ init
+ var labels = o["labels"].as(JsonArray)
+ for lab in labels do self.labels.add(lab.as(String))
+ var json_properties = o["properties"].as(JsonObject)
+ properties.recover_with(json_properties)
+ end
+
+ # Get the JSON representation of `self`.
+ #
+ # SEE: `JsonGraph`
+ redef fun to_json do
+ var t = new Template
+ t.add "\{\"id\":"
+ t.add object_id.to_json
+ t.add ",\"labels\":["
+ var i = 0
+ for lab in labels do
+ if i > 0 then t.add ","
+ t.add lab.to_json
+ i += 1
+ end
+ t.add "],\"properties\":"
+ t.add properties.to_json
+ t.add "}"
+ return t.write_to_string
+ end
+
+ redef fun to_s do return to_json
+end
+
+# Make `NeoEdge` `Jsonable`.
+redef class NeoEdge
+ super Jsonable
+
+ redef fun to_json do
+ var t = new Template
+ t.add "\{\"type\":"
+ t.add rel_type.to_json
+ t.add ",\"properties\":"
+ t.add properties.to_json
+ t.add ",\"from\":"
+ t.add from.object_id.to_json
+ t.add ",\"to\":"
+ t.add to.object_id.to_json
+ t.add "}"
+ return t.write_to_string
+ end
+
+ redef fun to_s do return to_json
+end
# Perform a `CypherQuery`
# see: CypherQuery
fun cypher(query: CypherQuery): Jsonable do
- return post("{cypher_url}", query.to_json)
+ return post("{cypher_url}", query.to_rest)
end
# GET JSON data from `url`
return self
end
- # Translate the query to JSON
- fun to_json: JsonObject do
+ # Translate the query to the body of a corresponding Neo4j REST request.
+ fun to_rest: JsonObject do
var obj = new JsonObject
obj["query"] = query
if not params.is_empty then
return obj
end
- redef fun to_s do return to_json.to_s
+ redef fun to_s do return to_rest.to_s
end
# The fundamental units that form a graph are nodes and relationships.
# Is the property `key` set?
fun has_key(key: String): Bool do return properties.has_key(key)
-
- # Translate `self` to JSON
- fun to_json: JsonObject do return properties
end
# Nodes are used to represent entities stored in base.
var tpl = new FlatBuffer
tpl.append "\{"
tpl.append "labels: [{labels.join(", ")}],"
- tpl.append "data: {to_json}"
+ tpl.append "data: {properties.to_json}"
tpl.append "\}"
return tpl.write_to_string
end
# Get edge type
fun rel_type: nullable String do return internal_type
- redef fun to_json do
+ # Get the JSON body of a REST request that create the relationship.
+ private fun to_rest: JsonObject do
var obj = new JsonObject
if to.is_linked then
obj["to"] = to.url
else
job.to = "\{{edge.from.batch_id.to_s}\}/relationships"
end
- job.body = edge.to_json
+ job.body = edge.to_rest
end
# Create multiple edges
var request = new JsonPOST(client.batch_url, client.curl)
# request.headers["X-Stream"] = "true"
var json_jobs = new JsonArray
- for job in jobs.values do json_jobs.add job.to_json
+ for job in jobs.values do json_jobs.add job.to_rest
request.data = json_jobs
var response = request.execute
var res = client.parse_response(response)
var body: nullable Jsonable = null
# JSON formated job
- fun to_json: JsonObject do
+ fun to_rest: JsonObject do
var job = new JsonObject
job["id"] = id
job["method"] = method
res.append(other)
return res
end
+
+ # Repetition of arrays.
+ #
+ # returns a new array built by concatenating `self` `repeat` times.
+ #
+ # var a = [1,2,3]
+ # assert (a * 0).is_empty
+ # assert a * 1 == [1,2,3]
+ # assert a * 2 == [1,2,3,1,2,3]
+ # assert (a * 10).length == 30
+ fun *(repeat: Int): Array[E]
+ do
+ assert repeat >= 0
+ var res = new Array[E].with_capacity(length * repeat)
+ while repeat > 0 do
+ res.add_all(self)
+ repeat -= 1
+ end
+ return res
+ end
end
# An `Iterator` on `AbstractArray`
#
# assert not 13.is_even
fun is_odd: Bool do return not is_even
+
+ # Returns the `self` raised to the power of `e`.
+ #
+ # assert 2 ** 3 == 8
+ fun **(e: Int): Int
+ do
+ return self.to_f.pow(e.to_f).to_i
+ end
+
+ # The factorial of `self` (aka `self!`)
+ #
+ # Returns `1 * 2 * 3 * ... * self-1 * self`
+ #
+ # assert 0.factorial == 1 # by convention for an empty product
+ # assert 1.factorial == 1
+ # assert 4.factorial == 24
+ # assert 9.factorial == 362880
+ fun factorial: Int
+ do
+ assert self >= 0
+ var res = 1
+ var n = self
+ while n > 0 do
+ res = res * n
+ n -= 1
+ end
+ return res
+ end
end
redef class Float
# assert "\n\"'\\\{\}".escape_to_nit == "\\n\\\"\\'\\\\\\\{\\\}"
fun escape_to_nit: String do return escape_more_to_c("\{\}")
+ # Escape to POSIX Shell (sh).
+ #
+ # Abort if the text contains a null byte.
+ #
+ # assert "\n\"'\\\{\}0".escape_to_sh == "'\n\"'\\''\\\{\}0'"
+ fun escape_to_sh: String do
+ var b = new FlatBuffer
+ b.chars.add '\''
+ for i in [0..length[ do
+ var c = chars[i]
+ if c == '\'' then
+ b.append("'\\''")
+ else
+ assert without_null_byte: c != '\0'
+ b.add(c)
+ end
+ end
+ b.chars.add '\''
+ return b.to_s
+ end
+
# Return a string where Nit escape sequences are transformed.
#
# var s = "\\n"
--- /dev/null
+#!/bin/bash
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script is a wrapper for `nitester` which also manages a local repo
+#
+# The first argument _must_ be the hash of the commit at the head of the
+# branch to test. The other arguments are passed on to `nitester`.
+
+hash=$1
+shift
+
+set +x
+
+local_repo=nit/
+remote_repo=privat
+
+tools_dir=misc/jenkins/
+
+cd $local_repo
+git clean -fdxq .
+
+git fetch $remote_repo
+git checkout $hash
+
+# Make nitg and tools
+$tools_dir/unitrun.sh "run-make-0initial_make" make
+
+# Make nitester
+$tools_dir/unitrun.sh "run-make-nitester" make -C contrib/nitester/
+
+# Run tests
+cd tests
+mkdir -p out
+rm ~/jenkins_xml/*.xml
+mpirun -np 30 ../contrib/nitester/bin/nitester $@
name=$1
shift
+# Detect a working time command
+if env time --quiet -f%U true 2>/dev/null; then
+ TIME="env time --quiet -f%U -o '${name}.t.out'"
+elif env time -f%U true 2>/dev/null; then
+ TIME="env time -f%U -o '${name}.t.out'"
+else
+ TIME=
+fi
+
# Magic here! This tee and save both stdout and stderr in distinct files without messing with them
# Time just get the user time
-/usr/bin/time -f%U --quiet -o "${name}.t.out" "$@" > >(tee "${name}.out") 2> >(tee "${name}.2.out" >&2)
+$TIME "$@" > >(tee "${name}.out") 2> >(tee "${name}.2.out" >&2)
res=$?
c=`echo "${name%-*}" | tr "-" "."`
mtype.compile_extern_type(v, ccu)
# has callbacks already been compiled? (this may very well happen with global compilation)
- if mmodule.check_callback_compilation(mtype) then mtype.compile_extern_helper_functions(v, ccu)
+ mtype.compile_extern_helper_functions(v, ccu, mmodule.check_callback_compilation(mtype))
end
end
# compile callbacks
- for cb in foreign_callbacks.callbacks do if mainmodule.check_callback_compilation(cb) then
- cb.compile_extern_callback(v, ccu)
+ for cb in foreign_callbacks.callbacks do
+ cb.compile_extern_callback(v, ccu, mainmodule.check_callback_compilation(cb))
end
- for cb in foreign_callbacks.supers do if mainmodule.check_callback_compilation(cb) then
- cb.compile_extern_callback(v, ccu)
+ for cb in foreign_callbacks.supers do
+ cb.compile_extern_callback(v, ccu, mainmodule.check_callback_compilation(cb))
end
- for cb in foreign_callbacks.casts do if mainmodule.check_callback_compilation(cb) then
- cb.compile_extern_callbacks(v, ccu)
+ for cb in foreign_callbacks.casts do
+ cb.compile_extern_callbacks(v, ccu, mainmodule.check_callback_compilation(cb))
end
# manage nitni callback set
ccu.header_c_types.add("#endif\n")
end
- private fun compile_extern_helper_functions(v: AbstractCompilerVisitor, ccu: CCompilationUnit)
+ private fun compile_extern_helper_functions(v: AbstractCompilerVisitor, ccu: CCompilationUnit, compile_implementation_too: Bool)
do
# actually, we do not need to do anything when using the bohem garbage collector
var call_context = from_c_call_context
end
redef class MNullableType
- redef fun compile_extern_helper_functions(v, ccu)
+ redef fun compile_extern_helper_functions(v, ccu, compile_implementation_too)
do
super
# In nitni files, #define friendly as extern
ccu.header_decl.add("#define {base_cname} {full_cname}\n")
+ if not compile_implementation_too then return
+
# FIXME: This is ugly an broke the separate compilation principle
# The real function MUST be compiled only once, #define pragma only protect the compiler, not the loader
# However, I am not sure of the right approach here (eg. week refs are ugly)
end
redef class MExplicitCall
- private fun compile_extern_callback(v: AbstractCompilerVisitor, ccu: CCompilationUnit)
+ private fun compile_extern_callback(v: AbstractCompilerVisitor, ccu: CCompilationUnit, compile_implementation_too: Bool)
do
var mproperty = mproperty
assert mproperty isa MMethod
var full_friendly_csignature = mproperty.build_csignature(recv_mtype, v.compiler.mainmodule, null, long_signature, internal_call_context)
ccu.header_decl.add("extern {full_friendly_csignature};\n")
+ if not compile_implementation_too then return
+
# Internally, implement internal function
var nitni_visitor = v.compiler.new_visitor
nitni_visitor.frame = v.frame
end
redef class MExplicitSuper
- private fun compile_extern_callback(v: AbstractCompilerVisitor, ccu: CCompilationUnit)
+ private fun compile_extern_callback(v: AbstractCompilerVisitor, ccu: CCompilationUnit, compile_implementation_too: Bool)
do
var mproperty = from.mproperty
assert mproperty isa MMethod
var internal_cname = mproperty.build_cname(mclass_type, v.compiler.mainmodule, "___super", long_signature)
ccu.header_decl.add("#define {friendly_cname} {internal_cname}\n")
+ if not compile_implementation_too then return
+
# Internally, implement internal function
var nitni_visitor = v.compiler.new_visitor
nitni_visitor.frame = v.frame
end
redef class MExplicitCast
- private fun compile_extern_callbacks(v: AbstractCompilerVisitor, ccu: CCompilationUnit)
+ private fun compile_extern_callbacks(v: AbstractCompilerVisitor, ccu: CCompilationUnit, compile_implementation_too: Bool)
do
var from = from
var to = to
# In nitni files, #define friendly as extern
ccu.header_decl.add("#define {check_cname} {v.compiler.mainmodule.name}___{check_cname}\n")
- # Internally, implement internal function
- var nitni_visitor = v.compiler.new_visitor
- nitni_visitor.frame = v.frame
+ if compile_implementation_too then
+ # Internally, implement internal function
+ var nitni_visitor = v.compiler.new_visitor
+ nitni_visitor.frame = v.frame
- var full_internal_csignature = "int {v.compiler.mainmodule.name }___{from.mangled_cname}_is_a_{to.mangled_cname}({internal_call_context.name_mtype(from)} from)"
+ var full_internal_csignature = "int {v.compiler.mainmodule.name }___{from.mangled_cname}_is_a_{to.mangled_cname}({internal_call_context.name_mtype(from)} from)"
- nitni_visitor.add_decl("/* nitni check for {from} to {to} */")
- nitni_visitor.add_decl("{full_internal_csignature} \{")
+ nitni_visitor.add_decl("/* nitni check for {from} to {to} */")
+ nitni_visitor.add_decl("{full_internal_csignature} \{")
- #var from_var = new RuntimeVariable("from->value", from, from)
- var from_var = nitni_visitor.var_from_c("from", from)
- from_var = nitni_visitor.box_extern(from_var, from)
- var recv_var = nitni_visitor.type_test(from_var, to, "FFI isa")
- nitni_visitor.add("return {recv_var};")
+ var from_var = nitni_visitor.var_from_c("from", from)
+ from_var = nitni_visitor.box_extern(from_var, from)
+ var recv_var = nitni_visitor.type_test(from_var, to, "FFI isa")
+ nitni_visitor.add("return {recv_var};")
- nitni_visitor.add("\}")
+ nitni_visitor.add("\}")
+ end
# special checks
if from == to.as_nullable then
# In nitni files, #define friendly as extern
ccu.header_decl.add("#define {cast_cname} {v.compiler.mainmodule.name}___{cast_cname}\n")
- # Internally, implement internal function
- nitni_visitor = v.compiler.new_visitor
- nitni_visitor.frame = v.frame
+ if compile_implementation_too then
+ # Internally, implement internal function
+ var nitni_visitor = v.compiler.new_visitor
+ nitni_visitor.frame = v.frame
- full_internal_csignature = "{to.cname_blind} {v.compiler.mainmodule.name }___{from.mangled_cname}_as_{to.mangled_cname}({internal_call_context.name_mtype(from)} from)"
- nitni_visitor.add_decl("/* nitni cast for {from} to {to} */")
- nitni_visitor.add_decl("{full_internal_csignature} \{")
+ var full_internal_csignature = "{to.cname_blind} {v.compiler.mainmodule.name }___{from.mangled_cname}_as_{to.mangled_cname}({internal_call_context.name_mtype(from)} from)"
+ nitni_visitor.add_decl("/* nitni cast for {from} to {to} */")
+ nitni_visitor.add_decl("{full_internal_csignature} \{")
- from_var = nitni_visitor.var_from_c("from", from)
- from_var = nitni_visitor.box_extern(from_var, from)
+ var from_var = nitni_visitor.var_from_c("from", from)
+ from_var = nitni_visitor.box_extern(from_var, from)
- ## test type
- var check = nitni_visitor.type_test(from_var, to, "FFI cast")
- nitni_visitor.add("if (!{check}) \{")
- nitni_visitor.add_abort("FFI cast failed")
- nitni_visitor.add("\}")
+ ## test type
+ var check = nitni_visitor.type_test(from_var, to, "FFI cast")
+ nitni_visitor.add("if (!{check}) \{")
+ nitni_visitor.add_abort("FFI cast failed")
+ nitni_visitor.add("\}")
- ## internal cast
- recv_var = nitni_visitor.autobox(from_var, to)
- recv_var = nitni_visitor.unbox_extern(recv_var, to)
+ ## internal cast
+ var recv_var = nitni_visitor.autobox(from_var, to)
+ recv_var = nitni_visitor.unbox_extern(recv_var, to)
- nitni_visitor.ret_to_c(recv_var, to)
+ nitni_visitor.ret_to_c(recv_var, to)
- nitni_visitor.add("\}")
+ nitni_visitor.add("\}")
+ end
# special casts
if from.as_nullable == to then
end
# copy shared files
if ctx.opt_shareurl.value == null then
- sys.system("cp -r {sharedir.to_s}/* {output_dir.to_s}/")
+ sys.system("cp -r -- {sharedir.to_s.escape_to_sh}/* {output_dir.to_s.escape_to_sh}/")
else
- sys.system("cp -r {sharedir.to_s}/resources/ {output_dir.to_s}/resources/")
+ sys.system("cp -r -- {sharedir.to_s.escape_to_sh}/resources/ {output_dir.to_s.escape_to_sh}/resources/")
end
end
fun tpl_graph(dot: Buffer, name: String, title: nullable String): nullable TplArticle do
if ctx.opt_nodot.value then return null
var output_dir = ctx.output_dir
- var file = new OFStream.open("{output_dir}/{name}.dot")
+ var path = output_dir / name
+ var path_sh = path.escape_to_sh
+ var file = new OFStream.open("{path}.dot")
file.write(dot)
file.close
- sys.system("\{ test -f {output_dir}/{name}.png && test -f {output_dir}/{name}.s.dot && diff {output_dir}/{name}.dot {output_dir}/{name}.s.dot >/dev/null 2>&1 ; \} || \{ cp {output_dir}/{name}.dot {output_dir}/{name}.s.dot && dot -Tpng -o{output_dir}/{name}.png -Tcmapx -o{output_dir}/{name}.map {output_dir}/{name}.s.dot ; \}")
- var fmap = new IFStream.open("{output_dir}/{name}.map")
+ sys.system("\{ test -f {path_sh}.png && test -f {path_sh}.s.dot && diff -- {path_sh}.dot {path_sh}.s.dot >/dev/null 2>&1 ; \} || \{ cp -- {path_sh}.dot {path_sh}.s.dot && dot -Tpng -o{path_sh}.png -Tcmapx -o{path_sh}.map {path_sh}.s.dot ; \}")
+ var fmap = new IFStream.open("{path}.map")
var map = fmap.read_all
fmap.close
var alt = ""
if title != null then
article.title = title
- alt = "alt='{title}'"
+ alt = "alt='{title.html_escape}'"
end
article.css_classes.add "text-center"
var content = new Template
- content.add "<img src='{name}.png' usemap='#{name}' style='margin:auto' {alt}/>"
+ var name_html = name.html_escape
+ content.add "<img src='{name_html}.png' usemap='#{name_html}' style='margin:auto' {alt}/>"
content.add map
article.content = content
return article
dot.mprojects.add(g.mproject)
end
var projectpath = toolcontext.output_dir.join_path("project_hierarchy.dot")
- print "generating {projectpath}"
+ print "generating project_hierarchy.dot"
dot.write_to_file(projectpath)
var modulepath = toolcontext.output_dir.join_path("module_hierarchy.dot")
dot.mprojects.add_all(model.mprojects)
- print "generating {modulepath}"
+ print "generating module_hierarchy.dot"
dot.write_to_file(modulepath)
end
var file = toolcontext.opt_output.value
if file == null then file = "nitunit.xml"
page.write_to_file(file)
-print "Results saved in {file}"
# print docunits results
-print "\nDocUnits:"
+print "DocUnits:"
if modelbuilder.unit_entities == 0 then
print "No doc units found"
else if modelbuilder.failed_entities == 0 and not toolcontext.opt_noact.value then
#!/bin/sh
-printf "%s\n" "$@" *.nit \
+printf "%s\n" "$@" \
+ ../src/nit*.nit \
+ ../src/test_*.nit \
../examples/*.nit \
../examples/*/*.nit \
../examples/shoot/src/shoot_logic.nit \
../lib/*/examples/*.nit \
../contrib/friendz/src/solver_cmd.nit \
../contrib/pep8analysis/src/pep8analysis.nit \
- ../src/nit*.nit \
- ../src/test_*.nit
+ *.nit
Total size of tables (classes and instances): 38 (not including stuff like info for subtyping or call-next-method)
Average size of table by runtime class: 6.33
Values never redefined: 32 (84.21%)
-generating out/nitmetrics_args1.write/project_hierarchy.dot
-generating out/nitmetrics_args1.write/module_hierarchy.dot
+generating project_hierarchy.dot
+generating module_hierarchy.dot
# Inheritance metrics
test_test_nitunit.nit:36,2--40,4: ERROR: test_foo1 (in file .nitunit/test_test_nitunit_TestX_test_foo1.nit): Runtime error: Assert failed (test_test_nitunit.nit:39)
-Results saved in out/nitunit_args1.write
-
DocUnits:
Entities: 27; Documented ones: 3; With nitunits: 3; Failures: 2
eab
f.c
dgh
-Solved, after looking at 14 positions during 0.0s
+Solved, after looking at 14 positions
Solution in 10 moves: right(>) down(v) left(<) left(<) up(^) right(>) right(>) up(^) left(<) left(<)
--- /dev/null
+Hello from `a`.
--- /dev/null
+Hello from `a`.
+Hello from `b`.
--- /dev/null
+Hello from `a`.
--- /dev/null
+Hello from `a`.
+Hello from `b`.
-Runtime error: Cast failed. Expected `E`, got `Bool` (../lib/standard/collection/array.nit:769)
+Runtime error: Cast failed. Expected `E`, got `Bool` (../lib/standard/collection/array.nit:789)
NativeString
N
Nit
--- /dev/null
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+`{
+ #include <stdio.h>
+`}
+
+fun print_a(str: String) import String.to_cstring `{
+ puts(String_to_cstring(str));
+`}
+
+print_a "Hello from `a`."
--- /dev/null
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import test_ffi_c_duplicated_callback_a
+
+`{
+ #include <stdio.h>
+`}
+
+fun print_b(str: String) import String.to_cstring `{
+ puts(String_to_cstring(str));
+`}
+
+print_a "Hello from `a`."
+print_b "Hello from `b`."
--- /dev/null
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+in "C++ Header" `{
+ #include <stdio.h>
+`}
+
+fun print_a(str: String) import String.to_cstring in "C++" `{
+ puts(String_to_cstring(str));
+`}
+
+print_a "Hello from `a`."
--- /dev/null
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import test_ffi_cpp_duplicated_callback_a
+
+in "C++ header" `{
+ #include <stdio.h>
+`}
+
+fun print_b(str: String) import String.to_cstring in "C++" `{
+ puts(String_to_cstring(str));
+`}
+
+print_a "Hello from `a`."
+print_b "Hello from `b`."
if [ "x$XMLDIR" = "x" ]; then
xml="tests-$engine.xml"
else
- xml="$XMLDIR/tests-$engine.xml"
+ sum=`echo $@ | md5sum | cut -f1 -d " "`
+ xml="$XMLDIR/tests-$engine-$sum.xml"
+ mkdir -p "$XMLDIR"
fi
echo >$xml "<testsuites><testsuite>"