# Services for caching serialization engines
module caching
-import serialization
+import serialization_core
private import engine_tools
# Cache of sent objects, used by a `Serializer` to serialize each object only once,
# then using a reference.
class SerializerCache
# Map of already serialized objects to the reference id
- private var sent: Map[Serializable, Int] = new StrictHashMap[Serializable, Int]
+ protected var sent: Map[Serializable, Int] = new StrictHashMap[Serializable, Int]
# Is `object` known?
fun has_object(object: Serializable): Bool do return sent.keys.has(object)
# Require: `not has_object(object)`
fun new_id_for(object: Serializable): Int
do
- var id = sent.length
+ var id = next_available_id
sent[object] = id
return id
end
+
+ # Get a free id to associate to an object in the cache
+ protected fun next_available_id: Int do return sent.length
end
# Cache of received objects sorted by their reference id
# Used by `Deserializer` to find already deserialized objects by their reference.
class DeserializerCache
# Map of references to already deserialized objects.
- private var received: Map[Int, Object] = new StrictHashMap[Int, Object]
+ protected var received: Map[Int, Object] = new StrictHashMap[Int, Object]
# Is there an object associated to `id`?
fun has_id(id: Int): Bool do return received.keys.has(id)
sent[object] = id
end
end
+
+# A shared cache where 2 clients serialize objects at the same time, preventing reference collisions
+class AsyncCache
+ super DuplexCache
+
+ # Should this end use even numbers?
+ var use_even: Bool
+
+ private var last_id: Int is lazy do return if use_even then 0 else 1
+
+ redef fun next_available_id
+ do
+ last_id += 2
+ return last_id
+ end
+end