diff --git a/core/model-api/src/main/java/org/eclipse/rdf4j/model/ModelFactory.java b/core/model-api/src/main/java/org/eclipse/rdf4j/model/ModelFactory.java
index 99a9c96a880..5996354458d 100644
--- a/core/model-api/src/main/java/org/eclipse/rdf4j/model/ModelFactory.java
+++ b/core/model-api/src/main/java/org/eclipse/rdf4j/model/ModelFactory.java
@@ -7,6 +7,8 @@
*******************************************************************************/
package org.eclipse.rdf4j.model;
+import java.util.Collection;
+
/**
* Factory to create empty {@link Model} implementations.
*
@@ -19,4 +21,10 @@ public interface ModelFactory {
*/
Model createEmptyModel();
+ default Model createFrom(Collection<? extends Statement> collection) {
+ Model newModel = createEmptyModel();
+ newModel.addAll(collection);
+ return newModel;
+ }
+
}
diff --git a/core/model/src/main/java/org/eclipse/rdf4j/model/impl/DynamicModel.java b/core/model/src/main/java/org/eclipse/rdf4j/model/impl/DynamicModel.java
index 01d417fb973..eda3d0f50ef 100644
--- a/core/model/src/main/java/org/eclipse/rdf4j/model/impl/DynamicModel.java
+++ b/core/model/src/main/java/org/eclipse/rdf4j/model/impl/DynamicModel.java
@@ -10,7 +10,7 @@
import java.util.AbstractSet;
import java.util.Collection;
-import java.util.Collections;
+import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
@@ -35,7 +35,7 @@
* retrieving and removing data. The model will upgrade to a full model (provided by the modelFactory) if more complex
* operations are called, for instance removing data according to a pattern (eg. all statements with rdf:type as
* predicate).
- *
+ *
* DynamicModel is thread safe to the extent that the underlying LinkedHashMap or Model is. The upgrade path is
* protected by the actual upgrade method being synchronized. The LinkedHashMap storage is not removed once upgraded, so
* concurrent reads that have started reading from the LinkedHashMap can continue to read even during an upgrade. We do
@@ -49,10 +49,10 @@ public class DynamicModel extends AbstractSet<Statement> implements Model {
private static final Resource[] NULL_CTX = new Resource[] { null };
- private Map<Statement, Statement> statements = new LinkedHashMap<>();
+ private volatile Map<Statement, Statement> statements = new LinkedHashMap<>();
final Set<Namespace> namespaces = new LinkedHashSet<>();
- volatile private Model model = null;
+ private volatile Model model = null;
private final ModelFactory modelFactory;
@@ -115,11 +115,15 @@ public boolean add(Resource subj, IRI pred, Value obj, Resource... contexts) {
}
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
+
boolean added = false;
for (Resource context : contexts) {
Statement statement = SimpleValueFactory.getInstance().createStatement(subj, pred, obj, context);
- added = added
- | statements.put(statement, statement) == null;
+ added = added | statements.put(statement, statement) == null;
}
return added;
} else {
@@ -140,6 +144,11 @@ public boolean remove(Resource subj, IRI pred, Value obj, Resource... contexts)
}
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
+
boolean removed = false;
for (Resource context : contexts) {
removed = removed
@@ -185,6 +194,11 @@ public Set<Resource> contexts() {
@Override
public int size() {
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
+
return statements.size();
}
return model.size();
@@ -193,6 +207,11 @@ public int size() {
@Override
public boolean isEmpty() {
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
+
return statements.isEmpty();
}
return model.isEmpty();
@@ -201,6 +220,11 @@ public boolean isEmpty() {
@Override
public boolean contains(Object o) {
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
+
return statements.containsKey(o);
}
return model.contains(o);
@@ -209,6 +233,10 @@ public boolean contains(Object o) {
@Override
public Iterator<Statement> iterator() {
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
return statements.values().iterator();
}
@@ -218,6 +246,10 @@ public Iterator<Statement> iterator() {
@Override
public Object[] toArray() {
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
return statements.values().toArray();
}
return model.toArray();
@@ -226,6 +258,10 @@ public Object[] toArray() {
@Override
public <T> T[] toArray(T[] a) {
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
return statements.values().toArray(a);
}
return model.toArray(a);
@@ -235,6 +271,10 @@ public <T> T[] toArray(T[] a) {
public boolean add(Statement statement) {
Objects.requireNonNull(statement);
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
return statements.put(statement, statement) == null;
}
return model.add(statement);
@@ -244,6 +284,10 @@ public boolean add(Statement statement) {
public boolean remove(Object o) {
Objects.requireNonNull(o);
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
return statements.remove(o) != null;
}
return model.remove(o);
@@ -253,6 +297,10 @@ public boolean remove(Object o) {
public boolean containsAll(Collection<?> c) {
Objects.requireNonNull(c);
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
return statements.keySet().containsAll(c);
}
return model.containsAll(c);
@@ -264,6 +312,10 @@ public boolean addAll(Collection<? extends Statement> c) {
if (model == null) {
return c.stream()
.map(s -> {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
Objects.requireNonNull(s);
return statements.put(s, s) == null;
})
@@ -276,6 +328,10 @@ public boolean addAll(Collection<? extends Statement> c) {
@Override
public boolean retainAll(Collection<?> c) {
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
return statements.keySet().retainAll(c);
}
return model.retainAll(c);
@@ -284,6 +340,10 @@ public boolean removeAll(Collection<?> c) {
@Override
public boolean removeAll(Collection<?> c) {
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
return c
.stream()
.map(statements::remove)
@@ -297,6 +357,10 @@ public boolean removeAll(Collection<?> c) {
@Override
public void clear() {
if (model == null) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
statements.clear();
} else {
model.clear();
@@ -307,6 +371,11 @@ public void clear() {
public Iterable<Statement> getStatements(Resource subject, IRI predicate, Value object, Resource... contexts) {
if (model == null && subject != null && predicate != null && object != null && contexts != null
&& contexts.length == 1) {
+ Map<Statement, Statement> statements = this.statements;
+ if (statements == null) {
+ throw new ConcurrentModificationException();
+ }
+
Statement statement = SimpleValueFactory.getInstance()
.createStatement(subject, predicate, object, contexts[0]);
Statement foundStatement = statements.get(statement);
@@ -331,12 +400,11 @@ private void upgrade() {
synchronized private void synchronizedUpgrade() {
if (model == null) {
- // make statements unmodifiable first, to increase chance of an early failure if the user is doing
- // concurrent write with reads
- statements = Collections.unmodifiableMap(statements);
- Model tempModel = modelFactory.createEmptyModel();
- tempModel.addAll(statements.values());
- model = tempModel;
+ // Increase the chance of detecting if the user is doing concurrent write with reads by setting the
+ // statements field to null while we upgrade.
+ Map<Statement, Statement> statements = this.statements;
+ this.statements = null;
+ model = modelFactory.createFrom(statements.values());
}
}
diff --git a/core/model/src/main/java/org/eclipse/rdf4j/model/impl/DynamicModelFactory.java b/core/model/src/main/java/org/eclipse/rdf4j/model/impl/DynamicModelFactory.java
index 86fc4f3c399..2757ebab702 100644
--- a/core/model/src/main/java/org/eclipse/rdf4j/model/impl/DynamicModelFactory.java
+++ b/core/model/src/main/java/org/eclipse/rdf4j/model/impl/DynamicModelFactory.java
@@ -11,7 +11,6 @@
/**
* Creates {@link DynamicModel}.
- *
*/
public class DynamicModelFactory implements ModelFactory {
diff --git a/core/model/src/main/java/org/eclipse/rdf4j/model/impl/LinkedHashModel.java b/core/model/src/main/java/org/eclipse/rdf4j/model/impl/LinkedHashModel.java
index 9c31f26a18c..33d4f2d90d3 100644
--- a/core/model/src/main/java/org/eclipse/rdf4j/model/impl/LinkedHashModel.java
+++ b/core/model/src/main/java/org/eclipse/rdf4j/model/impl/LinkedHashModel.java
@@ -20,6 +20,7 @@
import java.util.Optional;
import java.util.Set;
+import org.eclipse.rdf4j.common.annotation.InternalUseOnly;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.Namespace;
@@ -42,7 +43,6 @@
* "wrapped" using the * Models.synchronizedModel method.
*
*
- *
* @author James Leigh
*/
@SuppressWarnings("unchecked")
@@ -50,20 +50,20 @@ public class LinkedHashModel extends AbstractModel {
private static final long serialVersionUID = -9161104123818983614L;
- static final Resource[] NULL_CTX = new Resource[] { null };
+ private static final Resource[] NULL_CTX = new Resource[] { null };
- final Set<Namespace> namespaces = new LinkedHashSet<>();
+ private final Set<Namespace> namespaces = new LinkedHashSet<>();
- transient Map<Value, ModelNode> values;
+ private transient Map<Value, ModelNode> values;
- transient Set<ModelStatement> statements;
+ private transient Set<LinkedHashModelStatement> statements;
public LinkedHashModel() {
this(128);
}
public LinkedHashModel(Model model) {
- this(model.getNamespaces());
+ this(model.getNamespaces(), model.size());
addAll(model);
}
@@ -129,7 +129,7 @@ public void setNamespace(Namespace namespace) {
@Override
public Optional<Namespace> removeNamespace(String prefix) {
Optional<Namespace> result = getNamespace(prefix);
- result.ifPresent(namespace -> namespaces.remove(namespace));
+ result.ifPresent(namespaces::remove);
return result;
}
@@ -149,9 +149,7 @@ public boolean add(Statement statement) {
ModelNode p = asNode(pred);
ModelNode o = asNode(obj);
ModelNode c = asNode(context);
- ModelStatement modelStatement = new ModelStatement(s, p, o, c, statement);
- return addModelStatement(modelStatement);
-
+ return addModelStatement(new LinkedHashModelStatement(s, p, o, c, statement));
}
@Override
@@ -169,7 +167,7 @@ public boolean add(Resource subj, IRI pred, Value obj, Resource... contexts) {
ModelNode p = asNode(pred);
ModelNode o = asNode(obj);
ModelNode c = asNode((Resource) ctx);
- ModelStatement st = new ModelStatement(s, p, o, c);
+ LinkedHashModelStatement st = new LinkedHashModelStatement(s, p, o, c);
changed |= addModelStatement(st);
}
return changed;
@@ -184,7 +182,7 @@ public void clear() {
@Override
public boolean contains(Object o) {
if (o instanceof Statement) {
- if (o instanceof ModelStatement) {
+ if (o instanceof LinkedHashModelStatement) {
return statements.contains(o);
}
@@ -209,7 +207,7 @@ public boolean remove(Resource subj, IRI pred, Value obj, Resource... contexts)
return false;
}
- Iterator iter = matchPattern(subj, pred, obj, contexts);
+ var iter = matchPattern(subj, pred, obj, contexts);
if (!iter.hasNext()) {
return false;
}
@@ -241,12 +239,12 @@ protected void removeFilteredTermIteration(Iterator iter, Resource su
@Override
public void removeTermIteration(Iterator iterator, Resource subj, IRI pred, Value obj, Resource... contexts) {
- Set<ModelStatement> owner = ((ModelIterator) iterator).getOwner();
- Set<ModelStatement> chosen = choose(subj, pred, obj, contexts);
- Iterator<ModelStatement> iter = chosen.iterator();
+ Set<LinkedHashModelStatement> owner = ((ModelIterator) iterator).getOwner();
+ Set<LinkedHashModelStatement> chosen = choose(subj, pred, obj, contexts);
+ Iterator<LinkedHashModelStatement> iter = chosen.iterator();
iter = new PatternIterator<>(iter, subj, pred, obj, contexts);
while (iter.hasNext()) {
- ModelStatement last = iter.next();
+ LinkedHashModelStatement last = iter.next();
if (statements == owner) {
statements = new LinkedHashSet<>(statements);
statements.remove(last);
@@ -288,31 +286,31 @@ public boolean isEmpty() {
return statements.isEmpty();
}
- private class ModelIterator implements Iterator<ModelStatement> {
+ private class ModelIterator implements Iterator<LinkedHashModelStatement> {
- private final Iterator<ModelStatement> iter;
+ private final Iterator<LinkedHashModelStatement> iterator;
- private final Set<ModelStatement> owner;
+ private final Set<LinkedHashModelStatement> owner;
- private ModelStatement last;
+ private LinkedHashModelStatement last;
- public ModelIterator(Iterator<ModelStatement> iter, Set<ModelStatement> owner) {
- this.iter = iter;
+ public ModelIterator(Iterator<LinkedHashModelStatement> iterator, Set<LinkedHashModelStatement> owner) {
+ this.iterator = iterator;
this.owner = owner;
}
- public Set<ModelStatement> getOwner() {
+ public Set<LinkedHashModelStatement> getOwner() {
return owner;
}
@Override
public boolean hasNext() {
- return iter.hasNext();
+ return iterator.hasNext();
}
@Override
- public ModelStatement next() {
- return last = iter.next();
+ public LinkedHashModelStatement next() {
+ return last = iterator.next();
}
@Override
@@ -325,10 +323,10 @@ public void remove() {
removeFrom(last.pred.predicates);
removeFrom(last.obj.objects);
removeFrom(last.ctx.contexts);
- iter.remove(); // remove from owner
+ iterator.remove(); // remove from owner
}
- private void removeFrom(Set<ModelStatement> subjects) {
+ private void removeFrom(Set<LinkedHashModelStatement> subjects) {
if (subjects != owner) {
subjects.remove(last);
}
@@ -339,13 +337,10 @@ private static class ModelNode<V extends Value> implements Serializable {
private static final long serialVersionUID = -1205676084606998540L;
- Set<ModelStatement> subjects = new LinkedHashSet<>();
-
- Set<ModelStatement> predicates = new LinkedHashSet<>();
-
- Set<ModelStatement> objects = new LinkedHashSet<>();
-
- Set<ModelStatement> contexts = new LinkedHashSet<>();
+ private Set<LinkedHashModelStatement> subjects = new LinkedHashSet<>();
+ private Set<LinkedHashModelStatement> predicates = new LinkedHashSet<>();
+ private Set<LinkedHashModelStatement> objects = new LinkedHashSet<>();
+ private Set<LinkedHashModelStatement> contexts = new LinkedHashSet<>();
private final V value;
@@ -358,29 +353,32 @@ public V getValue() {
}
}
- public static class ModelStatement extends ContextStatement {
+ @InternalUseOnly
+ public static class LinkedHashModelStatement extends GenericStatement<Resource, IRI, Value> {
private static final long serialVersionUID = 2200404772364346279L;
- private Statement statement;
-
- ModelNode<Resource> subj;
- ModelNode<IRI> pred;
+ private final ModelNode<Resource> subj;
+ private final ModelNode<IRI> pred;
+ private final ModelNode<Value> obj;
+ private final ModelNode<Resource> ctx;
- ModelNode<Value> obj;
+ private final Statement statement;
- ModelNode<Resource> ctx;
+ private final int hashCode;
- public ModelStatement(ModelNode<Resource> subj, ModelNode<IRI> pred, ModelNode<Value> obj,
+ public LinkedHashModelStatement(ModelNode<Resource> subj, ModelNode<IRI> pred, ModelNode<Value> obj,
ModelNode<Resource> ctx) {
super(subj.getValue(), pred.getValue(), obj.getValue(), ctx.getValue());
this.subj = subj;
this.pred = pred;
this.obj = obj;
this.ctx = ctx;
+ this.statement = null;
+ this.hashCode = calculateHashCode();
}
- public ModelStatement(ModelNode<Resource> subj, ModelNode<IRI> pred, ModelNode<Value> obj,
+ public LinkedHashModelStatement(ModelNode<Resource> subj, ModelNode<IRI> pred, ModelNode<Value> obj,
ModelNode<Resource> ctx, Statement statement) {
super(subj.getValue(), pred.getValue(), obj.getValue(), ctx.getValue());
this.subj = subj;
@@ -388,48 +386,56 @@ public ModelStatement(ModelNode<Resource> subj, ModelNode<IRI> pred, ModelNode<Value> obj,
- Set<ModelStatement> set = choose(subj, pred, obj, contexts);
- Iterator<ModelStatement> it = set.iterator();
- Iterator<ModelStatement> iter;
+ Set<LinkedHashModelStatement> set = choose(subj, pred, obj, contexts);
+ Iterator<LinkedHashModelStatement> it = set.iterator();
+ Iterator<LinkedHashModelStatement> iter;
iter = new PatternIterator<>(it, subj, pred, obj, contexts);
return new ModelIterator(iter, set);
}
- private Set<ModelStatement> choose(Resource subj, IRI pred, Value obj, Resource... contexts) {
+ private Set<LinkedHashModelStatement> choose(Resource subj, IRI pred, Value obj, Resource... contexts) {
contexts = notNull(contexts);
- Set<ModelStatement> s = null;
- Set<ModelStatement> p = null;
- Set<ModelStatement> o = null;
+ Set<LinkedHashModelStatement> s = null;
+ Set<LinkedHashModelStatement> p = null;
+ Set<LinkedHashModelStatement> o = null;
if (subj != null) {
- ModelNode modelNode = values.get(subj);
+ var modelNode = values.get(subj);
if (modelNode == null) {
return Collections.emptySet();
}
s = modelNode.subjects;
}
if (pred != null) {
- ModelNode modelNode = values.get(pred);
+ var modelNode = values.get(pred);
if (modelNode == null) {
return Collections.emptySet();
}
p = modelNode.predicates;
}
if (obj != null) {
- ModelNode modelNode = values.get(obj);
+ var modelNode = values.get(obj);
if (modelNode == null) {
return Collections.emptySet();
}
o = modelNode.objects;
}
if (contexts.length == 1) {
- ModelNode modelNode = values.get(contexts[0]);
+ var modelNode = values.get(contexts[0]);
if (modelNode == null) {
return Collections.emptySet();
}
- Set<ModelStatement> c = modelNode.contexts;
+ Set<LinkedHashModelStatement> c = modelNode.contexts;
return smallest(statements, s, p, o, c);
} else {
return smallest(statements, s, p, o);
@@ -514,7 +520,7 @@ private Resource[] notNull(Resource[] contexts) {
return contexts;
}
- private Iterator<ModelStatement> find(Statement st) {
+ private Iterator<LinkedHashModelStatement> find(Statement st) {
Resource subj = st.getSubject();
IRI pred = st.getPredicate();
Value obj = st.getObject();
@@ -522,11 +528,11 @@ private Iterator find(Statement st) {
return matchPattern(subj, pred, obj, ctx);
}
- private boolean addModelStatement(ModelStatement st) {
- Set<ModelStatement> subj = st.subj.subjects;
- Set<ModelStatement> pred = st.pred.predicates;
- Set<ModelStatement> obj = st.obj.objects;
- Set<ModelStatement> ctx = st.ctx.contexts;
+ private boolean addModelStatement(LinkedHashModelStatement st) {
+ Set<LinkedHashModelStatement> subj = st.subj.subjects;
+ Set<LinkedHashModelStatement> pred = st.pred.predicates;
+ Set<LinkedHashModelStatement> obj = st.obj.objects;
+ Set<LinkedHashModelStatement> ctx = st.ctx.contexts;
if (smallest(subj, pred, obj, ctx).contains(st)) {
return false;
}
@@ -538,10 +544,10 @@ private boolean addModelStatement(ModelStatement st) {
return true;
}
- private Set<ModelStatement> smallest(Set<ModelStatement>... sets) {
+ private Set<LinkedHashModelStatement> smallest(Set<LinkedHashModelStatement>... sets) {
int minSize = Integer.MAX_VALUE;
- Set<ModelStatement> minSet = null;
- for (Set<ModelStatement> set : sets) {
+ Set<LinkedHashModelStatement> minSet = null;
+ for (Set<LinkedHashModelStatement> set : sets) {
if (set != null && set.size() < minSize) {
minSet = set;
minSize = set.size();
@@ -559,4 +565,29 @@ private <V extends Value> ModelNode<V> asNode(V value) {
values.put(value, node);
return node;
}
+
+ /**
+ * This class was only made public in order to improve the performance of the MemoryStore. It has been replaced by
+ * LinkedHashModelStatement which is for {@link InternalUseOnly}.
+ */
+ @Deprecated(since = "4.1.0", forRemoval = true)
+ public static class ModelStatement extends ContextStatement {
+
+ public ModelStatement(ModelNode<Resource> subj, ModelNode<IRI> pred, ModelNode<Value> obj,
+ ModelNode<Resource> ctx) {
+ super(subj.getValue(), pred.getValue(), obj.getValue(), ctx.getValue());
+ throw new IllegalStateException("This class is no longer in use!");
+ }
+
+ public ModelStatement(ModelNode<Resource> subj, ModelNode<IRI> pred, ModelNode<Value> obj,
+ ModelNode<Resource> ctx, Statement statement) {
+ super(subj.getValue(), pred.getValue(), obj.getValue(), ctx.getValue());
+ throw new IllegalStateException("This class is no longer in use!");
+ }
+
+ public Statement getStatement() {
+ throw new IllegalStateException("This class is no longer in use!");
+ }
+ }
+
}
diff --git a/core/model/src/main/java/org/eclipse/rdf4j/model/impl/LinkedHashModelFactory.java b/core/model/src/main/java/org/eclipse/rdf4j/model/impl/LinkedHashModelFactory.java
index c1da27f4dc0..b8649f7ff01 100644
--- a/core/model/src/main/java/org/eclipse/rdf4j/model/impl/LinkedHashModelFactory.java
+++ b/core/model/src/main/java/org/eclipse/rdf4j/model/impl/LinkedHashModelFactory.java
@@ -8,8 +8,11 @@
package org.eclipse.rdf4j.model.impl;
import java.io.Serializable;
+import java.util.Collection;
+import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.ModelFactory;
+import org.eclipse.rdf4j.model.Statement;
/**
* Creates {@link LinkedHashModel}.
@@ -25,4 +28,9 @@ public LinkedHashModel createEmptyModel() {
return new LinkedHashModel();
}
+ @Override
+ public Model createFrom(Collection<? extends Statement> collection) {
+ return new LinkedHashModel(collection);
+ }
+
}
diff --git a/core/model/src/main/java/org/eclipse/rdf4j/model/impl/TreeModelFactory.java b/core/model/src/main/java/org/eclipse/rdf4j/model/impl/TreeModelFactory.java
index 28c494badf3..bcc8a31ad07 100644
--- a/core/model/src/main/java/org/eclipse/rdf4j/model/impl/TreeModelFactory.java
+++ b/core/model/src/main/java/org/eclipse/rdf4j/model/impl/TreeModelFactory.java
@@ -7,7 +7,11 @@
*******************************************************************************/
package org.eclipse.rdf4j.model.impl;
+import java.util.Collection;
+
+import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.ModelFactory;
+import org.eclipse.rdf4j.model.Statement;
/**
* Creates {@link TreeModel}.
@@ -21,4 +25,9 @@ public TreeModel createEmptyModel() {
return new TreeModel();
}
+ @Override
+ public Model createFrom(Collection<? extends Statement> collection) {
+ return new TreeModel(collection);
+ }
+
}
diff --git a/core/model/src/test/java/org/eclipse/rdf4j/model/impl/DynamicModelConcurrentModificationAndUpgradeTest.java b/core/model/src/test/java/org/eclipse/rdf4j/model/impl/DynamicModelConcurrentModificationAndUpgradeTest.java
index 3e2033225dc..f363eff1eea 100644
--- a/core/model/src/test/java/org/eclipse/rdf4j/model/impl/DynamicModelConcurrentModificationAndUpgradeTest.java
+++ b/core/model/src/test/java/org/eclipse/rdf4j/model/impl/DynamicModelConcurrentModificationAndUpgradeTest.java
@@ -13,6 +13,7 @@
import java.util.Arrays;
import java.util.Collection;
+import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CountDownLatch;
@@ -37,12 +38,12 @@ public void testConcurrentAddAndUpgrade() throws InterruptedException {
Exception exception = runTest();
if (exception != null) {
- assertThat(exception).isInstanceOf(UnsupportedOperationException.class);
+ assertThat(exception).isInstanceOf(ConcurrentModificationException.class);
return;
}
}
- fail("There should have been an UnsupportedOperationException earlier");
+ fail("There should have been a ConcurrentModificationException earlier");
}
diff --git a/core/sail/memory/src/main/java/org/eclipse/rdf4j/sail/memory/MemorySailStore.java b/core/sail/memory/src/main/java/org/eclipse/rdf4j/sail/memory/MemorySailStore.java
index 9bba1e6d30b..51cb02c348d 100644
--- a/core/sail/memory/src/main/java/org/eclipse/rdf4j/sail/memory/MemorySailStore.java
+++ b/core/sail/memory/src/main/java/org/eclipse/rdf4j/sail/memory/MemorySailStore.java
@@ -780,11 +780,12 @@ private void innerDeprecate(Statement statement, int nextSnapshot) {
&& toDeprecate.isExplicit() == explicit) {
toDeprecate.setTillSnapshot(nextSnapshot);
}
- } else if (statement instanceof LinkedHashModel.ModelStatement
- && ((LinkedHashModel.ModelStatement) statement).getStatement() instanceof MemStatement) {
+ } else if (statement instanceof LinkedHashModel.LinkedHashModelStatement
+ && ((LinkedHashModel.LinkedHashModelStatement) statement).getStatement() instanceof MemStatement) {
// The Changeset uses a LinkedHashModel to store it's changes. It still keeps a reference to the
// original statement that can be retrieved here.
- MemStatement toDeprecate = (MemStatement) ((LinkedHashModel.ModelStatement) statement).getStatement();
+ MemStatement toDeprecate = (MemStatement) ((LinkedHashModel.LinkedHashModelStatement) statement)
+ .getStatement();
if ((nextSnapshot < 0 || toDeprecate.isInSnapshot(nextSnapshot))
&& toDeprecate.isExplicit() == explicit) {
toDeprecate.setTillSnapshot(nextSnapshot);