import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
+import com.google.common.base.Predicate;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Collections2;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AsyncFunction;
import com.google.common.util.concurrent.CheckedFuture;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.ListenableFuture;
import java.net.URI;
import java.util.Collection;
+import java.util.Collections;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.Set;
import java.util.TreeMap;
+import javax.annotation.Nullable;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.opendaylight.yangtools.util.concurrent.ExceptionMapper;
final Map<SourceIdentifier, ParserRuleContext> asts =
Maps.transformValues(srcs, ASTSchemaSource.GET_AST);
final Map<String, TreeMap<Date, URI>> namespaceContext = BuilderUtils.createYangNamespaceContext(
- asts.values(), Optional.<SchemaContext> absent());
+ asts.values(), Optional.<SchemaContext>absent());
final ParseTreeWalker walker = new ParseTreeWalker();
final Map<SourceIdentifier, ModuleBuilder> sourceToBuilder = new LinkedHashMap<>();
- for (Entry<SourceIdentifier, ParserRuleContext> entry : asts.entrySet()) {
- ModuleBuilder moduleBuilder = YangParserListenerImpl.create(namespaceContext, entry.getKey().getName(),
+ for (final Entry<SourceIdentifier, ParserRuleContext> entry : asts.entrySet()) {
+ final ModuleBuilder moduleBuilder = YangParserListenerImpl.create(namespaceContext, entry.getKey().getName(),
walker, entry.getValue()).getModuleBuilder();
moduleBuilder.setSource(srcs.get(entry.getKey()).getYangText());
// FIXME: ignored right now
private final SchemaSourceFilter filter;
+ // FIXME SchemaRepository should be the type for repository parameter instead of SharedSchemaRepository (final implementation)
public SharedSchemaContextFactory(final SharedSchemaRepository repository, final SchemaSourceFilter filter) {
this.repository = Preconditions.checkNotNull(repository);
this.filter = Preconditions.checkNotNull(filter);
+ /**
+ * Resolves a {@link SchemaContext} for the requested sources. The request is
+ * first de-duplicated (order preserved) so that equivalent requests share one
+ * cache entry; a cache hit short-circuits resolution entirely. On a miss, all
+ * sources are loaded asynchronously, checked for requested-vs-actual
+ * identifier mismatches, assembled into a context, and cached on success.
+ */
@Override
public CheckedFuture<SchemaContext, SchemaResolutionException> createSchemaContext(final Collection<SourceIdentifier> requiredSources) {
- final SchemaContext existing = cache.getIfPresent(requiredSources);
+ // Make sources unique
+ final List<SourceIdentifier> uniqueSourceIdentifiers = deDuplicateSources(requiredSources);
+
+ final SchemaContext existing = cache.getIfPresent(uniqueSourceIdentifiers);
if (existing != null) {
LOG.debug("Returning cached context {}", existing);
return Futures.immediateCheckedFuture(existing);
}
// Request all sources be loaded
- final ListenableFuture<List<ASTSchemaSource>> sf = Futures.allAsList(Collections2.transform(requiredSources, requestSources));
+ ListenableFuture<List<ASTSchemaSource>> sf = Futures.allAsList(Collections2.transform(uniqueSourceIdentifiers, requestSources));
+
+ // Detect mismatch between requested Source IDs and IDs that are extracted from parsed source
+ // Also remove duplicates if present
+ // We are relying on preserved order of uniqueSourceIdentifiers as well as sf
+ sf = Futures.transform(sf, new SourceIdMismatchDetector(uniqueSourceIdentifiers));
// Assemble sources into a schema context
final ListenableFuture<SchemaContext> cf = Futures.transform(sf, assembleSources);
Futures.addCallback(cf, new FutureCallback<SchemaContext>() {
@Override
public void onSuccess(final SchemaContext result) {
+ // Key the cache by the de-duplicated identifier list so identical
+ // requests (regardless of duplicates) hit the same entry
- cache.put(requiredSources, result);
+ cache.put(uniqueSourceIdentifiers, result);
}
@Override
public void onFailure(final Throwable t) {
+ // Debug level only: the failure is propagated to callers through the
+ // returned CheckedFuture (mapped to SchemaResolutionException by MAPPER)
- LOG.info("Failed to assemble sources", t);
+ LOG.debug("Failed to assemble sources", t);
}
});
return Futures.makeChecked(cf, MAPPER);
}
-}
\ No newline at end of file
+
+ /**
+ * Removes duplicate source identifiers while preserving the original request
+ * order (the first occurrence of each identifier wins, via LinkedHashSet).
+ * When duplicates are found, logs a warning listing exactly the identifiers
+ * that were requested more than once.
+ *
+ * @param requiredSources requested identifiers, possibly containing duplicates
+ * @return order-preserving list of unique source identifiers
+ */
+ private List<SourceIdentifier> deDuplicateSources(final Collection<SourceIdentifier> requiredSources) {
+ final Set<SourceIdentifier> uniqueSourceIdentifiers = Collections.unmodifiableSet(Sets.newLinkedHashSet(requiredSources));
+ if(uniqueSourceIdentifiers.size() != requiredSources.size()) {
+ // Size difference means at least one duplicate; report only the repeated ones
+ LOG.warn("Duplicate sources requested for schema context, removed duplicate sources: {}", Collections2.filter(uniqueSourceIdentifiers, new Predicate<SourceIdentifier>() {
+ @Override
+ public boolean apply(@Nullable final SourceIdentifier input) {
+ return Iterables.frequency(requiredSources, input) > 1;
+ }
+ }));
+ }
+ return Lists.newArrayList(uniqueSourceIdentifiers);
+ }
+
+ /**
+ * Pairs each parsed {@link ASTSchemaSource} with the identifier it was
+ * requested under (matched by list position), warns when the identifier
+ * extracted from the parsed source differs from the requested one, and
+ * collapses duplicate actual identifiers (the last occurrence wins, since
+ * later entries overwrite earlier ones in the map). Relies on the input
+ * list preserving the original request order.
+ */
+ private static final class SourceIdMismatchDetector implements Function<List<ASTSchemaSource>, List<ASTSchemaSource>> {
+ // Requested identifiers, in the same order as the parsed sources they correspond to
+ private final List<SourceIdentifier> sourceIdentifiers;
+
+ public SourceIdMismatchDetector(final List<SourceIdentifier> sourceIdentifiers) {
+ this.sourceIdentifiers = sourceIdentifiers;
+ }
+
+ @Override
+ public List<ASTSchemaSource> apply(final List<ASTSchemaSource> input) {
+ // LinkedHashMap keyed by the ACTUAL identifier: preserves order while
+ // de-duplicating sources that resolved to the same identifier
+ final Map<SourceIdentifier, ASTSchemaSource> filtered = Maps.newLinkedHashMap();
+
+ for (int i = 0; i < input.size(); i++) {
+
+ final SourceIdentifier expectedSId = sourceIdentifiers.get(i);
+ final ASTSchemaSource astSchemaSource = input.get(i);
+ final SourceIdentifier realSId = astSchemaSource.getIdentifier();
+
+ if (!expectedSId.equals(realSId)) {
+ LOG.warn("Source identifier mismatch for module \"{}\", requested as {} but actually is {}. Using actual id", expectedSId.getName(), expectedSId, realSId);
+ }
+
+ if (filtered.containsKey(realSId)) {
+ LOG.warn("Duplicate source for module {} detected in reactor", realSId);
+ }
+
+ // put() overwrites any earlier entry for realSId, so the last duplicate wins
+ filtered.put(realSId, astSchemaSource);
+
+ }
+ return Lists.newArrayList(filtered.values());
+ }
+ }
+}