Move ASTSchemaSource and its components into rfc6020.repo
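For downstream consumers, the visible effect of this move is the new package for ASTSchemaSource and its helpers. As a hedged illustration (the import lines are taken from the diff below; the consuming class itself is hypothetical):

    // Java: before this change
    import org.opendaylight.yangtools.yang.parser.impl.util.YangModelDependencyInfo;
    import org.opendaylight.yangtools.yang.parser.util.ASTSchemaSource;

    // Java: after this change
    import org.opendaylight.yangtools.yang.parser.rfc6020.repo.ASTSchemaSource;
    import org.opendaylight.yangtools.yang.parser.rfc6020.repo.YangModelDependencyInfo;
    import org.opendaylight.yangtools.yang.parser.rfc6020.repo.YangStatementStreamSource;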
diff --git a/yang/yang-parser-impl/src/main/java/org/opendaylight/yangtools/yang/parser/repo/SharedSchemaContextFactory.java b/yang/yang-parser-impl/src/main/java/org/opendaylight/yangtools/yang/parser/repo/SharedSchemaContextFactory.java
index e6e357dc0e9664f21595bf54066caea591071500..a797eca806fc6429709f23c60ff4b72fe599e23f 100644
--- a/yang/yang-parser-impl/src/main/java/org/opendaylight/yangtools/yang/parser/repo/SharedSchemaContextFactory.java
+++ b/yang/yang-parser-impl/src/main/java/org/opendaylight/yangtools/yang/parser/repo/SharedSchemaContextFactory.java
 package org.opendaylight.yangtools.yang.parser.repo;
 
 import com.google.common.base.Function;
-import com.google.common.base.Optional;
 import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.collect.Collections2;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.AsyncFunction;
-import com.google.common.util.concurrent.CheckedFuture;
 import com.google.common.util.concurrent.FutureCallback;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
-import java.net.URI;
+import com.google.common.util.concurrent.MoreExecutors;
 import java.util.Collection;
-import java.util.Collections;
-import java.util.Date;
 import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Optional;
 import java.util.Set;
-import java.util.TreeMap;
-import javax.annotation.Nullable;
+import javax.annotation.Nonnull;
 import org.antlr.v4.runtime.ParserRuleContext;
-import org.antlr.v4.runtime.tree.ParseTreeWalker;
-import org.opendaylight.yangtools.util.concurrent.ExceptionMapper;
-import org.opendaylight.yangtools.util.concurrent.ReflectiveExceptionMapper;
-import org.opendaylight.yangtools.yang.model.api.Module;
+import org.opendaylight.yangtools.antlrv4.code.gen.YangStatementParser.StatementContext;
+import org.opendaylight.yangtools.yang.common.QName;
 import org.opendaylight.yangtools.yang.model.api.SchemaContext;
 import org.opendaylight.yangtools.yang.model.repo.api.SchemaContextFactory;
 import org.opendaylight.yangtools.yang.model.repo.api.SchemaResolutionException;
 import org.opendaylight.yangtools.yang.model.repo.api.SchemaSourceFilter;
 import org.opendaylight.yangtools.yang.model.repo.api.SourceIdentifier;
-import org.opendaylight.yangtools.yang.parser.builder.impl.BuilderUtils;
-import org.opendaylight.yangtools.yang.parser.builder.impl.ModuleBuilder;
-import org.opendaylight.yangtools.yang.parser.impl.YangParserImpl;
-import org.opendaylight.yangtools.yang.parser.impl.YangParserListenerImpl;
-import org.opendaylight.yangtools.yang.parser.impl.util.YangModelDependencyInfo;
-import org.opendaylight.yangtools.yang.parser.util.ASTSchemaSource;
+import org.opendaylight.yangtools.yang.model.repo.api.StatementParserMode;
+import org.opendaylight.yangtools.yang.parser.impl.DefaultReactors;
+import org.opendaylight.yangtools.yang.parser.rfc6020.repo.ASTSchemaSource;
+import org.opendaylight.yangtools.yang.parser.rfc6020.repo.YangModelDependencyInfo;
+import org.opendaylight.yangtools.yang.parser.rfc6020.repo.YangStatementStreamSource;
+import org.opendaylight.yangtools.yang.parser.spi.meta.ReactorException;
+import org.opendaylight.yangtools.yang.parser.stmt.reactor.CrossSourceStatementReactor.BuildAction;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 final class SharedSchemaContextFactory implements SchemaContextFactory {
-    private static final ExceptionMapper<SchemaResolutionException> MAPPER = ReflectiveExceptionMapper.create("resolve sources", SchemaResolutionException.class);
     private static final Logger LOG = LoggerFactory.getLogger(SharedSchemaContextFactory.class);
 
-    private final Function<SourceIdentifier, ListenableFuture<ASTSchemaSource>> requestSources = new Function<SourceIdentifier, ListenableFuture<ASTSchemaSource>>() {
-        @Override
-        public ListenableFuture<ASTSchemaSource> apply(final SourceIdentifier input) {
-            return repository.getSchemaSource(input, ASTSchemaSource.class);
-        }
-    };
-    private final Cache<Collection<SourceIdentifier>, SchemaContext> cache = CacheBuilder.newBuilder().softValues().build();
-
-    private final AsyncFunction<List<ASTSchemaSource>, SchemaContext> assembleSources = new AsyncFunction<List<ASTSchemaSource>, SchemaContext>() {
-        @Override
-        public ListenableFuture<SchemaContext> apply(final List<ASTSchemaSource> sources) throws SchemaResolutionException {
-            final Map<SourceIdentifier, ASTSchemaSource> srcs =
-                    Maps.uniqueIndex(sources, ASTSchemaSource.GET_IDENTIFIER);
-            final Map<SourceIdentifier, YangModelDependencyInfo> deps =
-                    Maps.transformValues(srcs, ASTSchemaSource.GET_DEPINFO);
-
-            LOG.debug("Resolving dependency reactor {}", deps);
-
-            final DependencyResolver res = DependencyResolver.create(deps);
-            if (!res.getUnresolvedSources().isEmpty()) {
-                LOG.debug("Omitting models {} due to unsatisfied imports {}", res.getUnresolvedSources(), res.getUnsatisfiedImports());
-
-                // FIXME: push into DependencyResolver
-
-                throw new SchemaResolutionException("Failed to resolve required models",
-                        res.getResolvedSources(), res.getUnsatisfiedImports());
-            }
-
-            final Map<SourceIdentifier, ParserRuleContext> asts =
-                    Maps.transformValues(srcs, ASTSchemaSource.GET_AST);
-            final Map<String, TreeMap<Date, URI>> namespaceContext = BuilderUtils.createYangNamespaceContext(
-                    asts.values(), Optional.<SchemaContext>absent());
-
-            final ParseTreeWalker walker = new ParseTreeWalker();
-            final Map<SourceIdentifier, ModuleBuilder> sourceToBuilder = new LinkedHashMap<>();
-
-            for (final Entry<SourceIdentifier, ParserRuleContext> entry : asts.entrySet()) {
-                final ModuleBuilder moduleBuilder = YangParserListenerImpl.create(namespaceContext, entry.getKey().getName(),
-                        walker, entry.getValue()).getModuleBuilder();
-
-                moduleBuilder.setSource(srcs.get(entry.getKey()).getYangText());
-                sourceToBuilder.put(entry.getKey(), moduleBuilder);
-            }
-            LOG.debug("Modules ready for integration");
-
-            final YangParserImpl parser = YangParserImpl.getInstance();
-            final Collection<Module> modules = parser.buildModules(sourceToBuilder.values());
-            LOG.debug("Integrated cross-references modules");
-            return Futures.immediateCheckedFuture(parser.assembleContext(modules));
-        }
-    };
-
+    private final Cache<Collection<SourceIdentifier>, SchemaContext> cache = CacheBuilder.newBuilder().weakValues()
+            .build();
+    private final Cache<Collection<SourceIdentifier>, SchemaContext> semVerCache = CacheBuilder.newBuilder()
+            .weakValues().build();
     private final SharedSchemaRepository repository;
     // FIXME: ignored right now
     private final SchemaSourceFilter filter;
 
-    // FIXME SchemaRepository should be the type for repository parameter instead of SharedSchemaRepository (final implementation)
-    public SharedSchemaContextFactory(final SharedSchemaRepository repository, final SchemaSourceFilter filter) {
+    // FIXME SchemaRepository should be the type for repository parameter instead of SharedSchemaRepository
+    //       (final implementation)
+    SharedSchemaContextFactory(final SharedSchemaRepository repository, final SchemaSourceFilter filter) {
         this.repository = Preconditions.checkNotNull(repository);
         this.filter = Preconditions.checkNotNull(filter);
     }
 
     @Override
-    public CheckedFuture<SchemaContext, SchemaResolutionException> createSchemaContext(final Collection<SourceIdentifier> requiredSources) {
+    public ListenableFuture<SchemaContext> createSchemaContext(final Collection<SourceIdentifier> requiredSources,
+            final StatementParserMode statementParserMode, final Set<QName> supportedFeatures) {
+        return createSchemaContext(requiredSources,
+                statementParserMode == StatementParserMode.SEMVER_MODE ? this.semVerCache : this.cache,
+                new AssembleSources(Optional.ofNullable(supportedFeatures), statementParserMode));
+    }
+
+    private ListenableFuture<SchemaContext> createSchemaContext(final Collection<SourceIdentifier> requiredSources,
+            final Cache<Collection<SourceIdentifier>, SchemaContext> cache,
+            final AsyncFunction<List<ASTSchemaSource>, SchemaContext> assembleSources) {
         // Make sources unique
         final List<SourceIdentifier> uniqueSourceIdentifiers = deDuplicateSources(requiredSources);
 
         final SchemaContext existing = cache.getIfPresent(uniqueSourceIdentifiers);
         if (existing != null) {
             LOG.debug("Returning cached context {}", existing);
-            return Futures.immediateCheckedFuture(existing);
+            return Futures.immediateFuture(existing);
         }
 
         // Request all sources be loaded
-        ListenableFuture<List<ASTSchemaSource>> sf = Futures.allAsList(Collections2.transform(uniqueSourceIdentifiers, requestSources));
+        ListenableFuture<List<ASTSchemaSource>> sf = Futures.allAsList(Collections2.transform(uniqueSourceIdentifiers,
+            this::requestSource));
 
         // Detect mismatch between requested Source IDs and IDs that are extracted from parsed source
         // Also remove duplicates if present
         // We are relying on preserved order of uniqueSourceIdentifiers as well as sf
-        sf = Futures.transform(sf, new SourceIdMismatchDetector(uniqueSourceIdentifiers));
+        sf = Futures.transform(sf, new SourceIdMismatchDetector(uniqueSourceIdentifiers),
+            MoreExecutors.directExecutor());
 
         // Assemble sources into a schema context
-        final ListenableFuture<SchemaContext> cf = Futures.transform(sf, assembleSources);
+        final ListenableFuture<SchemaContext> cf = Futures.transformAsync(sf, assembleSources,
+            MoreExecutors.directExecutor());
 
         // Populate cache when successful
         Futures.addCallback(cf, new FutureCallback<SchemaContext>() {
@@ -149,40 +107,46 @@ final class SharedSchemaContextFactory implements SchemaContextFactory {
             }
 
             @Override
-            public void onFailure(final Throwable t) {
-                LOG.debug("Failed to assemble sources", t);
+            public void onFailure(@Nonnull final Throwable cause) {
+                LOG.debug("Failed to assemble sources", cause);
             }
-        });
+        }, MoreExecutors.directExecutor());
+
+        return cf;
+    }
 
-        return Futures.makeChecked(cf, MAPPER);
+    private ListenableFuture<ASTSchemaSource> requestSource(final SourceIdentifier identifier) {
+        return repository.getSchemaSource(identifier, ASTSchemaSource.class);
     }
 
     /**
+     * Return a set of de-duplicated inputs.
+     *
      * @return set (preserving ordering) from the input collection
      */
-    private List<SourceIdentifier> deDuplicateSources(final Collection<SourceIdentifier> requiredSources) {
-        final Set<SourceIdentifier> uniqueSourceIdentifiers = Collections.unmodifiableSet(Sets.newLinkedHashSet(requiredSources));
-        if(uniqueSourceIdentifiers.size() != requiredSources.size()) {
-            LOG.warn("Duplicate sources requested for schema context, removed duplicate sources: {}", Collections2.filter(uniqueSourceIdentifiers, new Predicate<SourceIdentifier>() {
-                @Override
-                public boolean apply(@Nullable final SourceIdentifier input) {
-                    return Iterables.frequency(requiredSources, input) > 1;
-                }
-            }));
+    private static List<SourceIdentifier> deDuplicateSources(final Collection<SourceIdentifier> requiredSources) {
+        final Set<SourceIdentifier> uniqueSourceIdentifiers = new LinkedHashSet<>(requiredSources);
+        if (uniqueSourceIdentifiers.size() == requiredSources.size()) {
+            // Can potentially reuse input
+            return ImmutableList.copyOf(requiredSources);
         }
-        return Lists.newArrayList(uniqueSourceIdentifiers);
+
+        LOG.warn("Duplicate sources requested for schema context, removed duplicate sources: {}",
+            Collections2.filter(uniqueSourceIdentifiers, input -> Iterables.frequency(requiredSources, input) > 1));
+        return ImmutableList.copyOf(uniqueSourceIdentifiers);
     }
 
-    private static final class SourceIdMismatchDetector implements Function<List<ASTSchemaSource>, List<ASTSchemaSource>> {
+    private static final class SourceIdMismatchDetector implements Function<List<ASTSchemaSource>,
+            List<ASTSchemaSource>> {
         private final List<SourceIdentifier> sourceIdentifiers;
 
-        public SourceIdMismatchDetector(final List<SourceIdentifier> sourceIdentifiers) {
-            this.sourceIdentifiers = sourceIdentifiers;
+        SourceIdMismatchDetector(final List<SourceIdentifier> sourceIdentifiers) {
+            this.sourceIdentifiers = Preconditions.checkNotNull(sourceIdentifiers);
         }
 
         @Override
         public List<ASTSchemaSource> apply(final List<ASTSchemaSource> input) {
-            final Map<SourceIdentifier, ASTSchemaSource> filtered = Maps.newLinkedHashMap();
+            final Map<SourceIdentifier, ASTSchemaSource> filtered = new LinkedHashMap<>();
 
             for (int i = 0; i < input.size(); i++) {
 
@@ -191,7 +155,8 @@ final class SharedSchemaContextFactory implements SchemaContextFactory {
                 final SourceIdentifier realSId = astSchemaSource.getIdentifier();
 
                 if (!expectedSId.equals(realSId)) {
-                    LOG.warn("Source identifier mismatch for module \"{}\", requested as {} but actually is {}. Using actual id", expectedSId.getName(), expectedSId, realSId);
+                    LOG.warn("Source identifier mismatch for module \"{}\", requested as {} but actually is {}. "
+                        + "Using actual id", expectedSId.getName(), expectedSId, realSId);
                 }
 
                 if (filtered.containsKey(realSId)) {
@@ -201,7 +166,70 @@ final class SharedSchemaContextFactory implements SchemaContextFactory {
                 filtered.put(realSId, astSchemaSource);
 
             }
-            return Lists.newArrayList(filtered.values());
+            return ImmutableList.copyOf(filtered.values());
+        }
+    }
+
+    private static final class AssembleSources implements AsyncFunction<List<ASTSchemaSource>, SchemaContext> {
+
+        private final Optional<Set<QName>> supportedFeatures;
+        private final StatementParserMode statementParserMode;
+        private final Function<ASTSchemaSource, SourceIdentifier> getIdentifier;
+
+        private AssembleSources(final Optional<Set<QName>> supportedFeatures,
+                final StatementParserMode statementParserMode) {
+            this.supportedFeatures = supportedFeatures;
+            this.statementParserMode = Preconditions.checkNotNull(statementParserMode);
+            switch (statementParserMode) {
+                case SEMVER_MODE:
+                    this.getIdentifier = ASTSchemaSource::getSemVerIdentifier;
+                    break;
+                default:
+                    this.getIdentifier = ASTSchemaSource::getIdentifier;
+            }
+        }
+
+        @Override
+        public ListenableFuture<SchemaContext> apply(@Nonnull final List<ASTSchemaSource> sources)
+                throws SchemaResolutionException, ReactorException {
+            final Map<SourceIdentifier, ASTSchemaSource> srcs = Maps.uniqueIndex(sources, getIdentifier);
+            final Map<SourceIdentifier, YangModelDependencyInfo> deps =
+                    Maps.transformValues(srcs, ASTSchemaSource::getDependencyInformation);
+
+            LOG.debug("Resolving dependency reactor {}", deps);
+
+            final DependencyResolver res = this.statementParserMode == StatementParserMode.SEMVER_MODE
+                    ? SemVerDependencyResolver.create(deps) : RevisionDependencyResolver.create(deps);
+            if (!res.getUnresolvedSources().isEmpty()) {
+                LOG.debug("Omitting models {} due to unsatisfied imports {}", res.getUnresolvedSources(),
+                    res.getUnsatisfiedImports());
+                throw new SchemaResolutionException("Failed to resolve required models",
+                        res.getResolvedSources(), res.getUnsatisfiedImports());
+            }
+
+            final BuildAction reactor = DefaultReactors.defaultReactor().newBuild(statementParserMode);
+            if (supportedFeatures.isPresent()) {
+                reactor.setSupportedFeatures(supportedFeatures.get());
+            }
+
+            for (final Entry<SourceIdentifier, ASTSchemaSource> e : srcs.entrySet()) {
+                final ASTSchemaSource ast = e.getValue();
+                final ParserRuleContext parserRuleCtx = ast.getAST();
+                Preconditions.checkArgument(parserRuleCtx instanceof StatementContext,
+                        "Unsupported context class %s for source %s", parserRuleCtx.getClass(), e.getKey());
+
+                reactor.addSource(YangStatementStreamSource.create(e.getKey(), (StatementContext) parserRuleCtx,
+                    ast.getSymbolicName().orElse(null)));
+            }
+
+            final SchemaContext schemaContext;
+            try {
+                schemaContext = reactor.buildEffective();
+            } catch (final ReactorException ex) {
+                throw new SchemaResolutionException("Failed to resolve required models", ex.getSourceIdentifier(), ex);
+            }
+
+            return Futures.immediateFuture(schemaContext);
         }
     }
 }
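
For context, a minimal usage sketch of the reworked factory API shown above. This is an assumption-laden illustration, not part of the change: the repository name, the RevisionSourceIdentifier value, SchemaSourceFilter.ALWAYS_ACCEPT, StatementParserMode.DEFAULT_MODE and the createSchemaContextFactory() call are taken from the surrounding yangtools API as I understand it, and registering the actual YANG text sources with the repository is elided.

    // Hypothetical caller (Java); see the hedging note above.
    final SharedSchemaRepository repository = new SharedSchemaRepository("example-repo");
    // ... register YANG text/AST schema sources with the repository here ...

    final SchemaContextFactory factory =
            repository.createSchemaContextFactory(SchemaSourceFilter.ALWAYS_ACCEPT);

    // Identifier(s) of the module(s) the context must contain.
    final Collection<SourceIdentifier> required =
            ImmutableList.of(RevisionSourceIdentifier.create("example-module"));

    // The factory now returns a plain ListenableFuture instead of a CheckedFuture;
    // passing null for supportedFeatures leaves all features enabled (see AssembleSources).
    final ListenableFuture<SchemaContext> future =
            factory.createSchemaContext(required, StatementParserMode.DEFAULT_MODE, null);

    Futures.addCallback(future, new FutureCallback<SchemaContext>() {
        @Override
        public void onSuccess(final SchemaContext context) {
            // use the assembled SchemaContext
        }

        @Override
        public void onFailure(final Throwable cause) {
            // typically a SchemaResolutionException describing unsatisfied imports
        }
    }, MoreExecutors.directExecutor());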