X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?a=blobdiff_plain;f=yang%2Fyang-parser-impl%2Fsrc%2Fmain%2Fjava%2Forg%2Fopendaylight%2Fyangtools%2Fyang%2Fparser%2Frepo%2FSharedSchemaContextFactory.java;h=e9a4fc6d842f55158ae155a19327b69881e30f84;hb=1d4d5d61d5c50661d898b1b184f984d055486fba;hp=e6e357dc0e9664f21595bf54066caea591071500;hpb=25de0a162a30fdc883a0208f626cf464d6600fa1;p=yangtools.git

diff --git a/yang/yang-parser-impl/src/main/java/org/opendaylight/yangtools/yang/parser/repo/SharedSchemaContextFactory.java b/yang/yang-parser-impl/src/main/java/org/opendaylight/yangtools/yang/parser/repo/SharedSchemaContextFactory.java
index e6e357dc0e..e9a4fc6d84 100644
--- a/yang/yang-parser-impl/src/main/java/org/opendaylight/yangtools/yang/parser/repo/SharedSchemaContextFactory.java
+++ b/yang/yang-parser-impl/src/main/java/org/opendaylight/yangtools/yang/parser/repo/SharedSchemaContextFactory.java
@@ -7,201 +7,253 @@
  */
 package org.opendaylight.yangtools.yang.parser.repo;
 
+import static java.util.Objects.requireNonNull;
+import static org.opendaylight.yangtools.util.concurrent.FluentFutures.immediateFluentFuture;
+
 import com.google.common.base.Function;
-import com.google.common.base.Optional;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.collect.Collections2;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.AsyncFunction;
-import com.google.common.util.concurrent.CheckedFuture;
+import com.google.common.util.concurrent.FluentFuture;
 import com.google.common.util.concurrent.FutureCallback;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
-import java.net.URI;
+import com.google.common.util.concurrent.MoreExecutors;
+import com.google.common.util.concurrent.SettableFuture;
+import java.io.IOException;
 import java.util.Collection;
-import java.util.Collections;
-import java.util.Date;
 import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
-import java.util.TreeMap;
-import javax.annotation.Nullable;
-import org.antlr.v4.runtime.ParserRuleContext;
-import org.antlr.v4.runtime.tree.ParseTreeWalker;
-import org.opendaylight.yangtools.util.concurrent.ExceptionMapper;
-import org.opendaylight.yangtools.util.concurrent.ReflectiveExceptionMapper;
-import org.opendaylight.yangtools.yang.model.api.Module;
-import org.opendaylight.yangtools.yang.model.api.SchemaContext;
-import org.opendaylight.yangtools.yang.model.repo.api.SchemaContextFactory;
+import java.util.concurrent.ExecutionException;
+import org.eclipse.jdt.annotation.NonNull;
+import org.gaul.modernizer_maven_annotations.SuppressModernizer;
+import org.opendaylight.yangtools.concepts.SemVer;
+import org.opendaylight.yangtools.yang.model.api.EffectiveModelContext;
+import org.opendaylight.yangtools.yang.model.parser.api.YangParser;
+import org.opendaylight.yangtools.yang.model.parser.api.YangParserException;
+import org.opendaylight.yangtools.yang.model.parser.api.YangParserFactory;
+import org.opendaylight.yangtools.yang.model.parser.api.YangSyntaxErrorException;
+import org.opendaylight.yangtools.yang.model.repo.api.EffectiveModelContextFactory;
+import org.opendaylight.yangtools.yang.model.repo.api.SchemaContextFactoryConfiguration;
 import org.opendaylight.yangtools.yang.model.repo.api.SchemaResolutionException;
-import org.opendaylight.yangtools.yang.model.repo.api.SchemaSourceFilter;
+import org.opendaylight.yangtools.yang.model.repo.api.SemVerSourceIdentifier;
 import org.opendaylight.yangtools.yang.model.repo.api.SourceIdentifier;
-import org.opendaylight.yangtools.yang.parser.builder.impl.BuilderUtils;
-import org.opendaylight.yangtools.yang.parser.builder.impl.ModuleBuilder;
-import org.opendaylight.yangtools.yang.parser.impl.YangParserImpl;
-import org.opendaylight.yangtools.yang.parser.impl.YangParserListenerImpl;
-import org.opendaylight.yangtools.yang.parser.impl.util.YangModelDependencyInfo;
-import org.opendaylight.yangtools.yang.parser.util.ASTSchemaSource;
+import org.opendaylight.yangtools.yang.model.repo.api.StatementParserMode;
+import org.opendaylight.yangtools.yang.parser.rfc7950.ir.IRSchemaSource;
+import org.opendaylight.yangtools.yang.parser.rfc7950.repo.YangModelDependencyInfo;
+import org.opendaylight.yangtools.yang.parser.spi.meta.ReactorException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-final class SharedSchemaContextFactory implements SchemaContextFactory {
-    private static final ExceptionMapper<SchemaResolutionException> MAPPER = ReflectiveExceptionMapper.create("resolve sources", SchemaResolutionException.class);
+final class SharedSchemaContextFactory implements EffectiveModelContextFactory {
     private static final Logger LOG = LoggerFactory.getLogger(SharedSchemaContextFactory.class);
 
-    private final Function<SourceIdentifier, ListenableFuture<ASTSchemaSource>> requestSources = new Function<SourceIdentifier, ListenableFuture<ASTSchemaSource>>() {
-        @Override
-        public ListenableFuture<ASTSchemaSource> apply(final SourceIdentifier input) {
-            return repository.getSchemaSource(input, ASTSchemaSource.class);
-        }
-    };
-    private final Cache<Collection<SourceIdentifier>, SchemaContext> cache = CacheBuilder.newBuilder().softValues().build();
-
-    private final AsyncFunction<List<ASTSchemaSource>, SchemaContext> assembleSources = new AsyncFunction<List<ASTSchemaSource>, SchemaContext>() {
-        @Override
-        public ListenableFuture<SchemaContext> apply(final List<ASTSchemaSource> sources) throws SchemaResolutionException {
-            final Map<SourceIdentifier, ASTSchemaSource> srcs =
-                    Maps.uniqueIndex(sources, ASTSchemaSource.GET_IDENTIFIER);
-            final Map<SourceIdentifier, YangModelDependencyInfo> deps =
-                    Maps.transformValues(srcs, ASTSchemaSource.GET_DEPINFO);
-
-            LOG.debug("Resolving dependency reactor {}", deps);
-
-            final DependencyResolver res = DependencyResolver.create(deps);
-            if (!res.getUnresolvedSources().isEmpty()) {
-                LOG.debug("Omitting models {} due to unsatisfied imports {}", res.getUnresolvedSources(), res.getUnsatisfiedImports());
-
-                // FIXME: push into DependencyResolver
-
-                throw new SchemaResolutionException("Failed to resolve required models",
-                        res.getResolvedSources(), res.getUnsatisfiedImports());
-            }
-
-            final Map<SourceIdentifier, ParserRuleContext> asts =
-                    Maps.transformValues(srcs, ASTSchemaSource.GET_AST);
-            final Map<String, TreeMap<Date, URI>> namespaceContext = BuilderUtils.createYangNamespaceContext(
-                    asts.values(), Optional.absent());
-
-            final ParseTreeWalker walker = new ParseTreeWalker();
-            final Map<SourceIdentifier, ModuleBuilder> sourceToBuilder = new LinkedHashMap<>();
-
-            for (final Entry<SourceIdentifier, ParserRuleContext> entry : asts.entrySet()) {
-                final ModuleBuilder moduleBuilder = YangParserListenerImpl.create(namespaceContext, entry.getKey().getName(),
-                        walker, entry.getValue()).getModuleBuilder();
-
-                moduleBuilder.setSource(srcs.get(entry.getKey()).getYangText());
-                sourceToBuilder.put(entry.getKey(), moduleBuilder);
-            }
-            LOG.debug("Modules ready for integration");
-
-            final YangParserImpl parser = YangParserImpl.getInstance();
-            final Collection<Module> modules = parser.buildModules(sourceToBuilder.values());
-            LOG.debug("Integrated cross-references modules");
-            return Futures.immediateCheckedFuture(parser.assembleContext(modules));
-        }
-    };
-
-    private final SharedSchemaRepository repository;
-    // FIXME: ignored right now
-    private final SchemaSourceFilter filter;
-
-    // FIXME SchemaRepository should be the type for repository parameter instead of SharedSchemaRepository (final implementation)
-    public SharedSchemaContextFactory(final SharedSchemaRepository repository, final SchemaSourceFilter filter) {
-        this.repository = Preconditions.checkNotNull(repository);
-        this.filter = Preconditions.checkNotNull(filter);
+    private final Cache<Collection<SourceIdentifier>, EffectiveModelContext> revisionCache = CacheBuilder.newBuilder()
+            .weakValues().build();
+    private final Cache<Collection<SourceIdentifier>, EffectiveModelContext> semVerCache = CacheBuilder.newBuilder()
+            .weakValues().build();
+    private final @NonNull SharedSchemaRepository repository;
+    private final @NonNull SchemaContextFactoryConfiguration config;
+
+    SharedSchemaContextFactory(final @NonNull SharedSchemaRepository repository,
+            final @NonNull SchemaContextFactoryConfiguration config) {
+        this.repository = requireNonNull(repository);
+        this.config = requireNonNull(config);
     }
 
     @Override
-    public CheckedFuture<SchemaContext, SchemaResolutionException> createSchemaContext(final Collection<SourceIdentifier> requiredSources) {
+    public @NonNull ListenableFuture<EffectiveModelContext> createEffectiveModelContext(
+            final @NonNull Collection<SourceIdentifier> requiredSources) {
+        return createSchemaContext(requiredSources,
+            config.getStatementParserMode() == StatementParserMode.SEMVER_MODE ? semVerCache : revisionCache,
+            new AssembleSources(repository.factory(), config));
+    }
+
+    private @NonNull ListenableFuture<EffectiveModelContext> createSchemaContext(
+            final Collection<SourceIdentifier> requiredSources,
+            final Cache<Collection<SourceIdentifier>, EffectiveModelContext> cache,
+            final AsyncFunction<List<IRSchemaSource>, EffectiveModelContext> assembleSources) {
         // Make sources unique
         final List<SourceIdentifier> uniqueSourceIdentifiers = deDuplicateSources(requiredSources);
 
-        final SchemaContext existing = cache.getIfPresent(uniqueSourceIdentifiers);
+        final EffectiveModelContext existing = cache.getIfPresent(uniqueSourceIdentifiers);
         if (existing != null) {
             LOG.debug("Returning cached context {}", existing);
-            return Futures.immediateCheckedFuture(existing);
+            return immediateFluentFuture(existing);
         }
 
         // Request all sources be loaded
-        ListenableFuture<List<ASTSchemaSource>> sf = Futures.allAsList(Collections2.transform(uniqueSourceIdentifiers, requestSources));
+        ListenableFuture<List<IRSchemaSource>> sf = Futures.allAsList(Collections2.transform(uniqueSourceIdentifiers,
+            this::requestSource));
 
         // Detect mismatch between requested Source IDs and IDs that are extracted from parsed source
         // Also remove duplicates if present
         // We are relying on preserved order of uniqueSourceIdentifiers as well as sf
-        sf = Futures.transform(sf, new SourceIdMismatchDetector(uniqueSourceIdentifiers));
+        sf = Futures.transform(sf, new SourceIdMismatchDetector(uniqueSourceIdentifiers),
+            MoreExecutors.directExecutor());
 
         // Assemble sources into a schema context
-        final ListenableFuture<SchemaContext> cf = Futures.transform(sf, assembleSources);
+        final ListenableFuture<EffectiveModelContext> cf = Futures.transformAsync(sf, assembleSources,
+            MoreExecutors.directExecutor());
 
-        // Populate cache when successful
-        Futures.addCallback(cf, new FutureCallback<SchemaContext>() {
+        final SettableFuture<EffectiveModelContext> rf = SettableFuture.create();
+        Futures.addCallback(cf, new FutureCallback<EffectiveModelContext>() {
             @Override
-            public void onSuccess(final SchemaContext result) {
-                cache.put(uniqueSourceIdentifiers, result);
+            public void onSuccess(final EffectiveModelContext result) {
+                // Deduplicate concurrent loads
+                final EffectiveModelContext existing;
+                try {
+                    existing = cache.get(uniqueSourceIdentifiers, () -> result);
+                } catch (ExecutionException e) {
+                    LOG.warn("Failed to recheck result with cache, will use computed value", e);
+                    rf.set(result);
+                    return;
+                }
+
+                rf.set(existing);
             }
 
             @Override
-            public void onFailure(final Throwable t) {
-                LOG.debug("Failed to assemble sources", t);
+            public void onFailure(final Throwable cause) {
+                LOG.debug("Failed to assemble sources", cause);
+                rf.setException(cause);
             }
-        });
+        }, MoreExecutors.directExecutor());
+
+        return rf;
+    }
 
-        return Futures.makeChecked(cf, MAPPER);
+    private ListenableFuture<IRSchemaSource> requestSource(final @NonNull SourceIdentifier identifier) {
+        return repository.getSchemaSource(identifier, IRSchemaSource.class);
     }
 
     /**
+     * Return a set of de-duplicated inputs.
+     *
      * @return set (preserving ordering) from the input collection
      */
-    private List<SourceIdentifier> deDuplicateSources(final Collection<SourceIdentifier> requiredSources) {
-        final Set<SourceIdentifier> uniqueSourceIdentifiers = Collections.unmodifiableSet(Sets.newLinkedHashSet(requiredSources));
-        if(uniqueSourceIdentifiers.size() != requiredSources.size()) {
-            LOG.warn("Duplicate sources requested for schema context, removed duplicate sources: {}", Collections2.filter(uniqueSourceIdentifiers, new Predicate<SourceIdentifier>() {
-                @Override
-                public boolean apply(@Nullable final SourceIdentifier input) {
-                    return Iterables.frequency(requiredSources, input) > 1;
-                }
-            }));
+    private static List<SourceIdentifier> deDuplicateSources(final Collection<SourceIdentifier> requiredSources) {
+        final Set<SourceIdentifier> uniqueSourceIdentifiers = new LinkedHashSet<>(requiredSources);
+        if (uniqueSourceIdentifiers.size() == requiredSources.size()) {
+            // Can potentially reuse input
+            return ImmutableList.copyOf(requiredSources);
         }
-        return Lists.newArrayList(uniqueSourceIdentifiers);
+
+        LOG.warn("Duplicate sources requested for schema context, removed duplicate sources: {}",
+            Collections2.filter(uniqueSourceIdentifiers, input -> Iterables.frequency(requiredSources, input) > 1));
+        return ImmutableList.copyOf(uniqueSourceIdentifiers);
     }
 
-    private static final class SourceIdMismatchDetector implements Function<List<ASTSchemaSource>, List<ASTSchemaSource>> {
+    @SuppressModernizer
+    private static final class SourceIdMismatchDetector implements Function<List<IRSchemaSource>,
+            List<IRSchemaSource>> {
         private final List<SourceIdentifier> sourceIdentifiers;
 
-        public SourceIdMismatchDetector(final List<SourceIdentifier> sourceIdentifiers) {
-            this.sourceIdentifiers = sourceIdentifiers;
+        SourceIdMismatchDetector(final List<SourceIdentifier> sourceIdentifiers) {
+            this.sourceIdentifiers = requireNonNull(sourceIdentifiers);
        }
 
         @Override
-        public List<ASTSchemaSource> apply(final List<ASTSchemaSource> input) {
-            final Map<SourceIdentifier, ASTSchemaSource> filtered = Maps.newLinkedHashMap();
+        public List<IRSchemaSource> apply(final List<IRSchemaSource> input) {
+            final Map<SourceIdentifier, IRSchemaSource> filtered = new LinkedHashMap<>();
             for (int i = 0; i < input.size(); i++) {
                 final SourceIdentifier expectedSId = sourceIdentifiers.get(i);
-                final ASTSchemaSource astSchemaSource = input.get(i);
-                final SourceIdentifier realSId = astSchemaSource.getIdentifier();
+                final IRSchemaSource irSchemaSource = input.get(i);
+                final SourceIdentifier realSId = irSchemaSource.getIdentifier();
 
                 if (!expectedSId.equals(realSId)) {
-                    LOG.warn("Source identifier mismatch for module \"{}\", requested as {} but actually is {}. Using actual id", expectedSId.getName(), expectedSId, realSId);
+                    LOG.warn("Source identifier mismatch for module \"{}\", requested as {} but actually is {}. "
+                        + "Using actual id", expectedSId.getName(), expectedSId, realSId);
                 }
 
                 if (filtered.containsKey(realSId)) {
                     LOG.warn("Duplicate source for module {} detected in reactor", realSId);
                 }
 
-                filtered.put(realSId, astSchemaSource);
+                filtered.put(realSId, irSchemaSource);
+
+            }
+            return ImmutableList.copyOf(filtered.values());
+        }
+    }
+
+    private static final class AssembleSources implements AsyncFunction<List<IRSchemaSource>, EffectiveModelContext> {
+        private final @NonNull YangParserFactory parserFactory;
+        private final @NonNull SchemaContextFactoryConfiguration config;
+        private final @NonNull Function<IRSchemaSource, SourceIdentifier> getIdentifier;
+
+        private AssembleSources(final @NonNull YangParserFactory parserFactory,
+                final @NonNull SchemaContextFactoryConfiguration config) {
+            this.parserFactory = parserFactory;
+            this.config = config;
+            switch (config.getStatementParserMode()) {
+                case SEMVER_MODE:
+                    this.getIdentifier = AssembleSources::getSemVerIdentifier;
+                    break;
+                default:
+                    this.getIdentifier = IRSchemaSource::getIdentifier;
             }
-            return Lists.newArrayList(filtered.values());
+        }
+
+        @Override
+        public FluentFuture<EffectiveModelContext> apply(final List<IRSchemaSource> sources)
+                throws SchemaResolutionException, ReactorException {
+            final Map<SourceIdentifier, IRSchemaSource> srcs = Maps.uniqueIndex(sources, getIdentifier);
+            final Map<SourceIdentifier, YangModelDependencyInfo> deps =
+                    Maps.transformValues(srcs, YangModelDependencyInfo::forIR);
+
+            LOG.debug("Resolving dependency reactor {}", deps);
+
+            final StatementParserMode statementParserMode = config.getStatementParserMode();
+            final DependencyResolver res = statementParserMode == StatementParserMode.SEMVER_MODE
+                    ? SemVerDependencyResolver.create(deps) : RevisionDependencyResolver.create(deps);
+            if (!res.getUnresolvedSources().isEmpty()) {
+                LOG.debug("Omitting models {} due to unsatisfied imports {}", res.getUnresolvedSources(),
+                    res.getUnsatisfiedImports());
+                throw new SchemaResolutionException("Failed to resolve required models",
+                    res.getResolvedSources(), res.getUnsatisfiedImports());
+            }
+
+            final YangParser parser = parserFactory.createParser(statementParserMode);
+            config.getSupportedFeatures().ifPresent(parser::setSupportedFeatures);
+            config.getModulesDeviatedByModules().ifPresent(parser::setModulesWithSupportedDeviations);
+
+            for (final Entry<SourceIdentifier, IRSchemaSource> entry : srcs.entrySet()) {
+                try {
+                    parser.addSource(entry.getValue());
+                } catch (YangSyntaxErrorException | IOException e) {
+                    throw new SchemaResolutionException("Failed to add source " + entry.getKey(), e);
+                }
+            }
+
+            final EffectiveModelContext schemaContext;
+            try {
+                schemaContext = parser.buildEffectiveModel();
+            } catch (final YangParserException e) {
+                throw new SchemaResolutionException("Failed to resolve required models", e);
+            }
+
+            return immediateFluentFuture(schemaContext);
+        }
+
+        private static SemVerSourceIdentifier getSemVerIdentifier(final IRSchemaSource source) {
+            final SourceIdentifier identifier = source.getIdentifier();
+            final SemVer semver = YangModelDependencyInfo.findSemanticVersion(source.getRootStatement(), identifier);
+            if (identifier instanceof SemVerSourceIdentifier && semver == null) {
+                return (SemVerSourceIdentifier) identifier;
+            }
+
+            return SemVerSourceIdentifier.create(identifier.getName(), identifier.getRevision(), semver);
         }
     }
 }