*/
package org.opendaylight.yangtools.yang.parser.repo;
-import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
import static org.opendaylight.yangtools.util.concurrent.FluentFutures.immediateFluentFuture;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
+import com.google.common.util.concurrent.SettableFuture;
+import java.io.IOException;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
-import org.antlr.v4.runtime.ParserRuleContext;
+import java.util.concurrent.ExecutionException;
import org.eclipse.jdt.annotation.NonNull;
-import org.opendaylight.yangtools.antlrv4.code.gen.YangStatementParser.StatementContext;
+import org.gaul.modernizer_maven_annotations.SuppressModernizer;
+import org.opendaylight.yangtools.concepts.SemVer;
import org.opendaylight.yangtools.yang.model.api.EffectiveModelContext;
+import org.opendaylight.yangtools.yang.model.parser.api.YangParser;
+import org.opendaylight.yangtools.yang.model.parser.api.YangParserException;
+import org.opendaylight.yangtools.yang.model.parser.api.YangParserFactory;
+import org.opendaylight.yangtools.yang.model.parser.api.YangSyntaxErrorException;
import org.opendaylight.yangtools.yang.model.repo.api.EffectiveModelContextFactory;
import org.opendaylight.yangtools.yang.model.repo.api.SchemaContextFactoryConfiguration;
-import org.opendaylight.yangtools.yang.model.repo.api.SchemaRepository;
import org.opendaylight.yangtools.yang.model.repo.api.SchemaResolutionException;
+import org.opendaylight.yangtools.yang.model.repo.api.SemVerSourceIdentifier;
import org.opendaylight.yangtools.yang.model.repo.api.SourceIdentifier;
import org.opendaylight.yangtools.yang.model.repo.api.StatementParserMode;
-import org.opendaylight.yangtools.yang.parser.impl.DefaultReactors;
-import org.opendaylight.yangtools.yang.parser.rfc7950.repo.ASTSchemaSource;
+import org.opendaylight.yangtools.yang.parser.rfc7950.ir.IRSchemaSource;
import org.opendaylight.yangtools.yang.parser.rfc7950.repo.YangModelDependencyInfo;
-import org.opendaylight.yangtools.yang.parser.rfc7950.repo.YangStatementStreamSource;
import org.opendaylight.yangtools.yang.parser.spi.meta.ReactorException;
-import org.opendaylight.yangtools.yang.parser.stmt.reactor.CrossSourceStatementReactor.BuildAction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
.weakValues().build();
private final Cache<Collection<SourceIdentifier>, EffectiveModelContext> semVerCache = CacheBuilder.newBuilder()
.weakValues().build();
- private final @NonNull SchemaRepository repository;
+ private final @NonNull SharedSchemaRepository repository;
private final @NonNull SchemaContextFactoryConfiguration config;
- SharedSchemaContextFactory(final @NonNull SchemaRepository repository,
- final @NonNull SchemaContextFactoryConfiguration config) {
+ // Narrowed from SchemaRepository to the concrete SharedSchemaRepository so that
+ // createEffectiveModelContext() can hand repository.factory() to AssembleSources.
+ SharedSchemaContextFactory(final @NonNull SharedSchemaRepository repository,
+ final @NonNull SchemaContextFactoryConfiguration config) {
this.repository = requireNonNull(repository);
this.config = requireNonNull(config);
}
final @NonNull Collection<SourceIdentifier> requiredSources) {
return createSchemaContext(requiredSources,
config.getStatementParserMode() == StatementParserMode.SEMVER_MODE ? semVerCache : revisionCache,
- new AssembleSources(config));
+ new AssembleSources(repository.factory(), config));
}
private @NonNull ListenableFuture<EffectiveModelContext> createSchemaContext(
final Collection<SourceIdentifier> requiredSources,
final Cache<Collection<SourceIdentifier>, EffectiveModelContext> cache,
- final AsyncFunction<List<ASTSchemaSource>, EffectiveModelContext> assembleSources) {
+ final AsyncFunction<List<IRSchemaSource>, EffectiveModelContext> assembleSources) {
// Make sources unique
final List<SourceIdentifier> uniqueSourceIdentifiers = deDuplicateSources(requiredSources);
}
// Request all sources be loaded
- ListenableFuture<List<ASTSchemaSource>> sf = Futures.allAsList(Collections2.transform(uniqueSourceIdentifiers,
+ ListenableFuture<List<IRSchemaSource>> sf = Futures.allAsList(Collections2.transform(uniqueSourceIdentifiers,
this::requestSource));
// Detect mismatch between requested Source IDs and IDs that are extracted from parsed source
final ListenableFuture<EffectiveModelContext> cf = Futures.transformAsync(sf, assembleSources,
MoreExecutors.directExecutor());
- // Populate cache when successful
+ final SettableFuture<EffectiveModelContext> rf = SettableFuture.create();
Futures.addCallback(cf, new FutureCallback<EffectiveModelContext>() {
@Override
public void onSuccess(final EffectiveModelContext result) {
- cache.put(uniqueSourceIdentifiers, result);
+ // Deduplicate concurrent loads
+ final EffectiveModelContext existing;
+ try {
+ existing = cache.get(uniqueSourceIdentifiers, () -> result);
+ } catch (ExecutionException e) {
+ LOG.warn("Failed to recheck result with cache, will use computed value", e);
+ rf.set(result);
+ return;
+ }
+
+ rf.set(existing);
}
@Override
public void onFailure(final Throwable cause) {
LOG.debug("Failed to assemble sources", cause);
+ rf.setException(cause);
}
}, MoreExecutors.directExecutor());
- return cf;
+ return rf;
}
- private ListenableFuture<ASTSchemaSource> requestSource(final @NonNull SourceIdentifier identifier) {
- return repository.getSchemaSource(identifier, ASTSchemaSource.class);
+ // Asynchronously fetch the source from the repository in intermediate-representation
+ // (IR) form, replacing the previous ANTLR-AST representation.
+ private ListenableFuture<IRSchemaSource> requestSource(final @NonNull SourceIdentifier identifier) {
+ return repository.getSchemaSource(identifier, IRSchemaSource.class);
}
/**
return ImmutableList.copyOf(uniqueSourceIdentifiers);
}
- private static final class SourceIdMismatchDetector implements Function<List<ASTSchemaSource>,
- List<ASTSchemaSource>> {
+ @SuppressModernizer
+ private static final class SourceIdMismatchDetector implements Function<List<IRSchemaSource>,
+ List<IRSchemaSource>> {
private final List<SourceIdentifier> sourceIdentifiers;
SourceIdMismatchDetector(final List<SourceIdentifier> sourceIdentifiers) {
}
@Override
- public List<ASTSchemaSource> apply(final List<ASTSchemaSource> input) {
- final Map<SourceIdentifier, ASTSchemaSource> filtered = new LinkedHashMap<>();
+ public List<IRSchemaSource> apply(final List<IRSchemaSource> input) {
+ final Map<SourceIdentifier, IRSchemaSource> filtered = new LinkedHashMap<>();
for (int i = 0; i < input.size(); i++) {
final SourceIdentifier expectedSId = sourceIdentifiers.get(i);
- final ASTSchemaSource astSchemaSource = input.get(i);
- final SourceIdentifier realSId = astSchemaSource.getIdentifier();
+ final IRSchemaSource irSchemaSource = input.get(i);
+ final SourceIdentifier realSId = irSchemaSource.getIdentifier();
if (!expectedSId.equals(realSId)) {
LOG.warn("Source identifier mismatch for module \"{}\", requested as {} but actually is {}. "
LOG.warn("Duplicate source for module {} detected in reactor", realSId);
}
- filtered.put(realSId, astSchemaSource);
+ filtered.put(realSId, irSchemaSource);
}
return ImmutableList.copyOf(filtered.values());
}
}
- private static final class AssembleSources implements AsyncFunction<List<ASTSchemaSource>, EffectiveModelContext> {
+ private static final class AssembleSources implements AsyncFunction<List<IRSchemaSource>, EffectiveModelContext> {
+ private final @NonNull YangParserFactory parserFactory;
private final @NonNull SchemaContextFactoryConfiguration config;
- private final @NonNull Function<ASTSchemaSource, SourceIdentifier> getIdentifier;
+ private final @NonNull Function<IRSchemaSource, SourceIdentifier> getIdentifier;
- private AssembleSources(final @NonNull SchemaContextFactoryConfiguration config) {
+ // Capture the parser factory used to build the effective model together with the
+ // configuration. Both fields are @NonNull, so fail fast on null — consistent with
+ // the requireNonNull checks in the enclosing class's constructor.
+ private AssembleSources(final @NonNull YangParserFactory parserFactory,
+ final @NonNull SchemaContextFactoryConfiguration config) {
+ this.parserFactory = requireNonNull(parserFactory);
- this.config = config;
+ this.config = requireNonNull(config);
+ // SemVer mode keys sources by (name, revision, semantic version); any other
+ // mode uses the plain source identifier.
switch (config.getStatementParserMode()) {
case SEMVER_MODE:
- this.getIdentifier = ASTSchemaSource::getSemVerIdentifier;
+ this.getIdentifier = AssembleSources::getSemVerIdentifier;
break;
default:
- this.getIdentifier = ASTSchemaSource::getIdentifier;
+ this.getIdentifier = IRSchemaSource::getIdentifier;
}
}
@Override
- public FluentFuture<EffectiveModelContext> apply(final List<ASTSchemaSource> sources)
+ public FluentFuture<EffectiveModelContext> apply(final List<IRSchemaSource> sources)
throws SchemaResolutionException, ReactorException {
- final Map<SourceIdentifier, ASTSchemaSource> srcs = Maps.uniqueIndex(sources, getIdentifier);
+ final Map<SourceIdentifier, IRSchemaSource> srcs = Maps.uniqueIndex(sources, getIdentifier);
final Map<SourceIdentifier, YangModelDependencyInfo> deps =
- Maps.transformValues(srcs, ASTSchemaSource::getDependencyInformation);
+ Maps.transformValues(srcs, YangModelDependencyInfo::forIR);
LOG.debug("Resolving dependency reactor {}", deps);
res.getResolvedSources(), res.getUnsatisfiedImports());
}
- final BuildAction reactor = DefaultReactors.defaultReactor().newBuild(statementParserMode);
- config.getSupportedFeatures().ifPresent(reactor::setSupportedFeatures);
- config.getModulesDeviatedByModules().ifPresent(reactor::setModulesWithSupportedDeviations);
+ final YangParser parser = parserFactory.createParser(statementParserMode);
+ config.getSupportedFeatures().ifPresent(parser::setSupportedFeatures);
+ config.getModulesDeviatedByModules().ifPresent(parser::setModulesWithSupportedDeviations);
- for (final Entry<SourceIdentifier, ASTSchemaSource> e : srcs.entrySet()) {
- final ASTSchemaSource ast = e.getValue();
- final ParserRuleContext parserRuleCtx = ast.getAST();
- checkArgument(parserRuleCtx instanceof StatementContext, "Unsupported context class %s for source %s",
- parserRuleCtx.getClass(), e.getKey());
-
- reactor.addSource(YangStatementStreamSource.create(e.getKey(), (StatementContext) parserRuleCtx,
- ast.getSymbolicName().orElse(null)));
+ for (final Entry<SourceIdentifier, IRSchemaSource> entry : srcs.entrySet()) {
+ try {
+ parser.addSource(entry.getValue());
+ } catch (YangSyntaxErrorException | IOException e) {
+ throw new SchemaResolutionException("Failed to add source " + entry.getKey(), e);
+ }
}
final EffectiveModelContext schemaContext;
try {
- schemaContext = reactor.buildEffective();
- } catch (final ReactorException ex) {
- throw new SchemaResolutionException("Failed to resolve required models", ex.getSourceIdentifier(), ex);
+ schemaContext = parser.buildEffectiveModel();
+ } catch (final YangParserException e) {
+ throw new SchemaResolutionException("Failed to resolve required models", e);
}
return immediateFluentFuture(schemaContext);
}
+
+ // Derive the SemVer-aware identifier for a source: use the semantic version
+ // extracted from the source's root statement (if any); when none is declared and
+ // the existing identifier already is a SemVerSourceIdentifier, reuse it unchanged.
+ private static SemVerSourceIdentifier getSemVerIdentifier(final IRSchemaSource source) {
+ final SourceIdentifier identifier = source.getIdentifier();
+ final SemVer semver = YangModelDependencyInfo.findSemanticVersion(source.getRootStatement(), identifier);
+ if (identifier instanceof SemVerSourceIdentifier && semver == null) {
+ return (SemVerSourceIdentifier) identifier;
+ }
+
+ // NOTE(review): semver may be null here; assumes SemVerSourceIdentifier.create
+ // accepts a null version — confirm against its contract.
+ return SemVerSourceIdentifier.create(identifier.getName(), identifier.getRevision(), semver);
+ }
}
}