private static final class WaitInQueueExecutionHandler implements RejectedExecutionHandler {
@Override
public void rejectedExecution(final Runnable r, final ThreadPoolExecutor executor) {
- if( executor.isShutdown() ) {
+ if (executor.isShutdown()) {
throw new RejectedExecutionException( "Executor has been shutdown." );
}
executor.shutdown();
try {
- if(!executor.awaitTermination(timeout, unit)) {
+ if (!executor.awaitTermination(timeout, unit)) {
executor.shutdownNow();
}
} catch( InterruptedException e ) {
task = super.poll( waitTime, TimeUnit.MILLISECONDS );
totalWaitTime -= POLL_WAIT_TIME_IN_MS;
- if( totalWaitTime <= 0 ) {
+ if (totalWaitTime <= 0) {
break;
}
public void submitNotification( final L listener, final N notification )
throws RejectedExecutionException {
- if( notification == null ) {
+ if (notification == null) {
return;
}
public void submitNotifications( final L listener, final Iterable<N> notifications )
throws RejectedExecutionException {
- if( notifications == null || listener == null ) {
+ if (notifications == null || listener == null) {
return;
}
- if( LOG.isTraceEnabled() ) {
+ if (LOG.isTraceEnabled()) {
LOG.trace( "{}: submitNotifications for listener {}: {}",
name, listener.toString(), notifications );
}
// will occur.
try {
- while( true ) {
+ while (true) {
NotificationTask existingTask = listenerCache.get( key );
- if( existingTask == null || !existingTask.submitNotifications( notifications ) ) {
+ if (existingTask == null || !existingTask.submitNotifications( notifications )) {
// Either there's no existing task or we couldn't add our notifications to the
// existing one because it's in the process of exiting and removing itself from
// shouldn't be called concurrently for the same listener as that would violate
// notification ordering. In any case loop back up and try again.
- if( newNotificationTask == null ) {
+ if (newNotificationTask == null) {
newNotificationTask = new NotificationTask( key, notifications );
}
existingTask = listenerCache.putIfAbsent( key, newNotificationTask );
- if( existingTask == null ) {
+ if (existingTask == null) {
// We were able to put our new task - now submit it to the executor and
// we're done. If it throws a RejectedExecutionException, let that propagate
name, listener.toString() );
}
- if( LOG.isTraceEnabled() ) {
+ if (LOG.isTraceEnabled()) {
LOG.trace( "{}: submitNotifications dine for listener {}",
name, listener.toString() );
}
*/
public List<ListenerNotificationQueueStats> getListenerNotificationQueueStats() {
List<ListenerNotificationQueueStats> statsList = new ArrayList<>( listenerCache.size() );
- for( NotificationTask task: listenerCache.values() ) {
+ for (NotificationTask task: listenerCache.values()) {
statsList.add( new ListenerNotificationQueueStats(
task.listenerKey.toString(), task.notificationQueue.size() ) );
}
this.listenerKey = listenerKey;
this.notificationQueue = new LinkedBlockingQueue<>( maxQueueCapacity );
- for( N notification: notifications ) {
+ for (N notification: notifications) {
this.notificationQueue.add( notification );
}
}
// Check the done flag - if true then #run is in the process of exiting so return
// false to indicate such. Otherwise, offer the notifications to the queue.
- if( done ) {
+ if (done) {
return false;
}
- for( N notification: notifications ) {
+ for (N notification: notifications) {
boolean notificationOfferAttemptSuccess = false;
// The offer is attempted for up to 10 minutes, with a status message printed each minute
for (int notificationOfferAttempts = 0;
- notificationOfferAttempts < MAX_NOTIFICATION_OFFER_ATTEMPTS; notificationOfferAttempts++ ) {
+ notificationOfferAttempts < MAX_NOTIFICATION_OFFER_ATTEMPTS; notificationOfferAttempts++) {
// Try to offer for up to a minute and log a message if it times out.
- if( LOG.isDebugEnabled() ) {
+ if (LOG.isDebugEnabled()) {
LOG.debug( "{}: Offering notification to the queue for listener {}: {}",
name, listenerKey.toString(), notification );
}
- if( notificationOfferAttemptSuccess = notificationQueue.offer(
- notification, 1, TimeUnit.MINUTES ) ) {
+ if (notificationOfferAttemptSuccess = notificationQueue.offer(
+ notification, 1, TimeUnit.MINUTES)) {
break;
}
try {
// Loop until we've dispatched all the notifications in the queue.
- while( true ) {
+ while (true) {
// Get the notification at the head of the queue, waiting a little bit for one
// to get offered.
N notification = notificationQueue.poll( 10, TimeUnit.MILLISECONDS );
- if( notification == null ) {
+ if (notification == null) {
// The queue is empty - try to get the queuingLock. If we can't get the lock
// then #submitNotifications is in the process of offering to the queue so
// we'll loop back up and poll the queue again.
- if( queuingLock.tryLock() ) {
+ if (queuingLock.tryLock()) {
try {
// Check the queuedNotifications flag to see if #submitNotifications
// Once we set the done flag and unlock, calls to
// #submitNotifications will fail and a new task will be created.
- if( !queuedNotifications ) {
+ if (!queuedNotifications) {
done = true;
break;
}
private void notifyListener( N notification ) {
- if( notification == null ) {
+ if (notification == null) {
return;
}
try {
- if( LOG.isDebugEnabled() ) {
+ if (LOG.isDebugEnabled()) {
LOG.debug( "{}: Invoking listener {} with notification: {}",
name, listenerKey.toString(), notification );
}
@Override
public boolean offer( final E e, final long timeout, final TimeUnit unit ) throws InterruptedException {
- if( super.offer( e, timeout, unit ) ) {
+ if (super.offer( e, timeout, unit )) {
updateLargestQueueSize();
return true;
}
@Override
public boolean offer( final E e ) {
- if( super.offer( e ) ) {
+ if (super.offer( e )) {
updateLargestQueueSize();
return true;
}
@After
public void tearDown() {
- if( listenerExecutor != null ) {
+ if (listenerExecutor != null) {
listenerExecutor.shutdownNow();
}
- if( testExecutor != null ) {
+ if (testExecutor != null) {
testExecutor.shutdownNow();
}
}
assertTrue( "ListenableFuture callback was not notified of onSuccess",
futureNotifiedLatch.await( 5, TimeUnit.SECONDS ) );
- if( assertError.get() != null ) {
+ if (assertError.get() != null) {
throw assertError.get();
}
assertTrue( "ListenableFuture callback was not notified of onSuccess",
futureNotifiedLatch.await( 5, TimeUnit.SECONDS ) );
- if( assertError.get() != null ) {
+ if (assertError.get() != null) {
throw assertError.get();
}
}
return executor.submit( new Callable<Void>() {
@Override
public Void call() throws Exception {
- if( blockingLatch != null ) {
+ if (blockingLatch != null) {
Uninterruptibles.awaitUninterruptibly( blockingLatch );
}
return null;
return executor.submit( new Runnable() {
@Override
public void run() {
- if( blockingLatch != null ) {
+ if (blockingLatch != null) {
Uninterruptibles.awaitUninterruptibly( blockingLatch );
}
}
return executor.submit( new Runnable() {
@Override
public void run() {
- if( blockingLatch != null ) {
+ if (blockingLatch != null) {
Uninterruptibles.awaitUninterruptibly( blockingLatch );
}
}
@After
public void tearDown() {
- if( executor != null ) {
+ if (executor != null) {
executor.shutdownNow();
}
}
executor.execute( new Task( tasksRunLatch, blockLatch ) );
- for( int i = 0; i < nTasks - 1; i++ ) {
+ for (int i = 0; i < nTasks - 1; i++) {
executor.execute( new Task( null, null, null, null, 0 ) );
}
executor.execute( new Task( tasksRunLatch, blockLatch ) );
- for( int i = 0; i < nTasks - 1; i++ ) {
+ for (int i = 0; i < nTasks - 1; i++) {
try {
executor.execute( new Task( null, null, null, null, 0 ) );
fail( "Expected RejectedExecutionException" );
@After
public void tearDown() {
- if( executor != null ) {
+ if (executor != null) {
executor.shutdownNow();
}
}
assertTrue( "Task did not complete - executor likely deadlocked",
futureCompletedLatch.await( 5, TimeUnit.SECONDS ) );
- if( caughtEx.get() != null ) {
+ if (caughtEx.get() != null) {
throw caughtEx.get();
}
}
@Override
public Throwable extractWrappedTestEx( final Exception from ) {
- if( from instanceof ExecutionException ) {
+ if (from instanceof ExecutionException) {
return ((ExecutionException)from).getCause();
}
@Override
public Throwable extractWrappedTestEx( final Exception from ) {
- if( from instanceof ExecutionException ) {
+ if (from instanceof ExecutionException) {
return ((ExecutionException)from).getCause();
}
assertNotNull( "Expected returned exception is null", expectedTestEx );
assertEquals( "Exception type", TestException.class, expectedTestEx.getClass() );
- if( cause instanceof TestException ) {
+ if (cause instanceof TestException) {
assertNull( "Expected null cause", expectedTestEx.getCause() );
} else {
assertSame( "TestException cause", cause, expectedTestEx.getCause() );
thread.interrupt();
assertEquals( "get call completed", true, doneLatch.await( 5, TimeUnit.SECONDS ) );
- if( assertError.get() != null ) {
+ if (assertError.get() != null) {
throw assertError.get();
}
}
void onNotification( N data ) {
try {
- if( sleepTime > 0 ) {
+ if (sleepTime > 0) {
Uninterruptibles.sleepUninterruptibly( sleepTime, TimeUnit.MILLISECONDS );
}
- if( cacheNotifications ) {
+ if (cacheNotifications) {
actual.add( data );
}
RuntimeException localRuntimeEx = runtimeEx;
- if( localRuntimeEx != null ) {
+ if (localRuntimeEx != null) {
runtimeEx = null;
throw localRuntimeEx;
}
Error localJvmError = jvmError;
- if( localJvmError != null ) {
+ if (localJvmError != null) {
jvmError = null;
throw localJvmError;
}
void verifyNotifications() {
boolean done = Uninterruptibles.awaitUninterruptibly( latch, 10, TimeUnit.SECONDS );
- if( !done ) {
+ if (!done) {
long actualCount = latch.getCount();
fail( name + ": Received " + (expCount - actualCount) +
" notifications. Expected " + expCount );
@After
public void tearDown() {
- if( queueExecutor != null ) {
+ if (queueExecutor != null) {
queueExecutor.shutdownNow();
}
}
List<Integer> expNotifications = Lists.newArrayListWithCapacity( nNotifications );
expNotifications.addAll( Arrays.asList( 1, 2, 3, 4, 5, 6 ) );
- for( int i = 1; i <= nNotifications - initialCount; i++ ) {
+ for (int i = 1; i <= nNotifications - initialCount; i++) {
Integer v = Integer.valueOf( initialCount + i );
expNotifications.add( v );
manager.submitNotification( listener, v );
" notifications each..." );
final Integer[] notifications = new Integer[nNotifications];
- for( int i = 1; i <= nNotifications; i++ ) {
+ for (int i = 1; i <= nNotifications; i++) {
notifications[i-1] = Integer.valueOf( i );
}
Stopwatch stopWatch = Stopwatch.createStarted();
List<TestListener<Integer>> listeners = Lists.newArrayList();
- for( int i = 1; i <= nListeners; i++ ) {
+ for (int i = 1; i <= nListeners; i++) {
final TestListener<Integer> listener =
i == 2 ? new TestListener2<>(nNotifications, i) :
i == 3 ? new TestListener3<>(nNotifications, i) :
new Thread( new Runnable() {
@Override
public void run() {
- for( int j = 1; j <= nNotifications; j++ ) {
+ for (int j = 1; j <= nNotifications; j++) {
final Integer n = notifications[j-1];
stagingExecutor.execute( new Runnable() {
@Override
}
try {
- for( TestListener<Integer> listener: listeners ) {
+ for (TestListener<Integer> listener: listeners) {
listener.verifyNotifications();
System.out.println( listener.name + " succeeded" );
}
@After
public void tearDown() {
- if( executor != null ) {
+ if (executor != null) {
executor.shutdownNow();
}
}
executor = SpecialExecutors.newBoundedFastThreadPool( 1, 1, "TestPool" );
- for( int i = 0; i < 5; i++ ) {
+ for (int i = 0; i < 5; i++) {
executor.execute( new Task( null, null, null, null,
TimeUnit.MICROSECONDS.convert( 5, TimeUnit.SECONDS ) ) );
}
ExecutorService executor = SpecialExecutors.newBoundedCachedThreadPool( 1, 1, "TestPool" );
- for( int i = 0; i < 5; i++ ) {
+ for (int i = 0; i < 5; i++) {
executor.execute( new Task( null, null, null, null,
TimeUnit.MICROSECONDS.convert( 5, TimeUnit.SECONDS ) ) );
}
new Thread() {
@Override
public void run() {
- for( int i = 0; i < numTasksToRun; i++ ) {
-// if(i%100 == 0) {
+ for (int i = 0; i < numTasksToRun; i++) {
+// if (i%100 == 0) {
// Uninterruptibles.sleepUninterruptibly( 20, TimeUnit.MICROSECONDS );
// }
stopWatch.stop();
- if( !done ) {
+ if (!done) {
fail( (numTasksToRun - tasksRunLatch.getCount()) + " tasks out of " +
numTasksToRun + " executed" );
}
- if( threadError.get() != null ) {
+ if (threadError.get() != null) {
throw threadError.get();
}
System.out.println( taskCountPerThread.size() + " threads used:" );
- for( Map.Entry<Thread, AtomicLong> e : taskCountPerThread.entrySet() ) {
+ for (Map.Entry<Thread, AtomicLong> e : taskCountPerThread.entrySet()) {
System.out.println( " " + e.getKey().getName() + " - " + e.getValue() + " tasks" );
}
public void run() {
try {
try {
- if( delay > 0 ) {
+ if (delay > 0) {
TimeUnit.MICROSECONDS.sleep( delay );
- } else if( blockLatch != null ) {
+ } else if (blockLatch != null) {
blockLatch.await();
}
} catch( InterruptedException e ) {}
- if( expThreadPrefix != null ) {
+ if (expThreadPrefix != null) {
assertEquals( "Thread name starts with " + expThreadPrefix, true,
Thread.currentThread().getName().startsWith( expThreadPrefix ) );
}
- if( taskCountPerThread != null ) {
+ if (taskCountPerThread != null) {
AtomicLong count = taskCountPerThread.get( Thread.currentThread() );
- if( count == null ) {
+ if (count == null) {
count = new AtomicLong( 0 );
AtomicLong prev = taskCountPerThread.putIfAbsent( Thread.currentThread(), count );
- if( prev != null ) {
+ if (prev != null) {
count = prev;
}
}
}
} catch( AssertionError e ) {
- if( threadError != null ) {
+ if (threadError != null) {
threadError.set( e );
}
} finally {
- if( tasksRunLatch != null ) {
+ if (tasksRunLatch != null) {
tasksRunLatch.countDown();
}
}
if (node instanceof LeafSetEntryNode) {
final LeafSetEntryNode<?> nodeAsLeafList = (LeafSetEntryNode<?>)node;
final QName name = nodeAsLeafList.getIdentifier().getNodeType();
- if(writer instanceof NormalizedNodeStreamAttributeWriter) {
+ if (writer instanceof NormalizedNodeStreamAttributeWriter) {
((NormalizedNodeStreamAttributeWriter) writer).leafSetEntryNode(name, nodeAsLeafList.getValue(),
nodeAsLeafList.getAttributes());
} else {
return true;
} else if (node instanceof LeafNode) {
final LeafNode<?> nodeAsLeaf = (LeafNode<?>)node;
- if(writer instanceof NormalizedNodeStreamAttributeWriter) {
+ if (writer instanceof NormalizedNodeStreamAttributeWriter) {
((NormalizedNodeStreamAttributeWriter) writer).leafNode(nodeAsLeaf.getIdentifier(), nodeAsLeaf.getValue(), nodeAsLeaf.getAttributes());
} else {
writer.leafNode(nodeAsLeaf.getIdentifier(), nodeAsLeaf.getValue());
protected boolean wasProcessedAsCompositeNode(final NormalizedNode<?, ?> node) throws IOException {
if (node instanceof ContainerNode) {
final ContainerNode n = (ContainerNode) node;
- if(writer instanceof NormalizedNodeStreamAttributeWriter) {
+ if (writer instanceof NormalizedNodeStreamAttributeWriter) {
((NormalizedNodeStreamAttributeWriter) writer).startContainerNode(n.getIdentifier(), childSizeHint(n.getValue()), n.getAttributes());
} else {
writer.startContainerNode(n.getIdentifier(), childSizeHint(n.getValue()));
@Override
protected boolean writeMapEntryNode(final MapEntryNode node) throws IOException {
final NormalizedNodeStreamWriter nnWriter = getWriter();
- if(nnWriter instanceof NormalizedNodeStreamAttributeWriter) {
+ if (nnWriter instanceof NormalizedNodeStreamAttributeWriter) {
((NormalizedNodeStreamAttributeWriter) nnWriter).startMapEntryNode(node.getIdentifier(), childSizeHint(node.getValue()), node.getAttributes());
} else {
nnWriter.startMapEntryNode(node.getIdentifier(), childSizeHint(node.getValue()));
T current = tree;
int i = 1;
- for(PathArgument pathArg : path.getPathArguments()) {
+ for (PathArgument pathArg : path.getPathArguments()) {
Optional<T> potential = current.getChild(pathArg);
if (!potential.isPresent()) {
throw new IllegalArgumentException(String.format("Child %s is not present in tree.",
tracker.endNode();
context = context.endNode(codecs.getSchemaContext(), writer);
- if(context instanceof JSONStreamWriterRootContext) {
+ if (context instanceof JSONStreamWriterRootContext) {
context.emitEnd(writer);
}
}
* without wrapping array.
*
*/
- if(isArray(parent)) {
+ if (isArray(parent)) {
parent = newArrayEntry(parent);
}
while (in.hasNext()) {
StatementStreamSource[] sources = new StatementStreamSource[files.length];
- for(int i = 0; i<files.length; i++) {
+ for (int i = 0; i<files.length; i++) {
sources[i] = new YangStatementSourceImpl(new FileInputStream(files[i]));
}
final Object parent = getParent();
SchemaNode schema = null;
final QName qname = name.getNodeType();
- if(parent instanceof DataNodeContainer) {
+ if (parent instanceof DataNodeContainer) {
schema = ((DataNodeContainer)parent).getDataChildByName(qname);
- if(schema == null && parent instanceof GroupingDefinition) {
+ if (schema == null && parent instanceof GroupingDefinition) {
schema = ((GroupingDefinition) parent);
}
- if(schema == null && parent instanceof NotificationDefinition) {
+ if (schema == null && parent instanceof NotificationDefinition) {
schema = ((NotificationDefinition) parent);
}
- } else if(parent instanceof ChoiceSchemaNode) {
+ } else if (parent instanceof ChoiceSchemaNode) {
schema = findChildInCases((ChoiceSchemaNode) parent, qname);
} else {
throw new IllegalStateException("Unsupported schema type "+ parent.getClass() +" on stack.");
private static SchemaNode findChildInCases(final ChoiceSchemaNode parent, final QName qname) {
DataSchemaNode schema = null;
- for(final ChoiceCaseNode caze : parent.getCases()) {
+ for (final ChoiceCaseNode caze : parent.getCases()) {
final DataSchemaNode potential = caze.getDataChildByName(qname);
- if(potential != null) {
+ if (potential != null) {
schema = potential;
break;
}
private static SchemaNode findCaseByChild(final ChoiceSchemaNode parent, final QName qname) {
DataSchemaNode schema = null;
- for(final ChoiceCaseNode caze : parent.getCases()) {
+ for (final ChoiceCaseNode caze : parent.getCases()) {
final DataSchemaNode potential = caze.getDataChildByName(qname);
- if(potential != null) {
+ if (potential != null) {
schema = caze;
break;
}
Object parent = getParent();
Preconditions.checkArgument(parent instanceof AugmentationTarget, "Augmentation not allowed under %s", parent);
- if(parent instanceof ChoiceSchemaNode) {
+ if (parent instanceof ChoiceSchemaNode) {
final QName name = Iterables.get(identifier.getPossibleChildNames(), 0);
parent = findCaseByChild((ChoiceSchemaNode) parent, name);
}
Preconditions.checkArgument(parent instanceof DataNodeContainer, "Augmentation allowed only in DataNodeContainer",parent);
final AugmentationSchema schema = SchemaUtils.findSchemaForAugment((AugmentationTarget) parent, identifier.getPossibleChildNames());
final HashSet<DataSchemaNode> realChildSchemas = new HashSet<>();
- for(final DataSchemaNode child : schema.getChildNodes()) {
+ for (final DataSchemaNode child : schema.getChildNodes()) {
realChildSchemas.add(((DataNodeContainer) parent).getDataChildByName(child.getQName()));
}
final AugmentationSchema resolvedSchema = new EffectiveAugmentationSchema(schema, realChildSchemas);
private void writeStartElement(final QName qname) throws XMLStreamException {
String ns = qname.getNamespace().toString();
writer.writeStartElement(XMLConstants.DEFAULT_NS_PREFIX, qname.getLocalName(), ns);
- if(writer.getNamespaceContext() != null) {
+ if (writer.getNamespaceContext() != null) {
String parentNs = writer.getNamespaceContext().getNamespaceURI(XMLConstants.DEFAULT_NS_PREFIX);
if (!ns.equals(parentNs)) {
writer.writeDefaultNamespace(ns);
try {
final String namespace = qNameStringEntry.getKey().getNamespace().toString();
- if(Strings.isNullOrEmpty(namespace)) {
+ if (Strings.isNullOrEmpty(namespace)) {
writer.writeAttribute(qNameStringEntry.getKey().getLocalName(), qNameStringEntry.getValue());
} else {
final String prefix = randomPrefix.encodePrefix(qNameStringEntry.getKey().getNamespace());
public static Optional<ModifyAction> getModifyOperationFromAttributes(final Element xmlElement) {
Attr attributeNodeNS = xmlElement.getAttributeNodeNS(OPERATION_ATTRIBUTE_QNAME.getNamespace().toString(), OPERATION_ATTRIBUTE_QNAME.getLocalName());
- if(attributeNodeNS == null) {
+ if (attributeNodeNS == null) {
return Optional.absent();
}
}
public void writeInstanceIdentifier(final XMLStreamWriter writer, final YangInstanceIdentifier value) throws XMLStreamException {
- if(schemaContext.isPresent()) {
+ if (schemaContext.isPresent()) {
RandomPrefixInstanceIdentifierSerializer iiCodec = new RandomPrefixInstanceIdentifierSerializer(schemaContext.get());
String serializedValue = iiCodec.serialize(value);
writeNamespaceDeclarations(writer,iiCodec.getPrefixes());
final Set<ModuleImport> imports = module.getImports();
for (final ModuleImport moduleImport : imports) {
- if(moduleImport.getPrefix().equals(prefix)) {
+ if (moduleImport.getPrefix().equals(prefix)) {
return moduleImport;
}
}
}
}
}
- // FIXME if(node instance of UnkeyedListNode ...
+ // FIXME if (node instance of UnkeyedListNode ...
}
private static LeafRefContext findReferencingCtxUnderChoice(
@Override
public void startMapEntryNode(final NodeIdentifierWithPredicates identifier, final int childSizeHint) {
- if(!(getCurrent() instanceof NormalizedNodeResultBuilder)) {
+ if (!(getCurrent() instanceof NormalizedNodeResultBuilder)) {
Preconditions.checkArgument(getCurrent() instanceof ImmutableMapNodeBuilder || getCurrent() instanceof ImmutableOrderedMapNodeBuilder);
}
abstract boolean isMixin();
public void addModifyOpIfPresent(final Optional<Entry<QName,ModifyAction>> operation, final AttributesBuilder<?> builder) {
- if(operation.isPresent()) {
+ if (operation.isPresent()) {
builder.withAttributes(Collections.singletonMap(operation.get().getKey(), modifyOperationToXmlString(operation.get().getValue())));
}
}
@Override
public NormalizedNode<?, ?> create(final YangInstanceIdentifier instanceId, final Optional<NormalizedNode<?, ?>> deepestChild, final Optional<Entry<QName,ModifyAction>> operation) {
- if(deepestChild.isPresent()) {
+ if (deepestChild.isPresent()) {
Preconditions.checkState(deepestChild instanceof AnyXmlNode);
final NormalizedNodeAttrBuilder<NodeIdentifier, DOMSource, AnyXmlNode> anyXmlBuilder =
Builders.anyXmlBuilder().withNodeIdentifier(getIdentifier()).withValue(((AnyXmlNode) deepestChild).getValue());
final PathArgument pathArgument = instanceId.getPathArguments().get(0);
final NormalizedNodeAttrBuilder<? extends PathArgument, Object, ? extends NormalizedNode<? extends PathArgument, Object>> builder = getBuilder(pathArgument);
- if(deepestChild.isPresent()) {
+ if (deepestChild.isPresent()) {
builder.withValue(deepestChild.get().getValue());
}
write(qNameToNodes.get(schemaNode.getQName()), schemaNode);
}
}
- } else if(parentSchemaNode instanceof ChoiceSchemaNode) {
+ } else if (parentSchemaNode instanceof ChoiceSchemaNode) {
for (ChoiceCaseNode ccNode : ((ChoiceSchemaNode) parentSchemaNode).getCases()) {
for (DataSchemaNode dsn : ccNode.getChildNodes()) {
if (qNameToNodes.containsKey(dsn.getQName())) {
for (ChoiceCaseNode choiceCaseNode : schema.getCases()) {
schemaForAugment = findAugment(choiceCaseNode, qNames);
- if(schemaForAugment.isPresent()) {
+ if (schemaForAugment.isPresent()) {
break;
}
}
}
}));
- if(qNamesFromAugment.equals(qNames)) {
+ if (qNamesFromAugment.equals(qNames)) {
return Optional.of(augment);
}
}
}
for (AugmentationSchema augmentationSchema : ((AugmentationTarget) schema).getAvailableAugmentations()) {
- if(augmentationSchema.getDataChildByName(childSchema.getQName()) != null) {
+ if (augmentationSchema.getDataChildByName(childSchema.getQName()) != null) {
return true;
}
}
Set<QName> allChildNodes = Sets.newHashSet();
for (DataSchemaNode childSchema : nodeContainer.getChildNodes()) {
- if(childSchema instanceof ChoiceSchemaNode) {
+ if (childSchema instanceof ChoiceSchemaNode) {
for (ChoiceCaseNode choiceCaseNode : ((ChoiceSchemaNode) childSchema).getCases()) {
allChildNodes.addAll(getChildNodesRecursive(choiceCaseNode));
}
- } else if(childSchema instanceof AugmentationSchema || childSchema instanceof ChoiceCaseNode) {
+ } else if (childSchema instanceof AugmentationSchema || childSchema instanceof ChoiceCaseNode) {
allChildNodes.addAll(getChildNodesRecursive((DataNodeContainer) childSchema));
}
else {
Set<DataSchemaNode> realChildNodes = Sets.newHashSet();
- if(targetSchema instanceof DataNodeContainer) {
+ if (targetSchema instanceof DataNodeContainer) {
realChildNodes = getRealSchemasForAugment((DataNodeContainer)targetSchema, augmentSchema);
- } else if(targetSchema instanceof ChoiceSchemaNode) {
+ } else if (targetSchema instanceof ChoiceSchemaNode) {
for (DataSchemaNode dataSchemaNode : augmentSchema.getChildNodes()) {
for (ChoiceCaseNode choiceCaseNode : ((ChoiceSchemaNode) targetSchema).getCases()) {
- if(getChildNodesRecursive(choiceCaseNode).contains(dataSchemaNode.getQName())) {
+ if (getChildNodesRecursive(choiceCaseNode).contains(dataSchemaNode.getQName())) {
realChildNodes.add(choiceCaseNode.getDataChildByName(dataSchemaNode.getQName()));
}
}
currentAugmentChildNodes.add(dataSchemaNode.getQName());
}
- if(childToProcess.getPossibleChildNames().equals(currentAugmentChildNodes)){
+ if (childToProcess.getPossibleChildNames().equals(currentAugmentChildNodes)) {
return true;
}
}
SchemaNode current = Preconditions.checkNotNull(schemaContext);
for (final QName qname : path.getPathFromRoot()) {
SchemaNode child;
- if(current instanceof DataNodeContainer) {
+ if (current instanceof DataNodeContainer) {
child = ((DataNodeContainer) current).getDataChildByName(qname);
if (child == null && current instanceof SchemaContext) {
child = tryFindGroupings((SchemaContext) current, qname).orNull();
}
- if(child == null && current instanceof SchemaContext) {
+ if (child == null && current instanceof SchemaContext) {
child = tryFindNotification((SchemaContext) current, qname)
.or(tryFindRpc(((SchemaContext) current), qname)).orNull();
}
@Override
public DataContainerNodeBuilder<NodeIdentifier, ChoiceNode> withChild(final DataContainerChild<?, ?> child) {
- if(validator == null) {
+ if (validator == null) {
Optional<ChoiceCaseNode> detectedCaseOpt = SchemaUtils.detectCase(schema, child);
DataValidationException.checkLegalChild(detectedCaseOpt.isPresent(), child.getIdentifier(), schema);
validator = new DataNodeContainerValidator(detectedCaseOpt.get());
final PathArgument identifier = childId.getIdentifier();
// Augmentation nodes cannot be keys, and do not have to be present in childrenQNamesToPaths map
- if(isAugment(identifier)) {
+ if (isAugment(identifier)) {
continue;
}
@Override
public DataContainerNodeAttrBuilder<NodeIdentifierWithPredicates, MapEntryNode> withChild(final DataContainerChild<?, ?> child) {
// Augmentation nodes cannot be keys, and do not have to be present in childrenQNamesToPaths map
- if(!isAugment(child.getIdentifier())) {
+ if (!isAugment(child.getIdentifier())) {
childrenQNamesToPaths.put(child.getNodeType(), child.getIdentifier());
}
this.childNodes = getChildNodes(schema);
- if(schema instanceof AugmentationTarget) {
+ if (schema instanceof AugmentationTarget) {
for (AugmentationSchema augmentationSchema : ((AugmentationTarget) schema).getAvailableAugmentations()) {
augments.add(SchemaUtils.getNodeIdentifierForAugmentation(augmentationSchema));
}
Set<QName> allChildNodes = Sets.newHashSet();
for (DataSchemaNode childSchema : nodeContainer.getChildNodes()) {
- if(childSchema instanceof ChoiceCaseNode) {
+ if (childSchema instanceof ChoiceCaseNode) {
allChildNodes.addAll(getChildNodes((DataNodeContainer) childSchema));
} else if (!(childSchema instanceof AugmentationSchema)) {
allChildNodes.add(childSchema.getQName());
final List<Element> childNodesCollection = Lists.newArrayList();
final NodeList childNodes = node.getChildNodes();
for (int i = 0; i < childNodes.getLength(); i++) {
- if(childNodes.item(i) instanceof Element) {
+ if (childNodes.item(i) instanceof Element) {
childNodesCollection.add((Element) childNodes.item(i));
}
}
}
// Skip namespace definitions
- if(namespace.equals(XMLConstants.XMLNS_ATTRIBUTE_NS_URI)) {
+ if (namespace.equals(XMLConstants.XMLNS_ATTRIBUTE_NS_URI)) {
continue;
}
private static AugmentationSchema createAugmentProxy(final AugmentationSchema schema, final DataNodeContainer resolved) {
final Set<DataSchemaNode> realChildSchemas = new HashSet<>();
- for(final DataSchemaNode augChild : schema.getChildNodes()) {
+ for (final DataSchemaNode augChild : schema.getChildNodes()) {
realChildSchemas.add(resolved.getDataChildByName(augChild.getQName()));
}
return new EffectiveAugmentationSchema(schema, realChildSchemas);
@Override
void verifyStructure(final NormalizedNode<?, ?> writtenValue, final boolean verifyChildren) {
- if(verifyChildrenStructure() && verifyChildren) {
+ if (verifyChildrenStructure() && verifyChildren) {
enforceCases(writtenValue);
}
super.verifyStructure(writtenValue, verifyChildren);
ModifiedNode modification = rootNode;
int i = 1;
- for(final PathArgument pathArg : path.getPathArguments()) {
+ for (final PathArgument pathArg : path.getPathArguments()) {
final Optional<ModificationApplyOperation> potential = operation.getChild(pathArg);
if (!potential.isPresent()) {
throw new SchemaValidationFailedException(String.format("Child %s is not present in schema tree.",
private static SchemaAwareApplyOperation fromLeafListSchemaNode(final LeafListSchemaNode schemaNode, final TreeType treeType) {
final SchemaAwareApplyOperation op;
- if(schemaNode.isUserOrdered()) {
+ if (schemaNode.isUserOrdered()) {
op = new OrderedLeafSetModificationStrategy(schemaNode, treeType);
} else {
op = new UnorderedLeafSetModificationStrategy(schemaNode, treeType);
* it should not cause transaction to fail, since result of this merge
* leads to same data.
*/
- if(!original.get().getData().equals(current.get().getData())) {
+ if (!original.get().getData().equals(current.get().getData())) {
checkNotConflicting(path, original.get(), current.get());
}
}
final Optional<TreeNode> original = modification.getOriginal();
if (original.isPresent() && current.isPresent()) {
checkNotConflicting(path, original.get(), current.get());
- } else if(original.isPresent()) {
+ } else if (original.isPresent()) {
throw new ConflictingModificationAppliedException(path,"Node was deleted by other transaction.");
} else if (current.isPresent()) {
throw new ConflictingModificationAppliedException(path, "Node was created by other transaction.");
public static EnumTypeDefinition toEnumTypeDefinition( String... enums ) {
List<EnumTypeDefinition.EnumPair> enumList = Lists.newArrayList();
- for( String en: enums ) {
+ for (String en: enums) {
EnumTypeDefinition.EnumPair mockEnum = mock( EnumTypeDefinition.EnumPair.class );
when( mockEnum.getName() ).thenReturn( en );
enumList.add( mockEnum );
public static BitsTypeDefinition toBitsTypeDefinition( String... bits ) {
List<BitsTypeDefinition.Bit> bitList = Lists.newArrayList();
- for( String bit: bits ) {
+ for (String bit: bits) {
BitsTypeDefinition.Bit mockBit = mock( BitsTypeDefinition.Bit.class );
when( mockBit.getName() ).thenReturn( bit );
bitList.add( mockBit );
UnmodifiableCollection<?> collectionChildren = (UnmodifiableCollection<?>) minMaxListRead.get().getValue();
for (Object collectionChild : collectionChildren) {
- if(collectionChild.toString().contains(first)){
+ if (collectionChild.toString().contains(first)) {
assertTrue(collectionChild.toString().contains(first));
} else {
assertTrue(collectionChild.toString().contains(second));
StatementStreamSource[] sources = new StatementStreamSource[files.length];
- for(int i = 0; i<files.length; i++) {
+ for (int i = 0; i<files.length; i++) {
sources[i] = new YangStatementSourceImpl(new FileInputStream(files[i]));
}
writer.nextDataSchemaNode(schema);
if (schema.getKeyDefinition().isEmpty()) {
writer.startUnkeyedList(provideNodeIdentifier(), childSizeHint());
- } else if(schema.isUserOrdered()) {
+ } else if (schema.isUserOrdered()) {
writer.startOrderedMapNode(provideNodeIdentifier(), childSizeHint());
} else {
writer.startMapNode(provideNodeIdentifier(), childSizeHint());
final List<DataSchemaNode> ret = new ArrayList<>();
final ContainerSchemaNode input = getInput();
final ContainerSchemaNode output = getOutput();
- if(input != null) {
+ if (input != null) {
ret.add(input);
}
- if(output != null) {
+ if (output != null) {
ret.add(output);
}
return ret;
skipCurrentChar();
checkValid(!allCharactersConsumed(), "Identifier cannot end with '/'.");
QName name = nextQName();
- if(allCharactersConsumed() || SLASH == currentChar()) {
+ if (allCharactersConsumed() || SLASH == currentChar()) {
return computeIdentifier(name);
} else {
checkValid(PRECONDITION_START == currentChar(), "Last element must be identifier, predicate or '/'");
private DataSchemaContextNode<?> nextContextNode(QName name) {
current = current.getChild(name);
checkValid(current != null, "%s is not correct schema node identifier.",name);
- while(current.isMixin()) {
+ while (current.isMixin()) {
product.add(current.getIdentifier());
current = current.getChild(name);
}
checkValid(currentNode.isKeyedEntry(), "Entry %s does not allow specifying predicates.", name);
ImmutableMap.Builder<QName,Object> keyValues = ImmutableMap.builder();
- while(!allCharactersConsumed() && PRECONDITION_START == currentChar()) {
+ while (!allCharactersConsumed() && PRECONDITION_START == currentChar()) {
skipCurrentChar();
skipWhitespaces();
final QName key;
- if(DOT == currentChar()) {
+ if (DOT == currentChar()) {
key = null;
skipCurrentChar();
} else {
checkCurrentAndSkip(PRECONDITION_END, "Precondition must ends with ']'");
// Break-out from method for leaf-list case
- if(key == null && currentNode.isLeaf()) {
+ if (key == null && currentNode.isLeaf()) {
checkValid(offset == data.length(), "Leaf argument must be last argument of instance identifier.");
return new YangInstanceIdentifier.NodeWithValue(name, keyValue);
}
// Consume prefix or identifie
final String maybePrefix = nextIdentifier();
final String prefix,localName;
- if(COLON == currentChar()) {
+ if (COLON == currentChar()) {
// previous token is prefix;
prefix = maybePrefix;
skipCurrentChar();
}
private void nextSequenceEnd(CharMatcher matcher) {
- while(!allCharactersConsumed() && matcher.matches(data.charAt(offset))) {
+ while (!allCharactersConsumed() && matcher.matches(data.charAt(offset))) {
offset++;
}
}
File actualGeneratedYangDir = new File(project.getBuild().getDirectory(),
CodeGeneratorArg.YANG_GENERATED_DIR.replace("target" + File.separator, ""));
- if(!actualGeneratedYangDir.equals(generatedYangDir)) {
+ if (!actualGeneratedYangDir.equals(generatedYangDir)) {
addYangsToMetaInf(project, yangFilesRootDir, excludedFiles, actualGeneratedYangDir);
}
}
static Map<QName,StatementDefinition> mapFrom(final Collection<ExtensionDefinition> definitions) {
final HashMap<QName,StatementDefinition> ret = new HashMap<>(definitions.size());
- for(final ExtensionDefinition def : definitions) {
+ for (final ExtensionDefinition def : definitions) {
final StatementDefinition value = from(def);
ret.put(value.getStatementName(), value);
}
}
private void emitMust(@Nullable final MustDefinition mustCondition) {
- if(mustCondition != null && mustCondition.getXpath() != null) {
+ if (mustCondition != null && mustCondition.getXpath() != null) {
writer.startMustNode(mustCondition.getXpath());
emitErrorMessageNode(mustCondition.getErrorMessage());
emitErrorAppTagNode(mustCondition.getErrorAppTag());
emitStatusNode(augmentation.getStatus());
emitDescriptionNode(augmentation.getDescription());
emitReferenceNode(augmentation.getReference());
- for(final UsesNode uses: augmentation.getUses()) {
+ for (final UsesNode uses: augmentation.getUses()) {
emitUsesNode(uses);
}
}
private void emitWhen(final RevisionAwareXPath revisionAwareXPath) {
- if(revisionAwareXPath != null) {
+ if (revisionAwareXPath != null) {
writer.startWhenNode(revisionAwareXPath);
writer.endNode();
}
emitDescriptionNode(rpc.getDescription());
emitReferenceNode(rpc.getReference());
- for(final TypeDefinition<?> typedef : rpc.getTypeDefinitions()) {
+ for (final TypeDefinition<?> typedef : rpc.getTypeDefinitions()) {
emitTypedefNode(typedef);
}
- for(final GroupingDefinition grouping : rpc.getGroupings()) {
+ for (final GroupingDefinition grouping : rpc.getGroupings()) {
emitGrouping(grouping);
}
emitInput(rpc.getInput());
private static <T> boolean isPrefix(final Iterable<T> prefix, final Iterable<T> other) {
final Iterator<T> prefixIt = prefix.iterator();
final Iterator<T> otherIt = other.iterator();
- while(prefixIt.hasNext()) {
- if(!otherIt.hasNext()) {
+ while (prefixIt.hasNext()) {
+ if (!otherIt.hasNext()) {
return false;
}
- if(!Objects.deepEquals(prefixIt.next(), otherIt.next())) {
+ if (!Objects.deepEquals(prefixIt.next(), otherIt.next())) {
return false;
}
}
public void writeArgument(final SchemaPath targetPath) {
checkArgumentApplicable();
final StringBuilder valueStr = new StringBuilder();
- if(targetPath.isAbsolute()) {
+ if (targetPath.isAbsolute()) {
valueStr.append("/");
}
final Iterator<QName> argIt = targetPath.getPathFromRoot().iterator();
- while(argIt.hasNext()) {
+ while (argIt.hasNext()) {
valueStr.append(toPrefixedString(argIt.next()));
- if(argIt.hasNext()) {
+ if (argIt.hasNext()) {
valueStr.append("/");
}
}
}
private @Nullable String ensureAndGetXmlNamespacePrefix(final URI namespace) {
- if(YangConstants.RFC6020_YANG_NAMESPACE.equals(namespace)) {
+ if (YangConstants.RFC6020_YANG_NAMESPACE.equals(namespace)) {
// YANG namespace does not have prefix if used in arguments.
return null;
// FIXME: declare prefix
prefix =prefixToNamespace.inverse().get(namespace);
}
- if(prefix == null) {
+ if (prefix == null) {
throw new IllegalArgumentException("Namespace " + namespace + " is not bound to imported prefixes.");
}
return prefix;
private static Map<String, URI> prefixToNamespace(final SchemaContext ctx, final Module module) {
final BiMap<String, URI> prefixMap = HashBiMap.create(module.getImports().size() + 1);
prefixMap.put(module.getPrefix(), module.getNamespace());
- for(final ModuleImport imp : module.getImports()) {
+ for (final ModuleImport imp : module.getImports()) {
final String prefix = imp.getPrefix();
final URI namespace = getModuleNamespace(ctx,imp.getModuleName());
prefixMap.put(prefix, namespace);
}
private static URI getModuleNamespace(final SchemaContext ctx, final String moduleName) {
- for(final Module module : ctx.getModules()) {
- if(moduleName.equals(module.getName())) {
+ for (final Module module : ctx.getModules()) {
+ if (moduleName.equals(module.getName())) {
return module.getNamespace();
}
}
checkSupportedRepresentation(representation);
- if(!storageDirectory.exists()) {
+ if (!storageDirectory.exists()) {
Preconditions.checkArgument(storageDirectory.mkdirs(), "Unable to create cache directory at %s", storageDirectory);
}
Preconditions.checkArgument(storageDirectory.exists());
private static void checkSupportedRepresentation(final Class<? extends SchemaSourceRepresentation> representation) {
for (final Class<? extends SchemaSourceRepresentation> supportedRepresentation : STORAGE_ADAPTERS.keySet()) {
- if(supportedRepresentation.isAssignableFrom(representation)) {
+ if (supportedRepresentation.isAssignableFrom(representation)) {
return;
}
}
@Override
public synchronized CheckedFuture<? extends T, SchemaSourceException> getSource(final SourceIdentifier sourceIdentifier) {
final File file = sourceIdToFile(sourceIdentifier, storageDirectory);
- if(file.exists() && file.canRead()) {
+ if (file.exists() && file.canRead()) {
LOG.trace("Source {} found in cache as {}", sourceIdentifier, file);
final SchemaSourceRepresentation restored = STORAGE_ADAPTERS.get(representation).restore(sourceIdentifier, file);
return Futures.immediateCheckedFuture(representation.cast(restored));
protected synchronized void offer(final T source) {
LOG.trace("Source {} offered to cache", source.getIdentifier());
final File file = sourceIdToFile(source);
- if(file.exists()) {
+ if (file.exists()) {
LOG.debug("Source {} already in cache as {}", source.getIdentifier(), file);
return;
}
fileName = com.google.common.io.Files.getNameWithoutExtension(fileName);
final Optional<SourceIdentifier> si = getSourceIdentifier(fileName);
- if(si.isPresent()) {
+ if (si.isPresent()) {
LOG.trace("Restoring cached file {} as {}", file, si.get());
cachedSchemas.add(si.get());
} else {
}
public static TypeDefinition<?> from(final TypeDefinition<?> type) {
- if(type instanceof ExtendedType) {
+ if (type instanceof ExtendedType) {
return from((ExtendedType) type);
}
return type;
if (baseType instanceof StringTypeDefinition) {
return new DerivedStringType(type);
}
- if(baseType instanceof UnionTypeDefinition) {
+ if (baseType instanceof UnionTypeDefinition) {
return new DerivedUnionType(type);
}
- if(baseType instanceof EnumTypeDefinition) {
+ if (baseType instanceof EnumTypeDefinition) {
return new DerivedEnumType(type);
}
- if(baseType instanceof BitsTypeDefinition) {
+ if (baseType instanceof BitsTypeDefinition) {
return new DerivedBitsType(type);
}
throw new IllegalArgumentException("Not supported base type of " + baseType.getClass());
private EnumerationType(final SchemaPath path, final List<EnumPair> enums, final Optional<EnumPair> defaultEnum) {
this.path = Preconditions.checkNotNull(path,"path must not be null");
this.enums = ImmutableList.copyOf(Preconditions.checkNotNull(enums, "enums must not be null."));
- if(defaultEnum.isPresent()) {
+ if (defaultEnum.isPresent()) {
Preconditions.checkArgument(enums.contains(defaultEnum.get()),"defaultEnum must be contained in defined enumerations.");
this.defaultEnum = defaultEnum.get();
} else {
for (ModuleId rootModule : rootModules) {
- if(rootModule.equals(new ModuleId(module.getName(), module.getRevision()))) {
+ if (rootModule.equals(new ModuleId(module.getName(), module.getRevision()))) {
return true;
}
//handling/checking imports regarding root modules
for (ModuleImport moduleImport : module.getImports()) {
- if(moduleImport.getModuleName().equals(rootModule.getName())) {
+ if (moduleImport.getModuleName().equals(rootModule.getName())) {
- if(moduleImport.getRevision() != null && !moduleImport.getRevision().equals(rootModule.getRev())) {
+ if (moduleImport.getRevision() != null && !moduleImport.getRevision().equals(rootModule.getRev())) {
return false;
}
// FIXME this is just to preserve backwards compatibility since yangtools do not mind wrong leafref xpaths
// and current expected behaviour for such cases is to just use pure string
// This should throw an exception about incorrect XPath in leafref
- if(dataSchemaNode == null) {
+ if (dataSchemaNode == null) {
return null;
}
final TypeDefinition<?> targetTypeDefinition = typeDefinition(dataSchemaNode);
- if(targetTypeDefinition instanceof LeafrefTypeDefinition) {
+ if (targetTypeDefinition instanceof LeafrefTypeDefinition) {
return getBaseTypeForLeafRef(((LeafrefTypeDefinition) targetTypeDefinition), schemaContext, dataSchemaNode);
} else {
return targetTypeDefinition;
}
public static Optional<SchemaNode> getOriginalIfPossible(final SchemaNode node) {
- if(node instanceof DerivableSchemaNode) {
+ if (node instanceof DerivableSchemaNode) {
@SuppressWarnings("unchecked")
final Optional<SchemaNode> ret = (Optional<SchemaNode>) (((DerivableSchemaNode) node).getOriginal());
return ret;
public static SchemaNode getRootOriginalIfPossible(final SchemaNode data) {
Optional<SchemaNode> previous = Optional.absent();
Optional<SchemaNode> next = getOriginalIfPossible(data);
- while(next.isPresent()) {
+ while (next.isPresent()) {
previous = next;
next = getOriginalIfPossible(next.get());
}
Set<Module> modSet = Sets.newHashSet();
- if(expected!=null) {
+ if (expected!=null) {
modSet = Sets.newHashSet(expected);
}
assertEquals(modSet, modSetFiltering);
//asserting collections
- if(expected!=null) {
+ if (expected!=null) {
for (final Module module : expected) {
assertEquals(module, filteringSchemaContextProxy.findModuleByName(module.getName(), module.getRevision()));
Set<Module> modulesSet = new HashSet<>();
- if(modules!=null) {
+ if (modules!=null) {
modulesSet = Sets.newHashSet(modules);
Set<ModuleId> moduleIds = Sets.newHashSet();
- if(modules!=null && modules.size()>0) {
+ if (modules!=null && modules.size()>0) {
for (Module module : modules) {
*/
@Override
public Module build() {
- if(instance != null) {
+ if (instance != null) {
return instance;
}
final DocumentedNodeBuilder documentedNode;
- if(node instanceof DocumentedNodeBuilder) {
+ if (node instanceof DocumentedNodeBuilder) {
documentedNode = ((DocumentedNodeBuilder) node);
} else {
documentedNode = null;
if (description != null) {
- if(documentedNode != null) {
+ if (documentedNode != null) {
documentedNode.setDescription(description);
} else {
throw new YangParseException(moduleName, line, String.format("Cannot refine description in of target %s",refine.getTargetPathString()));
String reference = refine.getReference();
if (reference != null) {
- if(documentedNode != null) {
+ if (documentedNode != null) {
documentedNode.setReference(reference);
} else {
throw new YangParseException(moduleName, line, String.format("Cannot refine reference in of target %s",refine.getTargetPathString()));
Boolean config = refine.isConfiguration();
if (config != null) {
- if(node instanceof DataSchemaNodeBuilder) {
+ if (node instanceof DataSchemaNodeBuilder) {
((DataSchemaNodeBuilder) node).setConfiguration(config);
} else {
throw new YangParseException(moduleName, line, String.format("Cannot refine config of target %s ",refine.getTargetPathString()));
final String str = stringNode.getText();
char firstChar = str.charAt(0);
final CharMatcher quoteMatcher;
- if(SINGLE_QUOTE_MATCHER.matches(firstChar)) {
+ if (SINGLE_QUOTE_MATCHER.matches(firstChar)) {
quoteMatcher = SINGLE_QUOTE_MATCHER;
} else if (DOUBLE_QUOTE_MATCHER.matches(firstChar)) {
quoteMatcher = DOUBLE_QUOTE_MATCHER;
public static <T extends ParserRuleContext> Optional<T> getFirstContext(final ParserRuleContext context,final Class<T> contextType) {
List<T> potential = context.getRuleContexts(contextType);
- if(potential.isEmpty()) {
+ if (potential.isEmpty()) {
return Optional.absent();
}
return Optional.of(potential.get(0));
final Set<ModuleImport> dependencies = dep.getDependencies();
// in case of submodule, remember belongs to
- if(dep instanceof YangModelDependencyInfo.SubmoduleDependencyInfo) {
+ if (dep instanceof YangModelDependencyInfo.SubmoduleDependencyInfo) {
final String parent = ((YangModelDependencyInfo.SubmoduleDependencyInfo) dep).getParentModule();
submodules.put(id, new BelongsToDependency(parent));
}
protected static NamespaceStorageNode findClosestTowardsRoot(NamespaceStorageNode storage, StorageNodeType type) {
NamespaceStorageNode current = storage;
- while(current != null && current.getStorageNodeType() != type) {
+ while (current != null && current.getStorageNodeType() != type) {
current = current.getParentNamespaceStorage();
}
return current;
// We reset progressing to false.
progressing = false;
Iterator<SourceSpecificContext> currentSource = sourcesToProgress.iterator();
- while(currentSource.hasNext()) {
+ while (currentSource.hasNext()) {
SourceSpecificContext nextSourceCtx = currentSource.next();
PhaseCompletionProgress sourceProgress = nextSourceCtx.tryToCompletePhase(currentPhase);
switch (sourceProgress) {
private boolean removeSatisfied() {
Iterator<AbstractPrerequisite<?>> prereq = unsatisfied.iterator();
boolean allSatisfied = true;
- while(prereq.hasNext()) {
- if(prereq.next().isDone()) {
+ while (prereq.hasNext()) {
+ if (prereq.next().isDone()) {
// We are removing current prerequisite from list.
prereq.remove();
} else {
toNotify.add(listener);
}
}
- for(ValueAddedListener<K> listener : toNotify) {
+ for (ValueAddedListener<K> listener : toNotify) {
listener.trigger(value);
}
}
Map<K, V> localNamespace = (Map<K,V>) namespaces.get(type);
V potential = null;
- if(localNamespace != null) {
+ if (localNamespace != null) {
potential = localNamespace.get(key);
}
- if(potential == null && Utils.isModuleIdentifierWithoutSpecifiedRevision(key)) {
+ if (potential == null && Utils.isModuleIdentifierWithoutSpecifiedRevision(key)) {
potential = getRegardlessOfRevision((ModuleIdentifier)key,(Map<ModuleIdentifier,V>)localNamespace);
}
public <K, V, N extends IdentifierNamespace<K, V>> void addToLocalStorage(final Class<N> type, final K key, final V value) {
@SuppressWarnings("unchecked")
Map<K, V> localNamespace = (Map<K,V>) namespaces.get(type);
- if(localNamespace == null) {
+ if (localNamespace == null) {
checkLocalNamespaceAllowed(type);
localNamespace = new HashMap<>();
namespaces.put(type, localNamespace);
copy.addAllToCopyHistory(this.getCopyHistory());
copy.addToCopyHistory(typeOfCopy);
- if(this.getOriginalCtx() != null) {
+ if (this.getOriginalCtx() != null) {
copy.setOriginalCtx(this.getOriginalCtx());
} else {
copy.setOriginalCtx(this);
private PrefixToModule preLinkagePrefixes() {
PrefixToModuleMap preLinkagePrefixes = new PrefixToModuleMap(true);
Map<String, URI> prefixToNamespaceMap = getAllFromLocalStorage(ImpPrefixToNamespace.class);
- if(prefixToNamespaceMap == null) {
+ if (prefixToNamespaceMap == null) {
//:FIXME if it is a submodule without any import, the map is null. Handle also submodules and includes...
return null;
}
}
void defferedCreate() throws SourceException {
- if(current != null) {
+ if (current != null) {
parent = current.build();
current = null;
}
copy.addAllToCopyHistory(this.getCopyHistory());
copy.addToCopyHistory(typeOfCopy);
- if(this.getOriginalCtx() != null) {
+ if (this.getOriginalCtx() != null) {
copy.setOriginalCtx(this.getOriginalCtx());
} else {
copy.setOriginalCtx(this);
* If the statement argument is not QName, it cannot be mandatory statement,
* therefore return false and skip mandatory nodes validation
*/
- if(!(sourceCtx.getStatementArgument() instanceof QName)) {
+ if (!(sourceCtx.getStatementArgument() instanceof QName)) {
return false;
}
QName sourceStmtQName = (QName) sourceCtx.getStatementArgument();
private static NamespaceStorageNode globalOrStatementSpecific(final NamespaceBehaviour.NamespaceStorageNode storage) {
NamespaceStorageNode current = storage;
- while(current.getStorageNodeType() != StorageNodeType.STATEMENT_LOCAL && current.getStorageNodeType() != StorageNodeType.GLOBAL) {
+ while (current.getStorageNodeType() != StorageNodeType.STATEMENT_LOCAL && current.getStorageNodeType() != StorageNodeType.GLOBAL) {
current = current.getParentNamespaceStorage();
}
return current;
public StmtContext.Mutable<?, ?, EffectiveStatement<?, ?>> getFrom(NamespaceStorageNode storage, SchemaNodeIdentifier key) {
final NamespaceStorageNode lookupStartStorage;
- if(key.isAbsolute() || storage.getStorageNodeType() == StorageNodeType.ROOT_STATEMENT_LOCAL) {
+ if (key.isAbsolute() || storage.getStorageNodeType() == StorageNodeType.ROOT_STATEMENT_LOCAL) {
lookupStartStorage = NamespaceBehaviour.findClosestTowardsRoot(storage, StorageNodeType.GLOBAL);
} else {
lookupStartStorage = storage;
}
Iterator<QName> iterator = key.getPathFromRoot().iterator();
- if(!iterator.hasNext()) {
- if(lookupStartStorage instanceof StmtContext<?, ?, ?>) {
+ if (!iterator.hasNext()) {
+ if (lookupStartStorage instanceof StmtContext<?, ?, ?>) {
return (StmtContext.Mutable<?, ?, EffectiveStatement<?, ?>>) lookupStartStorage;
} else {
return null;
QName nextPath = iterator.next();
StmtContext.Mutable<?, ?, EffectiveStatement<?, ?>> current = (StmtContext.Mutable<?, ?, EffectiveStatement<?, ?>>) lookupStartStorage
.getFromLocalStorage(ChildSchemaNodes.class, nextPath);
- if(current == null && lookupStartStorage instanceof StmtContext<?, ?, ?>) {
+ if (current == null && lookupStartStorage instanceof StmtContext<?, ?, ?>) {
return tryToFindUnknownStatement(nextPath.getLocalName(), (Mutable<?, ?, EffectiveStatement<?, ?>>) lookupStartStorage);
}
while (current != null && iterator.hasNext()) {
final YangErrorListener errorListener = new YangErrorListener();
parser.addErrorListener(errorListener);
- if(stream instanceof NamedFileInputStream) {
+ if (stream instanceof NamedFileInputStream) {
sourceName = stream.toString();
} else {
sourceName = null;
final Collection<StatementContextBase<?, ?, ?>> effectiveSubstatements = ctx.effectiveSubstatements();
final Collection<StatementContextBase<?, ?, ?>> substatementsInit = new ArrayList<>();
- for(StatementContextBase<?, ?, ?> declaredSubstatement : ctx.declaredSubstatements()) {
+ for (StatementContextBase<?, ?, ?> declaredSubstatement : ctx.declaredSubstatements()) {
if (declaredSubstatement.getPublicDefinition().equals(Rfc6020Mapping.USES)) {
substatementsInit.add(declaredSubstatement);
substatementsInit.addAll(declaredSubstatement.getEffectOfStatement());
@Override
public List<UnknownSchemaNode> getUnknownSchemaNodes() {
- if(unknownNodes == null) {
+ if (unknownNodes == null) {
initUnknownSchemaNodes();
}
return unknownNodes;
static List<ModuleOrModuleBuilder> fromAll(final Collection<Module> modules, final Collection<ModuleBuilder> moduleBuilders) {
List<ModuleOrModuleBuilder> result = new ArrayList<>(modules.size() + moduleBuilders.size());
- for(Module m: modules){
+ for (Module m: modules) {
result.add(new ModuleOrModuleBuilder(m));
}
for (ModuleBuilder mb : moduleBuilders) {
Set<TypeDefinition<?>> types = testModule.getTypeDefinitions();
TypeDefinition<?> intExt = null;
- for(TypeDefinition<?> td : types) {
- if("int-ext".equals(td.getQName().getLocalName())) {
+ for (TypeDefinition<?> td : types) {
+ if ("int-ext".equals(td.getQName().getLocalName())) {
intExt = td;
}
}
TypeDefinition<?> uint8 = null;
TypeDefinition<?> pv = null;
- for(TypeDefinition<?> td : union.getTypes()) {
- if("uint8".equals(td.getQName().getLocalName())) {
+ for (TypeDefinition<?> td : union.getTypes()) {
+ if ("uint8".equals(td.getQName().getLocalName())) {
uint8 = td;
- } else if("protocol-version".equals(td.getQName().getLocalName())) {
+ } else if ("protocol-version".equals(td.getQName().getLocalName())) {
pv = td;
}
}