Merge "Cleanup and correct two small equals bugs"
[genius.git] / itm / itm-impl / src / main / java / org / opendaylight / genius / itm / impl / ITMBatchingUtils.java
/*
 * Copyright (c) 2017 Ericsson India Global Services Pvt Ltd. and others.  All rights reserved.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 which accompanies this distribution,
 * and is available at http://www.eclipse.org/legal/epl-v10.html
 */
package org.opendaylight.genius.itm.impl;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

import org.opendaylight.controller.md.sal.binding.api.DataBroker;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.genius.utils.batching.ActionableResource;
import org.opendaylight.genius.utils.batching.ActionableResourceImpl;
import org.opendaylight.genius.utils.batching.DefaultBatchHandler;
import org.opendaylight.genius.utils.batching.ResourceBatchingManager;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

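/**
 * Helper that batches ITM datastore operations: writes, updates and deletes are
 * queued as {@link ActionableResource} items and submitted by the
 * {@link ResourceBatchingManager} against the operational or configuration shard.
 *
 * <p>Typical usage (sketch): call {@link #registerWithBatchManager(DataBroker)} once
 * during startup, then use {@link #write}, {@link #update} and {@link #delete} in
 * place of direct transactions.
 */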
public class ITMBatchingUtils {
    private static final Logger LOG = LoggerFactory.getLogger(ITMBatchingUtils.class);

    public static final int BATCH_SIZE = 1000;
    public static final int PERIODICITY = 500;

    public static Integer batchSize;
    public static Integer batchInterval;

    private static DataBroker dataBroker;

    private static final BlockingQueue<ActionableResource> defaultOperationalShardBufferQ =
            new LinkedBlockingQueue<>();
    private static final BlockingQueue<ActionableResource> defaultConfigShardBufferQ =
            new LinkedBlockingQueue<>();

    // This could be extended in the future.
    public enum EntityType {
        DEFAULT_OPERATIONAL,
        DEFAULT_CONFIG
    }

    public static DataBroker getBroker() {
        return dataBroker;
    }

    public static void setBroker(DataBroker broker) {
        dataBroker = broker;
    }

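    /**
     * Registers the operational and configuration buffer queues with the
     * {@link ResourceBatchingManager}. The batch size and interval default to
     * {@link #BATCH_SIZE} and {@link #PERIODICITY} and can be overridden via the
     * "batch.size" and "batch.wait.time" system properties.
     */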
    public static void registerWithBatchManager(DataBroker broker) {
        setBroker(broker);
        batchSize = Integer.getInteger("batch.size", BATCH_SIZE);
        batchInterval = Integer.getInteger("batch.wait.time", PERIODICITY);
        ResourceBatchingManager resBatchingManager = ResourceBatchingManager.getInstance();
        resBatchingManager.registerBatchableResource("ITM-DEFAULT-OPERATIONAL", defaultOperationalShardBufferQ,
                new DefaultBatchHandler(broker, LogicalDatastoreType.OPERATIONAL, batchSize, batchInterval));
        resBatchingManager.registerBatchableResource("ITM-DEFAULT-CONFIG", defaultConfigShardBufferQ,
                new DefaultBatchHandler(broker, LogicalDatastoreType.CONFIGURATION, batchSize, batchInterval));
    }

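    /**
     * Queues an update of {@code data} at {@code path} for batched submission to
     * the datastore shard selected by {@code entityType}.
     */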
    public static <T extends DataObject> void update(InstanceIdentifier<T> path, T data, EntityType entityType) {
        ActionableResourceImpl actResource = new ActionableResourceImpl(path.toString());
        actResource.setAction(ActionableResource.UPDATE);
        actResource.setInstanceIdentifier(path);
        actResource.setInstance(data);
        LOG.debug("Adding to the Queue to batch the update DS Operation - Id {} data {}", path, data);
        getQueue(entityType).add(actResource);
    }

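    /**
     * Queues a create (write) of {@code data} at {@code path} for batched submission
     * to the datastore shard selected by {@code entityType}.
     */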
    public static <T extends DataObject> void write(InstanceIdentifier<T> path, T data, EntityType entityType) {
        ActionableResourceImpl actResource = new ActionableResourceImpl(path.toString());
        actResource.setAction(ActionableResource.CREATE);
        actResource.setInstanceIdentifier(path);
        actResource.setInstance(data);
        LOG.debug("Adding to the Queue to batch the write DS Operation - Id {} data {}", path, data);
        getQueue(entityType).add(actResource);
    }

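    /**
     * Returns the buffer queue for the given entity type, or {@code null} if the
     * type is not recognised.
     */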
    public static BlockingQueue<ActionableResource> getQueue(EntityType entityType) {
        switch (entityType) {
            case DEFAULT_OPERATIONAL:
                return defaultOperationalShardBufferQ;
            case DEFAULT_CONFIG:
                return defaultConfigShardBufferQ;
            default:
                return null;
        }
    }

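    /**
     * Queues a delete of the entity at {@code path} for batched submission to the
     * datastore shard selected by {@code entityType}.
     */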
    public static <T extends DataObject> void delete(InstanceIdentifier<T> path, EntityType entityType) {
        ActionableResourceImpl actResource = new ActionableResourceImpl(path.toString());
        actResource.setAction(ActionableResource.DELETE);
        actResource.setInstanceIdentifier(path);
        actResource.setInstance(null);
        LOG.debug("Adding to the Queue to batch the delete DS Operation - Id {}", path);
        getQueue(entityType).add(actResource);
    }
}