/*
 * Copyright (c) 2003, the JUNG Project and the Regents of the University
 * of California.  All rights reserved.
 *
 * This software is open-source under the BSD license; see either
 * "license.txt" or
 * http://jung.sourceforge.net/license.txt for a description.
 *
 * Created on Feb 18, 2004
 */
12 package edu.uci.ics.jung.algorithms.util;
14 import java.util.Collection;
15 import java.util.Iterator;
18 * A utility class for calculating properties of discrete distributions.
19 * Generally, these distributions are represented as arrays of
20 * <code>double</code> values, which are assumed to be normalized
21 * such that the entries in a single array sum to 1.
23 * @author Joshua O'Madadhain
25 public class DiscreteDistribution
29 * Returns the Kullback-Leibler divergence between the
30 * two specified distributions, which must have the same
31 * number of elements. This is defined as
32 * the sum over all <code>i</code> of
33 * <code>dist[i] * Math.log(dist[i] / reference[i])</code>.
34 * Note that this value is not symmetric; see
35 * <code>symmetricKL</code> for a symmetric variant.
36 * @see #symmetricKL(double[], double[])
38 public static double KullbackLeibler(double[] dist, double[] reference)
42 checkLengths(dist, reference);
44 for (int i = 0; i < dist.length; i++)
46 if (dist[i] > 0 && reference[i] > 0)
47 distance += dist[i] * Math.log(dist[i] / reference[i]);
53 * Returns <code>KullbackLeibler(dist, reference) + KullbackLeibler(reference, dist)</code>.
54 * @see #KullbackLeibler(double[], double[])
56 public static double symmetricKL(double[] dist, double[] reference)
58 return KullbackLeibler(dist, reference)
59 + KullbackLeibler(reference, dist);
63 * Returns the squared difference between the
64 * two specified distributions, which must have the same
65 * number of elements. This is defined as
66 * the sum over all <code>i</code> of the square of
67 * <code>(dist[i] - reference[i])</code>.
69 public static double squaredError(double[] dist, double[] reference)
73 checkLengths(dist, reference);
75 for (int i = 0; i < dist.length; i++)
77 double difference = dist[i] - reference[i];
78 error += difference * difference;
84 * Returns the cosine distance between the two
85 * specified distributions, which must have the same number
86 * of elements. The distributions are treated as vectors
87 * in <code>dist.length</code>-dimensional space.
88 * Given the following definitions
90 * <li/><code>v</code> = the sum over all <code>i</code> of <code>dist[i] * dist[i]</code>
91 * <li/><code>w</code> = the sum over all <code>i</code> of <code>reference[i] * reference[i]</code>
92 * <li/><code>vw</code> = the sum over all <code>i</code> of <code>dist[i] * reference[i]</code>
94 * the value returned is defined as <code>vw / (Math.sqrt(v) * Math.sqrt(w))</code>.
96 public static double cosine(double[] dist, double[] reference)
98 double v_prod = 0; // dot product x*x
99 double w_prod = 0; // dot product y*y
100 double vw_prod = 0; // dot product x*y
102 checkLengths(dist, reference);
104 for (int i = 0; i < dist.length; i++)
106 vw_prod += dist[i] * reference[i];
107 v_prod += dist[i] * dist[i];
108 w_prod += reference[i] * reference[i];
110 // cosine distance between v and w
111 return vw_prod / (Math.sqrt(v_prod) * Math.sqrt(w_prod));
115 * Returns the entropy of this distribution.
116 * High entropy indicates that the distribution is
117 * close to uniform; low entropy indicates that the
118 * distribution is close to a Dirac delta (i.e., if
119 * the probability mass is concentrated at a single
120 * point, this method returns 0). Entropy is defined as
121 * the sum over all <code>i</code> of
122 * <code>-(dist[i] * Math.log(dist[i]))</code>
124 public static double entropy(double[] dist)
128 for (int i = 0; i < dist.length; i++)
131 total += dist[i] * Math.log(dist[i]);
137 * Throws an <code>IllegalArgumentException</code> if the two arrays are not of the same length.
139 protected static void checkLengths(double[] dist, double[] reference)
141 if (dist.length != reference.length)
142 throw new IllegalArgumentException("Arrays must be of the same length");
146 * Normalizes, with Lagrangian smoothing, the specified <code>double</code>
147 * array, so that the values sum to 1 (i.e., can be treated as probabilities).
148 * The effect of the Lagrangian smoothing is to ensure that all entries
149 * are nonzero; effectively, a value of <code>alpha</code> is added to each
150 * entry in the original array prior to normalization.
154 public static void normalize(double[] counts, double alpha)
156 double total_count = 0;
158 for (int i = 0; i < counts.length; i++)
159 total_count += counts[i];
161 for (int i = 0; i < counts.length; i++)
162 counts[i] = (counts[i] + alpha)
163 / (total_count + counts.length * alpha);
167 * Returns the mean of the specified <code>Collection</code> of
168 * distributions, which are assumed to be normalized arrays of
169 * <code>double</code> values.
170 * @see #mean(double[][])
172 public static double[] mean(Collection<double[]> distributions)
174 if (distributions.isEmpty())
175 throw new IllegalArgumentException("Distribution collection must be non-empty");
176 Iterator<double[]> iter = distributions.iterator();
177 double[] first = iter.next();
178 double[][] d_array = new double[distributions.size()][first.length];
180 for (int i = 1; i < d_array.length; i++)
181 d_array[i] = iter.next();
183 return mean(d_array);
187 * Returns the mean of the specified array of distributions,
188 * represented as normalized arrays of <code>double</code> values.
189 * Will throw an "index out of bounds" exception if the
190 * distribution arrays are not all of the same length.
192 public static double[] mean(double[][] distributions)
194 double[] d_mean = new double[distributions[0].length];
195 for (int j = 0; j < d_mean.length; j++)
198 for (int i = 0; i < distributions.length; i++)
199 for (int j = 0; j < d_mean.length; j++)
200 d_mean[j] += distributions[i][j] / distributions.length;