########################################
# The contents of this file are subject to the MLX PUBLIC LICENSE version
# 1.0 (the "License"); you may not use this file except in
# compliance with the License.
# 
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.  See
# the License for the specific language governing rights and limitations
# under the License.
# 
# The Original Source Code is "compClust", released 2003 September 03.
# 
# The Original Source Code was developed by the California Institute of
# Technology (Caltech).  Portions created by Caltech are Copyright (C)
# 2002-2003 California Institute of Technology. All Rights Reserved.
########################################
# Compare the performance of the python and C DA module
import compClust.mlx.DA.DA as DA
import compClust.mlx.DA.cDA as cDA
import Numeric
import RandomArray

def makeMatrix(num_vectors, num_classes=1):
  """Generate a synthetic data set drawn from a mixture of Gaussians.

  Returns a tuple (data, classes, means, covars): data holds
  num_classes contiguous blocks of num_vectors/num_classes samples
  each, classes records the generating component index for every row,
  and means/covars are the component parameters used.  Only three
  component means/covariances are hard-coded below, so num_classes
  must not exceed 3.
  """
  # FIXME: Where should creating a simulated model live
  # FIXME: in setUp where it gets called for each test
  # FIXME: or in init where it's constructed just once.
  points_per_class = num_vectors/num_classes

  # Three fixed 3-D components; order matters since classes indexes them.
  component_means = [Numeric.array([0, 0, 0]),
                     Numeric.array([-1, -1, -1]),
                     Numeric.array([1, 2, 3])]
  component_covars = [Numeric.array([[1, .5, .1],
                                     [.5, 1, .5],
                                     [.1, .5, 1]]),
                      Numeric.array([[.5, .1, 0],
                                     [.1, .5, .1],
                                     [0, .1, .5]]),
                      Numeric.array([[1, .1, .1],
                                     [.1, 1, .1],
                                     [.1, .1, 1]])]

  means = Numeric.asarray(component_means)
  covars = Numeric.asarray(component_covars)

  total = points_per_class * num_classes
  dim = covars[0].shape[0]
  data = Numeric.zeros((total, dim), 'd')
  classes = Numeric.zeros((total,), 'i')

  # Fill each class block with independent draws from its component.
  for component in range(num_classes):
    for offset in range(points_per_class):
      row = component * points_per_class + offset
      data[row] = RandomArray.multivariate_normal(means[component],
                                                  covars[component])
      classes[row] = component

  return data, classes, means, covars

def check_mixture_likelihood():
  data, classes, means, covars = makeMatrix(3000,3)
  
  daCovars, daWeights = DA.covar_weights_estimate(data, means, classes)
  daLikelihood = DA.mixture_likelihood(data, means, daCovars, daWeights)
  
  # .1 is pretty pathetic, but it takes a lot of points to tighten
  # the estimate
  #failUnless(Numeric.allclose(daCovars, covars, rtol=.1))
  
  cdaCovars, cdaWeights = cDA.covar_weights_estimate(data, means, classes)
  if not Numeric.allclose(cdaCovars, daCovars):
    print "covars didn't match"
  if not Numeric.allclose(cdaWeights, daWeights):
    print "weights didn't match"
    
  cdaLikelihood = cDA.mixture_likelihood(data, means, cdaCovars, cdaWeights)
  if not Numeric.allclose(daLikelihood, cdaLikelihood):
    print "likelihoods didn't match"

def test_vector_prob_gauss():
  data = [ [ 0, 0, 0, 0.1],
           [ 3, 3, 2.9, 3],
  	   [ 1, 1.1, 1, 1],
           [ 1.9, 2, 2, 2],
           [ 1, 1, 1, 1.1],
	   [ 3, 3, 2.9, 3],
	   [ 3, 3.1, 3, 3],
	   [ 1.9, 2, 2, 2],
	   [ 2, 2, 2, 2.1],
	   [ 3, 3, 2.9, 3] ]
  data = Numeric.asarray(data * 10)
  means = Numeric.asarray([ 1.  , 1.05, 1.  , 1.05,])
  covars = Numeric.asarray([[ 0. , 0. , 0.  , 0. ,],
    [ 0.        , 0.00263158, 0.        ,-0.00263158,],
    [ 0.        , 0.        , 0.        , 0.        ,],
    [ 0.        ,-0.00263158, 0.        , 0.00263158,]])
  print cDA.vector_prob_gauss(data, means, covars)



if __name__ == '__main__':
  # Guard the benchmark behind __main__ so importing this module for
  # its helpers no longer triggers a (potentially slow) test run.
  # profile is imported here so the profiling run below can be
  # re-enabled without touching the module-level imports.
  import profile
  #profile.run('check_mixture_likelihood()')
  test_vector_prob_gauss()